Convert MEMBER_TYPE_FORCES_BLK to target hook
/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "gimple.h"
#include "df.h"


/* Enumeration for all of the relational tests, so that we can build
   arrays indexed by the test type, and not worry about the order
   of EQ, NE, etc.  */

enum internal_test
{
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX
};

/* Array giving truth value on whether or not a given hard register
   can support a given mode.  */
char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];

/* Current frame size calculated by compute_frame_size.  */
unsigned xtensa_current_frame_size;

/* Largest block move to handle in-line.  */
#define LARGEST_MOVE_RATIO 15

/* Define the structure for the machine field in struct function.  */
struct GTY(()) machine_function
{
  int accesses_prev_frame;
  bool need_a7_copy;
  bool vararg_a7;
  rtx vararg_a7_copy;
  rtx set_frame_ptr_insn;
};

/* Vector, indexed by hard register number, which contains 1 for a
   register that is allowable in a candidate for leaf function
   treatment.  */

const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
{
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
  1
};

/* Map hard register number to register class.  */
const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
{
  RL_REGS, SP_REG,  RL_REGS, RL_REGS,
  RL_REGS, RL_REGS, RL_REGS, GR_REGS,
  RL_REGS, RL_REGS, RL_REGS, RL_REGS,
  RL_REGS, RL_REGS, RL_REGS, RL_REGS,
  AR_REGS, AR_REGS, BR_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  ACC_REG,
};

static void xtensa_option_override (void);
static enum internal_test map_test_to_internal_test (enum rtx_code);
static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
static rtx gen_float_relational (enum rtx_code, rtx, rtx);
static rtx gen_conditional_move (enum rtx_code, enum machine_mode, rtx, rtx);
static rtx fixup_subreg_mem (rtx);
static struct machine_function * xtensa_init_machine_status (void);
static rtx xtensa_legitimize_tls_address (rtx);
static rtx xtensa_legitimize_address (rtx, rtx, enum machine_mode);
static bool xtensa_mode_dependent_address_p (const_rtx);
static bool xtensa_return_in_msb (const_tree);
static void printx (FILE *, signed int);
static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
static rtx xtensa_builtin_saveregs (void);
static bool xtensa_legitimate_address_p (enum machine_mode, rtx, bool);
static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
                                                        int) ATTRIBUTE_UNUSED;
static section *xtensa_select_rtx_section (enum machine_mode, rtx,
                                           unsigned HOST_WIDE_INT);
static bool xtensa_rtx_costs (rtx, int, int, int, int *, bool);
static int xtensa_register_move_cost (enum machine_mode, reg_class_t,
                                      reg_class_t);
static int xtensa_memory_move_cost (enum machine_mode, reg_class_t, bool);
static tree xtensa_build_builtin_va_list (void);
static bool xtensa_return_in_memory (const_tree, const_tree);
static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
                                         gimple_seq *);
static void xtensa_function_arg_advance (cumulative_args_t, enum machine_mode,
                                         const_tree, bool);
static rtx xtensa_function_arg (cumulative_args_t, enum machine_mode,
                                const_tree, bool);
static rtx xtensa_function_incoming_arg (cumulative_args_t,
                                         enum machine_mode, const_tree, bool);
static rtx xtensa_function_value (const_tree, const_tree, bool);
static rtx xtensa_libcall_value (enum machine_mode, const_rtx);
static bool xtensa_function_value_regno_p (const unsigned int);
static unsigned int xtensa_function_arg_boundary (enum machine_mode,
                                                  const_tree);
static void xtensa_init_builtins (void);
static tree xtensa_fold_builtin (tree, int, tree *, bool);
static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void xtensa_va_start (tree, rtx);
static bool xtensa_frame_pointer_required (void);
static rtx xtensa_static_chain (const_tree, bool);
static void xtensa_asm_trampoline_template (FILE *);
static void xtensa_trampoline_init (rtx, tree, rtx);
static bool xtensa_output_addr_const_extra (FILE *, rtx);
static bool xtensa_cannot_force_const_mem (enum machine_mode, rtx);

static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t);
static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t,
                                            enum machine_mode,
                                            struct secondary_reload_info *);

static bool constantpool_address_p (const_rtx addr);
static bool xtensa_legitimate_constant_p (enum machine_mode, rtx);

static bool xtensa_member_type_forces_blk (const_tree,
                                           enum machine_mode mode);

static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
  REG_ALLOC_ORDER;
\f

/* This macro generates the assembly code for function exit,
   on machines that need it.  If FUNCTION_EPILOGUE is not defined
   then individual return instructions are generated for each
   return statement.  Args are same as for FUNCTION_PROLOGUE.  */

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue

/* These hooks specify assembly directives for creating certain kinds
   of integer object.  */

#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xtensa_mode_dependent_address_p

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST xtensa_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xtensa_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xtensa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_MEMBER_TYPE_FORCES_BLK
#define TARGET_MEMBER_TYPE_FORCES_BLK xtensa_member_type_forces_blk

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xtensa_va_start

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xtensa_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xtensa_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xtensa_function_value_regno_p

#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xtensa_function_arg_advance
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xtensa_function_arg
#undef TARGET_FUNCTION_INCOMING_ARG
#define TARGET_FUNCTION_INCOMING_ARG xtensa_function_incoming_arg
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY xtensa_function_arg_boundary

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB xtensa_return_in_msb

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xtensa_init_builtins
#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN xtensa_fold_builtin
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xtensa_expand_builtin

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xtensa_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xtensa_preferred_output_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD xtensa_secondary_reload

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS)

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM xtensa_cannot_force_const_mem

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED xtensa_frame_pointer_required

#undef TARGET_STATIC_CHAIN
#define TARGET_STATIC_CHAIN xtensa_static_chain
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE xtensa_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE xtensa_option_override

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA xtensa_output_addr_const_extra

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P xtensa_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

\f
/* Functions to test Xtensa immediate operand validity.  */

bool
xtensa_simm8 (HOST_WIDE_INT v)
{
  return v >= -128 && v <= 127;
}


bool
xtensa_simm8x256 (HOST_WIDE_INT v)
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
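
/* The values accepted above are thus the multiples of 256 from
   -128 * 256 = -32768 up to 127 * 256 = 32512, i.e., a signed 8-bit
   quantity scaled by 256.  This is the immediate range of the ADDMI
   instruction, which xtensa_legitimize_address below relies on when
   splitting large offsets: for example, 0x1200 qualifies, while
   0x1234 (not a multiple of 256) and 0x8000 (out of range) do not.  */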


bool
xtensa_simm12b (HOST_WIDE_INT v)
{
  return v >= -2048 && v <= 2047;
}


static bool
xtensa_uimm8 (HOST_WIDE_INT v)
{
  return v >= 0 && v <= 255;
}


static bool
xtensa_uimm8x2 (HOST_WIDE_INT v)
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}


static bool
xtensa_uimm8x4 (HOST_WIDE_INT v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}


static bool
xtensa_b4const (HOST_WIDE_INT v)
{
  switch (v)
    {
    case -1:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return true;
    }
  return false;
}


bool
xtensa_b4const_or_zero (HOST_WIDE_INT v)
{
  if (v == 0)
    return true;
  return xtensa_b4const (v);
}


bool
xtensa_b4constu (HOST_WIDE_INT v)
{
  switch (v)
    {
    case 32768:
    case 65536:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return true;
    }
  return false;
}


bool
xtensa_mask_immediate (HOST_WIDE_INT v)
{
#define MAX_MASK_SIZE 16
  int mask_size;

  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
    {
      if ((v & 1) == 0)
        return false;
      v = v >> 1;
      if (v == 0)
        return true;
    }

  return false;
}
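
/* In other words, the valid mask immediates are exactly 2^n - 1 for
   1 <= n <= MAX_MASK_SIZE: a block of n one bits starting at bit 0
   (0x1, 0x3, 0x7, ... 0xffff).  A value like 0x0ff0 is rejected right
   away because bit 0 is clear, and 0x1ffff is rejected because it
   would need a 17-bit mask.  These are the masks that a single EXTUI
   extract instruction can implement.  */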


/* This is just like the standard true_regnum() function except that it
   works even when reg_renumber is not initialized.  */

int
xt_true_regnum (rtx x)
{
  if (GET_CODE (x) == REG)
    {
      if (reg_renumber
          && REGNO (x) >= FIRST_PSEUDO_REGISTER
          && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = xt_true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}


int
xtensa_valid_move (enum machine_mode mode, rtx *operands)
{
  /* Either the destination or source must be a register, and the
     MAC16 accumulator doesn't count.  */

  if (register_operand (operands[0], mode))
    {
      int dst_regnum = xt_true_regnum (operands[0]);

      /* The stack pointer can only be assigned with a MOVSP opcode.  */
      if (dst_regnum == STACK_POINTER_REGNUM)
        return (mode == SImode
                && register_operand (operands[1], mode)
                && !ACC_REG_P (xt_true_regnum (operands[1])));

      if (!ACC_REG_P (dst_regnum))
        return true;
    }
  if (register_operand (operands[1], mode))
    {
      int src_regnum = xt_true_regnum (operands[1]);
      if (!ACC_REG_P (src_regnum))
        return true;
    }
  return FALSE;
}


int
smalloffset_mem_p (rtx op)
{
  if (GET_CODE (op) == MEM)
    {
      rtx addr = XEXP (op, 0);
      if (GET_CODE (addr) == REG)
        return BASE_REG_P (addr, 0);
      if (GET_CODE (addr) == PLUS)
        {
          rtx offset = XEXP (addr, 0);
          HOST_WIDE_INT val;
          if (GET_CODE (offset) != CONST_INT)
            offset = XEXP (addr, 1);
          if (GET_CODE (offset) != CONST_INT)
            return FALSE;

          val = INTVAL (offset);
          return (val & 3) == 0 && (val >= 0 && val <= 60);
        }
    }
  return FALSE;
}


static bool
constantpool_address_p (const_rtx addr)
{
  const_rtx sym = addr;

  if (GET_CODE (addr) == CONST)
    {
      rtx offset;

      /* Only handle (PLUS (SYM, OFFSET)) form.  */
      addr = XEXP (addr, 0);
      if (GET_CODE (addr) != PLUS)
        return false;

      /* Make sure the address is word aligned.  */
      offset = XEXP (addr, 1);
      if ((!CONST_INT_P (offset))
          || ((INTVAL (offset) & 3) != 0))
        return false;

      sym = XEXP (addr, 0);
    }

  if ((GET_CODE (sym) == SYMBOL_REF)
      && CONSTANT_POOL_ADDRESS_P (sym))
    return true;
  return false;
}


int
constantpool_mem_p (rtx op)
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (GET_CODE (op) == MEM)
    return constantpool_address_p (XEXP (op, 0));
  return FALSE;
}


/* Return TRUE if X is a thread-local symbol.  */

static bool
xtensa_tls_symbol_p (rtx x)
{
  if (! TARGET_HAVE_TLS)
    return false;

  return GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0;
}


void
xtensa_extend_reg (rtx dst, rtx src)
{
  rtx temp = gen_reg_rtx (SImode);
  rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));

  /* Generate paradoxical subregs as needed so that the modes match.  */
  src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
  dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);

  emit_insn (gen_ashlsi3 (temp, src, shift));
  emit_insn (gen_ashrsi3 (dst, temp, shift));
}


bool
xtensa_mem_offset (unsigned v, enum machine_mode mode)
{
  switch (mode)
    {
    case BLKmode:
      /* Handle the worst case for block moves.  See xtensa_expand_block_move
         where we emit an optimized block move operation if the block can be
         moved in < "move_ratio" pieces.  The worst case is when the block is
         aligned but has a size of (3 mod 4) (does this happen?) so that the
         last piece requires a byte load/store.  */
      return (xtensa_uimm8 (v)
              && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));

    case QImode:
      return xtensa_uimm8 (v);

    case HImode:
      return xtensa_uimm8x2 (v);

    case DFmode:
      return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));

    default:
      break;
    }

  return xtensa_uimm8x4 (v);
}
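
/* To summarize the cases above: the legal offsets mirror the 8-bit
   offset fields of the load/store instructions, scaled by the access
   size -- 0..255 for byte accesses (L8UI/S8I), 0..510 in steps of 2
   for 16-bit accesses (L16UI/S16I), and 0..1020 in steps of 4 for
   word accesses (L32I/S32I).  DFmode needs both of its words to be
   addressable, so its effective limit is 1016, and BLKmode reserves
   headroom for the worst-case inline block move of
   MOVE_MAX * LARGEST_MOVE_RATIO = 60 bytes.  */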


/* Make normal rtx_code into something we can index from an array.  */

static enum internal_test
map_test_to_internal_test (enum rtx_code test_code)
{
  enum internal_test test = ITEST_MAX;

  switch (test_code)
    {
    default: break;
    case EQ:  test = ITEST_EQ;  break;
    case NE:  test = ITEST_NE;  break;
    case GT:  test = ITEST_GT;  break;
    case GE:  test = ITEST_GE;  break;
    case LT:  test = ITEST_LT;  break;
    case LE:  test = ITEST_LE;  break;
    case GTU: test = ITEST_GTU; break;
    case GEU: test = ITEST_GEU; break;
    case LTU: test = ITEST_LTU; break;
    case LEU: test = ITEST_LEU; break;
    }

  return test;
}


/* Generate the code to compare two integer values.  The return value is
   the comparison expression.  */

static rtx
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
                    rtx cmp0, /* first operand to compare */
                    rtx cmp1, /* second operand to compare */
                    int *p_invert /* whether branch needs to reverse test */)
{
  struct cmp_info
  {
    enum rtx_code test_code;	/* test code to use in insn */
    bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
    int const_add;		/* constant to add (convert LE -> LT) */
    int reverse_regs;		/* reverse registers in test */
    int invert_const;		/* != 0 if invert value if cmp1 is constant */
    int invert_reg;		/* != 0 if invert value if cmp1 is register */
    int unsignedp;		/* != 0 for unsigned comparisons.  */
  };

  static struct cmp_info info[ (int)ITEST_MAX ] = {

    { EQ,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* EQ  */
    { NE,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* NE  */

    { LT,  xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* GT  */
    { GE,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* GE  */
    { LT,  xtensa_b4const_or_zero, 0, 0, 0, 0, 0 },	/* LT  */
    { GE,  xtensa_b4const_or_zero, 1, 1, 1, 0, 0 },	/* LE  */

    { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* GTU */
    { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* GEU */
    { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 },		/* LTU */
    { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 },		/* LEU */
  };

  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;

  test = map_test_to_internal_test (test_code);
  gcc_assert (test != ITEST_MAX);

  p_info = &info[ (int)test ];

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;

      /* If the immediate overflows or does not fit in the immediate
         field, spill it to a register.  */

      if ((p_info->unsignedp ?
           (uvalue + p_info->const_add > uvalue) :
           (value + p_info->const_add > value)) != (p_info->const_add > 0))
        {
          cmp1 = force_reg (mode, cmp1);
        }
      else if (!(p_info->const_range_p) (value + p_info->const_add))
        {
          cmp1 = force_reg (mode, cmp1);
        }
    }
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
    {
      cmp1 = force_reg (mode, cmp1);
    }

  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
               ? p_info->invert_const
               : p_info->invert_reg);

  /* Comparison to a constant may involve adding 1 to change a LE into LT.
     Comparison between two registers may involve switching the operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
        cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);

    }
  else if (p_info->reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
}
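
/* The const_add/invert_const entries in the table exist because
   Xtensa provides BGEI/BLTI (and the unsigned BGEUI/BLTUI) but no
   BLEI or BGTI.  A test like "x <= 7" is therefore rewritten as the
   inverted test "!(x >= 8)": const_add turns 7 into 8, the GE entry
   supplies the branch, and invert_const tells the caller to reverse
   the branch sense.  Likewise "x > 7" becomes "!(x < 8)".  */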


/* Generate the code to compare two float values.  The return value is
   the comparison expression.  */

static rtx
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
                      rtx cmp0, /* first operand to compare */
                      rtx cmp1 /* second operand to compare */)
{
  rtx (*gen_fn) (rtx, rtx, rtx);
  rtx brtmp;
  int reverse_regs, invert;

  switch (test_code)
    {
    case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
    case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
    case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
    case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
    case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
    case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
    case UNEQ: reverse_regs = 0; invert = 0; gen_fn = gen_suneq_sf; break;
    case LTGT: reverse_regs = 0; invert = 1; gen_fn = gen_suneq_sf; break;
    case UNLE: reverse_regs = 0; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNGT: reverse_regs = 1; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNLT: reverse_regs = 0; invert = 0; gen_fn = gen_sunlt_sf; break;
    case UNGE: reverse_regs = 1; invert = 0; gen_fn = gen_sunle_sf; break;
    case UNORDERED:
      reverse_regs = 0; invert = 0; gen_fn = gen_sunordered_sf; break;
    case ORDERED:
      reverse_regs = 0; invert = 1; gen_fn = gen_sunordered_sf; break;
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
      reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
    }

  if (reverse_regs)
    {
      rtx temp = cmp0;
      cmp0 = cmp1;
      cmp1 = temp;
    }

  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));

  return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
}


void
xtensa_expand_conditional_branch (rtx *operands, enum machine_mode mode)
{
  enum rtx_code test_code = GET_CODE (operands[0]);
  rtx cmp0 = operands[1];
  rtx cmp1 = operands[2];
  rtx cmp;
  int invert;
  rtx label1, label2;

  switch (mode)
    {
    case DFmode:
    default:
      fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));

    case SImode:
      invert = FALSE;
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      break;

    case SFmode:
      if (!TARGET_HARD_FLOAT)
        fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
                                                cmp0, cmp1));
      invert = FALSE;
      cmp = gen_float_relational (test_code, cmp0, cmp1);
      break;
    }

  /* Generate the branch.  */

  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
  label2 = pc_rtx;

  if (invert)
    {
      label2 = label1;
      label1 = pc_rtx;
    }

  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
                                                     label1,
                                                     label2)));
}


static rtx
gen_conditional_move (enum rtx_code code, enum machine_mode mode,
                      rtx op0, rtx op1)
{
  if (mode == SImode)
    {
      rtx cmp;

      /* Jump optimization calls get_condition() which canonicalizes
         comparisons like (GE x <const>) to (GT x <const-1>).
         Transform those comparisons back to GE, since that is the
         comparison supported in Xtensa.  We shouldn't have to
         transform <LE x const> comparisons, because neither
         xtensa_expand_conditional_branch() nor get_condition() will
         produce them.  */

      if ((code == GT) && (op1 == constm1_rtx))
        {
          code = GE;
          op1 = const0_rtx;
        }
      cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);

      if (boolean_operator (cmp, VOIDmode))
        {
          /* Swap the operands to make const0 second.  */
          if (op0 == const0_rtx)
            {
              op0 = op1;
              op1 = const0_rtx;
            }

          /* If not comparing against zero, emit a comparison (subtract).  */
          if (op1 != const0_rtx)
            {
              op0 = expand_binop (SImode, sub_optab, op0, op1,
                                  0, 0, OPTAB_LIB_WIDEN);
              op1 = const0_rtx;
            }
        }
      else if (branch_operator (cmp, VOIDmode))
        {
          /* Swap the operands to make const0 second.  */
          if (op0 == const0_rtx)
            {
              op0 = op1;
              op1 = const0_rtx;

              switch (code)
                {
                case LT: code = GE; break;
                case GE: code = LT; break;
                default: gcc_unreachable ();
                }
            }

          if (op1 != const0_rtx)
            return 0;
        }
      else
        return 0;

      return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
    }

  if (TARGET_HARD_FLOAT && mode == SFmode)
    return gen_float_relational (code, op0, op1);

  return 0;
}


int
xtensa_expand_conditional_move (rtx *operands, int isflt)
{
  rtx dest = operands[0];
  rtx cmp = operands[1];
  enum machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);

  if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
                                    XEXP (cmp, 0), XEXP (cmp, 1))))
    return 0;

  if (isflt)
    gen_fn = (cmp_mode == SImode
              ? gen_movsfcc_internal0
              : gen_movsfcc_internal1);
  else
    gen_fn = (cmp_mode == SImode
              ? gen_movsicc_internal0
              : gen_movsicc_internal1);

  emit_insn (gen_fn (dest, XEXP (cmp, 0), operands[2], operands[3], cmp));
  return 1;
}


int
xtensa_expand_scc (rtx operands[4], enum machine_mode cmp_mode)
{
  rtx dest = operands[0];
  rtx cmp;
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);

  if (!(cmp = gen_conditional_move (GET_CODE (operands[1]), cmp_mode,
                                    operands[2], operands[3])))
    return 0;

  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));

  gen_fn = (cmp_mode == SImode
            ? gen_movsicc_internal0
            : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
  return 1;
}


/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE is
   for the output, i.e., the input operands are twice as big as MODE.  */

void
xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
{
  switch (GET_CODE (operands[1]))
    {
    case REG:
      operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
      break;

    case MEM:
      operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
      operands[2] = adjust_address (operands[1], mode, 0);
      break;

    case CONST_INT:
    case CONST_DOUBLE:
      split_double (operands[1], &operands[2], &operands[3]);
      break;

    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[0]))
    {
    case REG:
      operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
      break;

    case MEM:
      operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
      operands[0] = adjust_address (operands[0], mode, 0);
      break;

    default:
      gcc_unreachable ();
    }
}


/* Emit insns to move operands[1] into operands[0].
   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.  */

int
xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
{
  rtx src = operands[1];

  if (CONSTANT_P (src)
      && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
    {
      rtx dst = operands[0];

      if (xtensa_tls_referenced_p (src))
        {
          rtx addend = NULL;

          if (GET_CODE (src) == CONST && GET_CODE (XEXP (src, 0)) == PLUS)
            {
              addend = XEXP (XEXP (src, 0), 1);
              src = XEXP (XEXP (src, 0), 0);
            }

          src = xtensa_legitimize_tls_address (src);
          if (addend)
            {
              src = gen_rtx_PLUS (mode, src, addend);
              src = force_operand (src, dst);
            }
          emit_move_insn (dst, src);
          return 1;
        }

      if (! TARGET_CONST16)
        {
          src = force_const_mem (SImode, src);
          operands[1] = src;
        }

      /* PC-relative loads are always SImode, and CONST16 is only
         supported in the movsi pattern, so add a SUBREG for any other
         (smaller) mode.  */

      if (mode != SImode)
        {
          if (register_operand (dst, mode))
            {
              emit_move_insn (simplify_gen_subreg (SImode, dst, mode, 0), src);
              return 1;
            }
          else
            {
              src = force_reg (SImode, src);
              src = gen_lowpart_SUBREG (mode, src);
              operands[1] = src;
            }
        }
    }

  if (!(reload_in_progress | reload_completed)
      && !xtensa_valid_move (mode, operands))
    operands[1] = force_reg (mode, operands[1]);

  operands[1] = xtensa_copy_incoming_a7 (operands[1]);

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload, so we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}


static rtx
fixup_subreg_mem (rtx x)
{
  if (GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      rtx temp =
        gen_rtx_SUBREG (GET_MODE (x),
                        reg_equiv_mem (REGNO (SUBREG_REG (x))),
                        SUBREG_BYTE (x));
      x = alter_subreg (&temp);
    }
  return x;
}


/* Check if an incoming argument in a7 is expected to be used soon and
   if OPND is a register or register pair that includes a7.  If so,
   create a new pseudo and copy a7 into that pseudo at the very
   beginning of the function, followed by the special "set_frame_ptr"
   unspec_volatile insn.  The return value is either the original
   operand, if it is not a7, or the new pseudo containing a copy of
   the incoming argument.  This is necessary because the register
   allocator will ignore conflicts with a7 and may either assign some
   other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
   the incoming argument in a7.  By copying the argument out of a7 as
   the very first thing, and then immediately following that with an
   unspec_volatile to keep the scheduler away, we should avoid any
   problems.  Putting the set_frame_ptr insn at the beginning, with
   only the a7 copy before it, also makes it easier for the prologue
   expander to initialize the frame pointer after the a7 copy and to
   fix up the a7 copy to use the stack pointer instead of the frame
   pointer.  */

rtx
xtensa_copy_incoming_a7 (rtx opnd)
{
  rtx entry_insns = 0;
  rtx reg, tmp;
  enum machine_mode mode;

  if (!cfun->machine->need_a7_copy)
    return opnd;

  /* This function should never be called again once a7 has been copied.  */
  gcc_assert (!cfun->machine->set_frame_ptr_insn);

  mode = GET_MODE (opnd);

  /* The operand using a7 may come in a later instruction, so just return
     the original operand if it doesn't use a7.  */
  reg = opnd;
  if (GET_CODE (reg) == SUBREG)
    {
      gcc_assert (SUBREG_BYTE (reg) == 0);
      reg = SUBREG_REG (reg);
    }
  if (GET_CODE (reg) != REG
      || REGNO (reg) > A7_REG
      || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
    return opnd;

  /* 1-word args will always be in a7; 2-word args in a6/a7.  */
  gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);

  cfun->machine->need_a7_copy = false;

  /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
     create the REG for a7 so that hard_frame_pointer_rtx is not used.  */

  start_sequence ();
  tmp = gen_reg_rtx (mode);

  switch (mode)
    {
    case DFmode:
    case DImode:
      /* Copy the value out of A7 here but keep the first word in A6 until
         after the set_frame_ptr insn.  Otherwise, the register allocator
         may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
         value.  */
      emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
                                     gen_raw_REG (SImode, A7_REG)));
      break;
    case SFmode:
      emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case SImode:
      emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case HImode:
      emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    case QImode:
      emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
      break;
    default:
      gcc_unreachable ();
    }

  cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());

  /* For DF and DI mode arguments, copy the incoming value in A6 now.  */
  if (mode == DFmode || mode == DImode)
    emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
                                   gen_rtx_REG (SImode, A7_REG - 1)));
  entry_insns = get_insns ();
  end_sequence ();

  if (cfun->machine->vararg_a7)
    {
      /* This is called from within builtin_saveregs, which will insert the
         saveregs code at the function entry, ahead of anything placed at
         the function entry now.  Instead, save the sequence to be inserted
         at the beginning of the saveregs code.  */
      cfun->machine->vararg_a7_copy = entry_insns;
    }
  else
    {
      /* Put entry_insns after the NOTE that starts the function.  If
         this is inside a start_sequence, make the outer-level insn
         chain current, so the code is placed at the start of the
         function.  */
      push_topmost_sequence ();
      /* Do not use entry_of_function() here.  This is called from within
         expand_function_start, when the CFG still holds GIMPLE.  */
      emit_insn_after (entry_insns, get_insns ());
      pop_topmost_sequence ();
    }

  return tmp;
}


/* Try to expand a block move operation to a sequence of RTL move
   instructions.  If not optimizing, or if the block size is not a
   constant, or if the block is too large, the expansion fails and GCC
   falls back to calling memcpy().

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

int
xtensa_expand_block_move (rtx *operands)
{
  static const enum machine_mode mode_from_align[] =
  {
    VOIDmode, QImode, HImode, VOIDmode, SImode,
  };

  rtx dst_mem = operands[0];
  rtx src_mem = operands[1];
  HOST_WIDE_INT bytes, align;
  int num_pieces, move_ratio;
  rtx temp[2];
  enum machine_mode mode[2];
  int amount[2];
  bool active[2];
  int phase = 0;
  int next;
  int offset_ld = 0;
  int offset_st = 0;
  rtx x;

  /* If this is not a fixed size move, just call memcpy.  */
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
    return 0;

  bytes = INTVAL (operands[2]);
  align = INTVAL (operands[3]);

  /* Anything to move?  */
  if (bytes <= 0)
    return 0;

  if (align > MOVE_MAX)
    align = MOVE_MAX;

  /* Decide whether to expand inline based on the optimization level.  */
  move_ratio = 4;
  if (optimize > 2)
    move_ratio = LARGEST_MOVE_RATIO;
  num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway.  */
  if (num_pieces > move_ratio)
    return 0;

  x = XEXP (dst_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      dst_mem = replace_equiv_address (dst_mem, x);
    }

  x = XEXP (src_mem, 0);
  if (!REG_P (x))
    {
      x = force_reg (Pmode, x);
      src_mem = replace_equiv_address (src_mem, x);
    }

  active[0] = active[1] = false;

  do
    {
      next = phase;
      phase ^= 1;

      if (bytes > 0)
        {
          int next_amount;

          next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
          next_amount = MIN (next_amount, align);

          amount[next] = next_amount;
          mode[next] = mode_from_align[next_amount];
          temp[next] = gen_reg_rtx (mode[next]);

          x = adjust_address (src_mem, mode[next], offset_ld);
          emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));

          offset_ld += next_amount;
          bytes -= next_amount;
          active[next] = true;
        }

      if (active[phase])
        {
          active[phase] = false;

          x = adjust_address (dst_mem, mode[phase], offset_st);
          emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));

          offset_st += amount[phase];
        }
    }
  while (active[next]);

  return 1;
}
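
/* As a concrete illustration of the expansion above: a 7-byte copy
   with 4-byte alignment becomes one SImode, one HImode and one
   QImode load/store pair, and at -O3 (where move_ratio reaches
   LARGEST_MOVE_RATIO) an aligned copy of up to 60 bytes is expanded
   inline.  The two-entry temp/mode/amount arrays software-pipeline
   the sequence: the load for one piece is emitted before the store
   of the previous piece, overlapping their latencies.  */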


void
xtensa_expand_nonlocal_goto (rtx *operands)
{
  rtx goto_handler = operands[1];
  rtx containing_fp = operands[3];

  /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
     is too big to generate in-line.  */

  if (GET_CODE (containing_fp) != REG)
    containing_fp = force_reg (Pmode, containing_fp);

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
                     LCT_NORMAL, VOIDmode, 2,
                     containing_fp, Pmode,
                     goto_handler, Pmode);
}


static struct machine_function *
xtensa_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}


/* Mask VAL to the width of MODE, then shift it left by COUNT bits.  */

static inline rtx
xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
{
  val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
                             NULL_RTX, 1, OPTAB_DIRECT);
  return expand_simple_binop (SImode, ASHIFT, val, count,
                              NULL_RTX, 1, OPTAB_DIRECT);
}


/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode.  */

struct alignment_context
{
  rtx memsi;	  /* SI aligned memory location.  */
  rtx shift;	  /* Bit offset with regard to lsb.  */
  rtx modemask;	  /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
};


/* Initialize structure AC for word access to HI and QI mode memory.  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx byteoffset = NULL_RTX;
  bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
                                   GEN_INT (-GET_MODE_SIZE (SImode)),
                                   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      byteoffset = expand_simple_binop (Pmode, AND, addr,
                                        GEN_INT (GET_MODE_SIZE (SImode) - 1),
                                        NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Calculate shiftcount.  */
  if (TARGET_BIG_ENDIAN)
    {
      ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
      if (!aligned)
        ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
                                         NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    {
      if (aligned)
        ac->shift = NULL_RTX;
      else
        ac->shift = byteoffset;
    }

  if (ac->shift != NULL_RTX)
    {
      /* Shift is the byte count, but we need the bitcount.  */
      ac->shift = expand_simple_binop (SImode, MULT, ac->shift,
                                       GEN_INT (BITS_PER_UNIT),
                                       NULL_RTX, 1, OPTAB_DIRECT);
      ac->modemask = expand_simple_binop (SImode, ASHIFT,
                                          GEN_INT (GET_MODE_MASK (mode)),
                                          ac->shift,
                                          NULL_RTX, 1, OPTAB_DIRECT);
    }
  else
    ac->modemask = GEN_INT (GET_MODE_MASK (mode));

  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
}
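
/* Worked example (little-endian, alignment unknown): for a HImode
   MEM whose address turns out to be 2 mod 4, byteoffset is 2, so
   shift ends up as 16, modemask as 0xffff0000 and modemaski as
   0x0000ffff.  The halfword is thus manipulated in bits 16..31 of
   the containing word while the compare-and-swap loops below keep
   the other two bytes intact.  */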


/* Expand an atomic compare and swap operation for HImode and QImode.
   MEM is the memory location, CMP the old value to compare MEM with
   and NEW_RTX the value to set if CMP == MEM.  */

void
xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
{
  enum machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx tmp, cmpv, newv, val;
  rtx oldval = gen_reg_rtx (SImode);
  rtx res = gen_reg_rtx (SImode);
  rtx csloop = gen_label_rtx ();
  rtx csend = gen_label_rtx ();

  init_alignment_context (&ac, mem);

  if (ac.shift != NULL_RTX)
    {
      cmp = xtensa_expand_mask_and_shift (cmp, mode, ac.shift);
      new_rtx = xtensa_expand_mask_and_shift (new_rtx, mode, ac.shift);
    }

  /* Load the surrounding word into VAL with the MEM value masked out.  */
  val = force_reg (SImode, expand_simple_binop (SImode, AND, ac.memsi,
                                                ac.modemaski, NULL_RTX, 1,
                                                OPTAB_DIRECT));
  emit_label (csloop);

  /* Patch CMP and NEW_RTX into VAL at correct position.  */
  cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
                                                 NULL_RTX, 1, OPTAB_DIRECT));
  newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
                                                 NULL_RTX, 1, OPTAB_DIRECT));

  /* Jump to end if we're done.  */
  emit_insn (gen_sync_compare_and_swapsi (res, ac.memsi, cmpv, newv));
  emit_cmp_and_jump_insns (res, cmpv, EQ, const0_rtx, SImode, true, csend);

  /* Check for changes outside mode.  */
  emit_move_insn (oldval, val);
  tmp = expand_simple_binop (SImode, AND, res, ac.modemaski,
                             val, 1, OPTAB_DIRECT);
  if (tmp != val)
    emit_move_insn (val, tmp);

  /* If so, the bits outside the field changed; loop back and retry.  */
  emit_cmp_and_jump_insns (oldval, val, NE, const0_rtx, SImode, true, csloop);

  emit_label (csend);

  /* Return the correct part of the bitfield.  */
  convert_move (target,
                (ac.shift == NULL_RTX ? res
                 : expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
                                        NULL_RTX, 1, OPTAB_DIRECT)),
                1);
}


/* Expand an atomic operation CODE of mode MODE (either HImode or QImode --
   the default expansion works fine for SImode).  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value
   MEM holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  */

void
xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
                      bool after)
{
  enum machine_mode mode = GET_MODE (mem);
  struct alignment_context ac;
  rtx csloop = gen_label_rtx ();
  rtx cmp, tmp;
  rtx old = gen_reg_rtx (SImode);
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = NULL_RTX;

  init_alignment_context (&ac, mem);

  /* Prepare values before the compare-and-swap loop.  */
  if (ac.shift != NULL_RTX)
    val = xtensa_expand_mask_and_shift (val, mode, ac.shift);
  switch (code)
    {
    case PLUS:
    case MINUS:
      orig = gen_reg_rtx (SImode);
      convert_move (orig, val, 1);
      break;

    case SET:
    case IOR:
    case XOR:
      break;

    case MULT: /* NAND */
    case AND:
      /* val = "11..1<val>11..1" */
      val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  /* Load full word.  Subsequent loads are performed by S32C1I.  */
  cmp = force_reg (SImode, ac.memsi);

  emit_label (csloop);
  emit_move_insn (old, cmp);

  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, old, orig,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      tmp = expand_simple_binop (SImode, AND, old, ac.modemaski,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, IOR, tmp, val,
                                 new_rtx, 1, OPTAB_DIRECT);
      break;

    case AND:
    case IOR:
    case XOR:
      tmp = expand_simple_binop (SImode, code, old, val,
                                 new_rtx, 1, OPTAB_DIRECT);
      break;

    case MULT: /* NAND */
      tmp = expand_simple_binop (SImode, XOR, old, ac.modemask,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      tmp = expand_simple_binop (SImode, AND, tmp, val,
                                 new_rtx, 1, OPTAB_DIRECT);
      break;

    default:
      gcc_unreachable ();
    }

  if (tmp != new_rtx)
    emit_move_insn (new_rtx, tmp);
  emit_insn (gen_sync_compare_and_swapsi (cmp, ac.memsi, old, new_rtx));
  emit_cmp_and_jump_insns (cmp, old, NE, const0_rtx, SImode, true, csloop);

  if (target)
    {
      tmp = (after ? new_rtx : cmp);
      convert_move (target,
                    (ac.shift == NULL_RTX ? tmp
                     : expand_simple_binop (SImode, LSHIFTRT, tmp, ac.shift,
                                            NULL_RTX, 1, OPTAB_DIRECT)),
                    1);
    }
}
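
/* Note the trick used for AND and NAND above: before the loop, VAL
   is widened to "11..1<val>11..1", so ANDing against the full word
   leaves the bytes outside the field unchanged.  For a QImode
   operand 0x0f at byte offset 0 (little-endian), val is 0x0000000f
   after masking and shifting and becomes 0xffffff0f after the XOR
   with modemaski.  */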


void
xtensa_setup_frame_addresses (void)
{
  /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true.  */
  cfun->machine->accesses_prev_frame = 1;

  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     LCT_NORMAL, VOIDmode, 0);
}


/* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
   a comment showing where the end of the loop is.  However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there.  If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop).  If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop.  When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need to
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken.  */

void
xtensa_emit_loop_end (rtx insn, rtx *operands)
{
  char done = 0;

  for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
        case BARRIER:
          break;

        case CODE_LABEL:
          output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
          done = 1;
          break;

        default:
          {
            rtx body = PATTERN (insn);

            if (GET_CODE (body) == JUMP_INSN)
              {
                output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
                done = 1;
              }
            else if ((GET_CODE (body) != USE)
                     && (GET_CODE (body) != CLOBBER))
              done = 1;
          }
          break;
        }
    }

  output_asm_insn ("# loop end for %0", operands);
}


char *
xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
{
  static char result[64];
  enum rtx_code code;
  const char *op;

  code = GET_CODE (operands[3]);
  switch (code)
    {
    case EQ:  op = inverted ? "ne" : "eq"; break;
    case NE:  op = inverted ? "eq" : "ne"; break;
    case LT:  op = inverted ? "ge" : "lt"; break;
    case GE:  op = inverted ? "lt" : "ge"; break;
    case LTU: op = inverted ? "geu" : "ltu"; break;
    case GEU: op = inverted ? "ltu" : "geu"; break;
    default:  gcc_unreachable ();
    }

  if (immed)
    {
      if (INTVAL (operands[1]) == 0)
        sprintf (result, "b%sz%s\t%%0, %%2", op,
                 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
      else
        sprintf (result, "b%si\t%%0, %%d1, %%2", op);
    }
  else
    sprintf (result, "b%s\t%%0, %%1, %%2", op);

  return result;
}
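
/* For illustration (register numbers and label are made up): an
   (EQ a2 (const_int 0)) test against .L5 comes out as
   "beqz.n a2, .L5" when density is available, a nonzero immediate
   such as (LT a2 (const_int 4)) as "blti a2, 4, .L5", and a
   register-register test as "blt a2, a3, .L5".  */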


char *
xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
{
  static char result[64];
  const char *op;

  switch (GET_CODE (operands[3]))
    {
    case EQ: op = inverted ? "bs" : "bc"; break;
    case NE: op = inverted ? "bc" : "bs"; break;
    default: gcc_unreachable ();
    }

  if (immed)
    {
      unsigned bitnum = INTVAL (operands[1]) & 0x1f;
      operands[1] = GEN_INT (bitnum);
      sprintf (result, "b%si\t%%0, %%d1, %%2", op);
    }
  else
    sprintf (result, "b%s\t%%0, %%1, %%2", op);

  return result;
}


char *
xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
{
  static char result[64];
  enum rtx_code code;
  const char *op;

  code = GET_CODE (operands[4]);
  if (isbool)
    {
      switch (code)
        {
        case EQ: op = inverted ? "t" : "f"; break;
        case NE: op = inverted ? "f" : "t"; break;
        default: gcc_unreachable ();
        }
    }
  else
    {
      switch (code)
        {
        case EQ: op = inverted ? "nez" : "eqz"; break;
        case NE: op = inverted ? "eqz" : "nez"; break;
        case LT: op = inverted ? "gez" : "ltz"; break;
        case GE: op = inverted ? "ltz" : "gez"; break;
        default: gcc_unreachable ();
        }
    }

  sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
           op, isfp ? ".s" : "", inverted ? 3 : 2);
  return result;
}


char *
xtensa_emit_call (int callop, rtx *operands)
{
  static char result[64];
  rtx tgt = operands[callop];

  if (GET_CODE (tgt) == CONST_INT)
    sprintf (result, "call8\t0x%lx", INTVAL (tgt));
  else if (register_operand (tgt, VOIDmode))
    sprintf (result, "callx8\t%%%d", callop);
  else
    sprintf (result, "call8\t%%%d", callop);

  return result;
}


bool
xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
{
  /* Allow constant pool addresses.  */
  if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && ! TARGET_CONST16 && constantpool_address_p (addr)
      && ! xtensa_tls_referenced_p (addr))
    return true;

  while (GET_CODE (addr) == SUBREG)
    addr = SUBREG_REG (addr);

  /* Allow base registers.  */
  if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
    return true;

  /* Check for "register + offset" addressing.  */
  if (GET_CODE (addr) == PLUS)
    {
      rtx xplus0 = XEXP (addr, 0);
      rtx xplus1 = XEXP (addr, 1);
      enum rtx_code code0;
      enum rtx_code code1;

      while (GET_CODE (xplus0) == SUBREG)
        xplus0 = SUBREG_REG (xplus0);
      code0 = GET_CODE (xplus0);

      while (GET_CODE (xplus1) == SUBREG)
        xplus1 = SUBREG_REG (xplus1);
      code1 = GET_CODE (xplus1);

      /* Swap operands if necessary so the register is first.  */
      if (code0 != REG && code1 == REG)
        {
          xplus0 = XEXP (addr, 1);
          xplus1 = XEXP (addr, 0);
          code0 = GET_CODE (xplus0);
          code1 = GET_CODE (xplus1);
        }

      if (code0 == REG && BASE_REG_P (xplus0, strict)
          && code1 == CONST_INT
          && xtensa_mem_offset (INTVAL (xplus1), mode))
        return true;
    }

  return false;
}


/* Construct the SYMBOL_REF for the _TLS_MODULE_BASE_ symbol.  */

static GTY(()) rtx xtensa_tls_module_base_symbol;

static rtx
xtensa_tls_module_base (void)
{
  if (! xtensa_tls_module_base_symbol)
    {
      xtensa_tls_module_base_symbol =
        gen_rtx_SYMBOL_REF (Pmode, "_TLS_MODULE_BASE_");
      SYMBOL_REF_FLAGS (xtensa_tls_module_base_symbol)
        |= TLS_MODEL_GLOBAL_DYNAMIC << SYMBOL_FLAG_TLS_SHIFT;
    }

  return xtensa_tls_module_base_symbol;
}


static rtx
xtensa_call_tls_desc (rtx sym, rtx *retp)
{
  rtx fn, arg, a10, call_insn, insns;

  start_sequence ();
  fn = gen_reg_rtx (Pmode);
  arg = gen_reg_rtx (Pmode);
  a10 = gen_rtx_REG (Pmode, 10);

  emit_insn (gen_tls_func (fn, sym));
  emit_insn (gen_tls_arg (arg, sym));
  emit_move_insn (a10, arg);
  call_insn = emit_call_insn (gen_tls_call (a10, fn, sym, const1_rtx));
  use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), a10);
  insns = get_insns ();
  end_sequence ();

  *retp = a10;
  return insns;
}


static rtx
xtensa_legitimize_tls_address (rtx x)
{
  unsigned int model = SYMBOL_REF_TLS_MODEL (x);
  rtx dest, tp, ret, modbase, base, addend, insns;

  dest = gen_reg_rtx (Pmode);
  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      insns = xtensa_call_tls_desc (x, &ret);
      emit_libcall_block (insns, dest, ret, x);
      break;

    case TLS_MODEL_LOCAL_DYNAMIC:
      base = gen_reg_rtx (Pmode);
      modbase = xtensa_tls_module_base ();
      insns = xtensa_call_tls_desc (modbase, &ret);
      emit_libcall_block (insns, base, ret, modbase);
      addend = force_reg (SImode, gen_sym_DTPOFF (x));
      emit_insn (gen_addsi3 (dest, base, addend));
      break;

    case TLS_MODEL_INITIAL_EXEC:
    case TLS_MODEL_LOCAL_EXEC:
      tp = gen_reg_rtx (SImode);
      emit_insn (gen_load_tp (tp));
      addend = force_reg (SImode, gen_sym_TPOFF (x));
      emit_insn (gen_addsi3 (dest, tp, addend));
      break;

    default:
      gcc_unreachable ();
    }

  return dest;
}


rtx
xtensa_legitimize_address (rtx x,
                           rtx oldx ATTRIBUTE_UNUSED,
                           enum machine_mode mode)
{
  if (xtensa_tls_symbol_p (x))
    return xtensa_legitimize_tls_address (x);

  if (GET_CODE (x) == PLUS)
    {
      rtx plus0 = XEXP (x, 0);
      rtx plus1 = XEXP (x, 1);

      if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
        {
          plus0 = XEXP (x, 1);
          plus1 = XEXP (x, 0);
        }

      /* Try to split up the offset to use an ADDMI instruction.  */
      if (GET_CODE (plus0) == REG
          && GET_CODE (plus1) == CONST_INT
          && !xtensa_mem_offset (INTVAL (plus1), mode)
          && !xtensa_simm8 (INTVAL (plus1))
          && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
          && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
        {
          rtx temp = gen_reg_rtx (Pmode);
          rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
          emit_insn (gen_rtx_SET (Pmode, temp,
                                  gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
          return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
        }
    }

  return x;
}
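
/* Example of the ADDMI split above: base + 0x1234 is out of range
   for every load/store offset field, but it decomposes into
   0x1200 + 0x34, where 0x1200 = 0x1234 & ~0xff is a valid ADDMI
   immediate (a multiple of 256) and 0x34 is a valid SImode offset.
   The result is a single ADDMI into a temporary plus an ordinary
   reg+offset access, instead of materializing the whole constant.  */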

/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   Treat constant-pool references as "mode dependent" since they can
   only be accessed with SImode loads.  This works around a bug in the
   combiner where a constant pool reference is temporarily converted
   to an HImode load, which is then assumed to zero-extend based on
   our definition of LOAD_EXTEND_OP.  This is wrong because the high
   bits of a 16-bit value in the constant pool are now sign-extended
   by default.  */

static bool
xtensa_mode_dependent_address_p (const_rtx addr)
{
  return constantpool_address_p (addr);
}

/* Helper for xtensa_tls_referenced_p.  */

static int
xtensa_tls_referenced_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (*x) == SYMBOL_REF)
    return SYMBOL_REF_TLS_MODEL (*x) != 0;

  /* Ignore TLS references that have already been legitimized.  */
  if (GET_CODE (*x) == UNSPEC)
    {
      switch (XINT (*x, 1))
        {
        case UNSPEC_TPOFF:
        case UNSPEC_DTPOFF:
        case UNSPEC_TLS_FUNC:
        case UNSPEC_TLS_ARG:
        case UNSPEC_TLS_CALL:
          return -1;
        default:
          break;
        }
    }

  return 0;
}


/* Return TRUE if X contains any TLS symbol references.  */

bool
xtensa_tls_referenced_p (rtx x)
{
  if (! TARGET_HAVE_TLS)
    return false;

  return for_each_rtx (&x, xtensa_tls_referenced_p_1, NULL);
}


/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */

static bool
xtensa_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  return xtensa_tls_referenced_p (x);
}


/* Return the debugger register number to use for 'regno'.  */

int
xtensa_dbx_register_number (int regno)
{
  int first = -1;

  if (GP_REG_P (regno))
    {
      regno -= GP_REG_FIRST;
      first = 0;
    }
  else if (BR_REG_P (regno))
    {
      regno -= BR_REG_FIRST;
      first = 16;
    }
  else if (FP_REG_P (regno))
    {
      regno -= FP_REG_FIRST;
      first = 48;
    }
  else if (ACC_REG_P (regno))
    {
      first = 0x200;	/* Start of Xtensa special registers.  */
      regno = 16;	/* ACCLO is special register 16.  */
    }

  /* When optimizing, we sometimes get asked about pseudo-registers
     that don't represent hard registers.  Return 0 for these.  */
  if (first == -1)
    return 0;

  return first + regno;
}
2053
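/* For example: a5 maps to 5, b2 maps to 16 + 2 = 18, f3 maps to
   48 + 3 = 51, and the MAC16 accumulator maps to 0x200 + 16 = 0x210.  */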
2054
2055 /* Argument support functions. */
2056
2057 /* Initialize CUMULATIVE_ARGS for a function. */
2058
2059 void
2060 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
2061 {
2062 cum->arg_words = 0;
2063 cum->incoming = incoming;
2064 }
2065
2066
2067 /* Advance the argument to the next argument position. */
2068
2069 static void
2070 xtensa_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
2071 const_tree type, bool named ATTRIBUTE_UNUSED)
2072 {
2073 int words, max;
2074 int *arg_words;
2075
2076 arg_words = &get_cumulative_args (cum)->arg_words;
2077 max = MAX_ARGS_IN_REGISTERS;
2078
2079 words = (((mode != BLKmode)
2080 ? (int) GET_MODE_SIZE (mode)
2081 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2082
2083 if (*arg_words < max
2084 && (targetm.calls.must_pass_in_stack (mode, type)
2085 || *arg_words + words > max))
2086 *arg_words = max;
2087
2088 *arg_words += words;
2089 }
2090
2091
2092 /* Return an RTL expression for the register in which an argument of the
2093 given mode is passed, or NULL_RTX if the argument is to be passed on
2094 the stack. INCOMING_P is true for an incoming argument. */
2095
2096 static rtx
2097 xtensa_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
2098 const_tree type, bool incoming_p)
2099 {
2100 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2101 int regbase, words, max;
2102 int *arg_words;
2103 int regno;
2104
2105 arg_words = &cum->arg_words;
2106 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
2107 max = MAX_ARGS_IN_REGISTERS;
2108
2109 words = (((mode != BLKmode)
2110 ? (int) GET_MODE_SIZE (mode)
2111 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2112
2113 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
2114 {
2115 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
2116 *arg_words = (*arg_words + align - 1) & -align;
2117 }
2118
2119 if (*arg_words + words > max)
2120 return NULL_RTX;
2121
2122 regno = regbase + *arg_words;
2123
2124 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
2125 cfun->machine->need_a7_copy = true;
2126
2127 return gen_rtx_REG (mode, regno);
2128 }
2129
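/* Worked example, assuming the usual windowed-ABI values
   GP_ARG_FIRST == 2 and MAX_ARGS_IN_REGISTERS == 6: for
   f (int x, double y), x is passed in a2, and y (2 words with 8-byte
   alignment) has its position rounded from word 1 up to word 2, so it
   is passed in a4/a5.  An argument whose rounded position plus size
   exceeds 6 words is passed entirely on the stack.  */
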
2130 /* Implement TARGET_FUNCTION_ARG. */
2131
2132 static rtx
2133 xtensa_function_arg (cumulative_args_t cum, enum machine_mode mode,
2134 const_tree type, bool named ATTRIBUTE_UNUSED)
2135 {
2136 return xtensa_function_arg_1 (cum, mode, type, false);
2137 }
2138
2139 /* Implement TARGET_FUNCTION_INCOMING_ARG. */
2140
2141 static rtx
2142 xtensa_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
2143 const_tree type, bool named ATTRIBUTE_UNUSED)
2144 {
2145 return xtensa_function_arg_1 (cum, mode, type, true);
2146 }
2147
2148 static unsigned int
2149 xtensa_function_arg_boundary (enum machine_mode mode, const_tree type)
2150 {
2151 unsigned int alignment;
2152
2153 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2154 if (alignment < PARM_BOUNDARY)
2155 alignment = PARM_BOUNDARY;
2156 if (alignment > STACK_BOUNDARY)
2157 alignment = STACK_BOUNDARY;
2158 return alignment;
2159 }
2160
2161
2162 static bool
2163 xtensa_return_in_msb (const_tree valtype)
2164 {
2165 return (TARGET_BIG_ENDIAN
2166 && AGGREGATE_TYPE_P (valtype)
2167 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
2168 }
2169
2170
2171 static void
2172 xtensa_option_override (void)
2173 {
2174 int regno;
2175 enum machine_mode mode;
2176
2177 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2178 error ("boolean registers required for the floating-point option");
2179
2180 /* Set up array giving whether a given register can hold a given mode. */
2181 for (mode = VOIDmode;
2182 mode != MAX_MACHINE_MODE;
2183 mode = (enum machine_mode) ((int) mode + 1))
2184 {
2185 int size = GET_MODE_SIZE (mode);
2186 enum mode_class mclass = GET_MODE_CLASS (mode);
2187
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 {
2190 int temp;
2191
2192 if (ACC_REG_P (regno))
2193 temp = (TARGET_MAC16
2194 && (mclass == MODE_INT) && (size <= UNITS_PER_WORD));
2195 else if (GP_REG_P (regno))
2196 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
2197 else if (FP_REG_P (regno))
2198 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
2199 else if (BR_REG_P (regno))
2200 temp = (TARGET_BOOLEANS && (mode == CCmode));
2201 else
2202 temp = FALSE;
2203
2204 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
2205 }
2206 }
2207
2208 init_machine_status = xtensa_init_machine_status;
2209
2210 /* Check PIC settings. PIC is only supported when using L32R
2211 instructions, and some targets need to always use PIC. */
2212 if (flag_pic && TARGET_CONST16)
2213 error ("-f%s is not supported with CONST16 instructions",
2214 (flag_pic > 1 ? "PIC" : "pic"));
2215 else if (TARGET_FORCE_NO_PIC)
2216 flag_pic = 0;
2217 else if (XTENSA_ALWAYS_PIC)
2218 {
2219 if (TARGET_CONST16)
2220 error ("PIC is required but not supported with CONST16 instructions");
2221 flag_pic = 1;
2222 }
2223 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
2224 if (flag_pic > 1)
2225 flag_pic = 1;
2226 if (flag_pic && !flag_pie)
2227 flag_shlib = 1;
2228
2229 /* Hot/cold partitioning does not work on this architecture because of
2230 its constant pools: the L32R load instruction cannot necessarily reach
2231 a pool placed in a distant section, so disable the optimization. */
2232 if (flag_reorder_blocks_and_partition)
2233 {
2234 flag_reorder_blocks_and_partition = 0;
2235 flag_reorder_blocks = 1;
2236 }
2237 }
2238
2239 /* A C compound statement to output to stdio stream STREAM the
2240 assembler syntax for an instruction operand X. X is an RTL
2241 expression.
2242
2243 CODE is a value that can be used to specify one of several ways
2244 of printing the operand. It is used when identical operands
2245 must be printed differently depending on the context. CODE
2246 comes from the '%' specification that was used to request
2247 printing of the operand. If the specification was just '%DIGIT'
2248 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
2249 is the ASCII code for LTR.
2250
2251 If X is a register, this macro should print the register's name.
2252 The names can be found in an array 'reg_names' whose type is
2253 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
2254
2255 When the machine description has a specification '%PUNCT' (a '%'
2256 followed by a punctuation character), this macro is called with
2257 a null pointer for X and the punctuation character for CODE.
2258
2259 'a', 'c', 'l', and 'n' are reserved.
2260
2261 The Xtensa specific codes are:
2262
2263 'd' CONST_INT, print as signed decimal
2264 'x' CONST_INT, print as signed hexadecimal
2265 'K' CONST_INT, print number of bits in mask for EXTUI
2266 'R' CONST_INT, print (X & 0x1f)
2267 'L' CONST_INT, print ((32 - X) & 0x1f)
2268 'D' REG, print second register of double-word register operand
2269 'N' MEM, print address of next word following a memory operand
2270 'v' MEM, if memory reference is volatile, output a MEMW before it
2271 't' any constant, add "@h" suffix for top 16 bits
2272 'b' any constant, add "@l" suffix for bottom 16 bits
2273 */
2274
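/* Examples of these codes as they might appear in insn templates
   (a sketch, not verbatim quotes from xtensa.md):

	extui	%0, %1, 0, %K2		- %K of 0x00ff prints 8
	slli	%0, %1, %L2		- %L of 8 prints (32 - 8) & 0x1f = 24
	%v1l32i	%0, %1			- %v emits "memw" for volatile MEMs
	s32i	%D1, %0, 4		- %D prints the second reg of a pair  */
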
2275 static void
2276 printx (FILE *file, signed int val)
2277 {
2278 /* Print a hexadecimal value in a nice way. */
2279 if ((val > -0xa) && (val < 0xa))
2280 fprintf (file, "%d", val);
2281 else if (val < 0)
2282 fprintf (file, "-0x%x", -val);
2283 else
2284 fprintf (file, "0x%x", val);
2285 }
2286
2287
2288 void
2289 print_operand (FILE *file, rtx x, int letter)
2290 {
2291 if (!x)
2292 error ("PRINT_OPERAND null pointer");
2293
2294 switch (letter)
2295 {
2296 case 'D':
2297 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2298 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
2299 else
2300 output_operand_lossage ("invalid %%D value");
2301 break;
2302
2303 case 'v':
2304 if (GET_CODE (x) == MEM)
2305 {
2306 /* For a volatile memory reference, emit a MEMW before the
2307 load or store. */
2308 if (MEM_VOLATILE_P (x) && TARGET_SERIALIZE_VOLATILE)
2309 fprintf (file, "memw\n\t");
2310 }
2311 else
2312 output_operand_lossage ("invalid %%v value");
2313 break;
2314
2315 case 'N':
2316 if (GET_CODE (x) == MEM
2317 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2318 {
2319 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
2320 output_address (XEXP (x, 0));
2321 }
2322 else
2323 output_operand_lossage ("invalid %%N value");
2324 break;
2325
2326 case 'K':
2327 if (GET_CODE (x) == CONST_INT)
2328 {
2329 int num_bits = 0;
2330 unsigned val = INTVAL (x);
2331 while (val & 1)
2332 {
2333 num_bits += 1;
2334 val = val >> 1;
2335 }
2336 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2337 fatal_insn ("invalid mask", x);
2338
2339 fprintf (file, "%d", num_bits);
2340 }
2341 else
2342 output_operand_lossage ("invalid %%K value");
2343 break;
2344
2345 case 'L':
2346 if (GET_CODE (x) == CONST_INT)
2347 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2348 else
2349 output_operand_lossage ("invalid %%L value");
2350 break;
2351
2352 case 'R':
2353 if (GET_CODE (x) == CONST_INT)
2354 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2355 else
2356 output_operand_lossage ("invalid %%R value");
2357 break;
2358
2359 case 'x':
2360 if (GET_CODE (x) == CONST_INT)
2361 printx (file, INTVAL (x));
2362 else
2363 output_operand_lossage ("invalid %%x value");
2364 break;
2365
2366 case 'd':
2367 if (GET_CODE (x) == CONST_INT)
2368 fprintf (file, "%ld", INTVAL (x));
2369 else
2370 output_operand_lossage ("invalid %%d value");
2371 break;
2372
2373 case 't':
2374 case 'b':
2375 if (GET_CODE (x) == CONST_INT)
2376 {
2377 printx (file, INTVAL (x));
2378 fputs (letter == 't' ? "@h" : "@l", file);
2379 }
2380 else if (GET_CODE (x) == CONST_DOUBLE)
2381 {
2382 REAL_VALUE_TYPE r;
2383 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2384 if (GET_MODE (x) == SFmode)
2385 {
2386 long l;
2387 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2388 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2389 }
2390 else
2391 output_operand_lossage ("invalid %%t/%%b value");
2392 }
2393 else if (GET_CODE (x) == CONST)
2394 {
2395 /* X must be a symbolic constant on ELF. Write an expression
2396 suitable for 'const16' that sets the high or low 16 bits. */
2397 if (GET_CODE (XEXP (x, 0)) != PLUS
2398 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2399 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2400 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2401 output_operand_lossage ("invalid %%t/%%b value");
2402 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2403 fputs (letter == 't' ? "@h" : "@l", file);
2404 /* There must be a non-alphanumeric character between 'h' or 'l'
2405 and the number. The '-' is added by print_operand() already. */
2406 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2407 fputs ("+", file);
2408 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2409 }
2410 else
2411 {
2412 output_addr_const (file, x);
2413 fputs (letter == 't' ? "@h" : "@l", file);
2414 }
2415 break;
2416
2417 default:
2418 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2419 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2420 else if (GET_CODE (x) == MEM)
2421 output_address (XEXP (x, 0));
2422 else if (GET_CODE (x) == CONST_INT)
2423 fprintf (file, "%ld", INTVAL (x));
2424 else
2425 output_addr_const (file, x);
2426 }
2427 }
2428
2429
2430 /* A C compound statement to output to stdio stream STREAM the
2431 assembler syntax for an instruction operand that is a memory
2432 reference whose address is ADDR. ADDR is an RTL expression. */
2433
2434 void
2435 print_operand_address (FILE *file, rtx addr)
2436 {
2437 if (!addr)
2438 error ("PRINT_OPERAND_ADDRESS, null pointer");
2439
2440 switch (GET_CODE (addr))
2441 {
2442 default:
2443 fatal_insn ("invalid address", addr);
2444 break;
2445
2446 case REG:
2447 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2448 break;
2449
2450 case PLUS:
2451 {
2452 rtx reg = NULL_RTX;
2453 rtx offset = NULL_RTX;
2454 rtx arg0 = XEXP (addr, 0);
2455 rtx arg1 = XEXP (addr, 1);
2456
2457 if (GET_CODE (arg0) == REG)
2458 {
2459 reg = arg0;
2460 offset = arg1;
2461 }
2462 else if (GET_CODE (arg1) == REG)
2463 {
2464 reg = arg1;
2465 offset = arg0;
2466 }
2467 else
2468 fatal_insn ("no register in address", addr);
2469
2470 if (CONSTANT_P (offset))
2471 {
2472 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2473 output_addr_const (file, offset);
2474 }
2475 else
2476 fatal_insn ("address offset not a constant", addr);
2477 }
2478 break;
2479
2480 case LABEL_REF:
2481 case SYMBOL_REF:
2482 case CONST_INT:
2483 case CONST:
2484 output_addr_const (file, addr);
2485 break;
2486 }
2487 }
2488
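/* E.g., (plus:SI (reg a3) (const_int 8)) prints as "a3, 8" and a bare
   (reg a3) prints as "a3, 0", matching the "base, offset" form of the
   Xtensa load/store operands.  */
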
2489 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2490
2491 static bool
2492 xtensa_output_addr_const_extra (FILE *fp, rtx x)
2493 {
2494 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
2495 {
2496 switch (XINT (x, 1))
2497 {
2498 case UNSPEC_TPOFF:
2499 output_addr_const (fp, XVECEXP (x, 0, 0));
2500 fputs ("@TPOFF", fp);
2501 return true;
2502 case UNSPEC_DTPOFF:
2503 output_addr_const (fp, XVECEXP (x, 0, 0));
2504 fputs ("@DTPOFF", fp);
2505 return true;
2506 case UNSPEC_PLT:
2507 if (flag_pic)
2508 {
2509 output_addr_const (fp, XVECEXP (x, 0, 0));
2510 fputs ("@PLT", fp);
2511 return true;
2512 }
2513 break;
2514 default:
2515 break;
2516 }
2517 }
2518 return false;
2519 }
2520
2521
2522 void
2523 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2524 {
2525 long value_long[2];
2526 REAL_VALUE_TYPE r;
2527 int size;
2528 rtx first, second;
2529
2530 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2531
2532 switch (GET_MODE_CLASS (mode))
2533 {
2534 case MODE_FLOAT:
2535 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
2536
2537 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2538 switch (mode)
2539 {
2540 case SFmode:
2541 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2542 if (HOST_BITS_PER_LONG > 32)
2543 value_long[0] &= 0xffffffff;
2544 fprintf (file, "0x%08lx\n", value_long[0]);
2545 break;
2546
2547 case DFmode:
2548 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2549 if (HOST_BITS_PER_LONG > 32)
2550 {
2551 value_long[0] &= 0xffffffff;
2552 value_long[1] &= 0xffffffff;
2553 }
2554 fprintf (file, "0x%08lx, 0x%08lx\n",
2555 value_long[0], value_long[1]);
2556 break;
2557
2558 default:
2559 gcc_unreachable ();
2560 }
2561
2562 break;
2563
2564 case MODE_INT:
2565 case MODE_PARTIAL_INT:
2566 size = GET_MODE_SIZE (mode);
2567 switch (size)
2568 {
2569 case 4:
2570 output_addr_const (file, x);
2571 fputs ("\n", file);
2572 break;
2573
2574 case 8:
2575 split_double (x, &first, &second);
2576 output_addr_const (file, first);
2577 fputs (", ", file);
2578 output_addr_const (file, second);
2579 fputs ("\n", file);
2580 break;
2581
2582 default:
2583 gcc_unreachable ();
2584 }
2585 break;
2586
2587 default:
2588 gcc_unreachable ();
2589 }
2590 }
2591
2592
2593 /* Compute the total frame size in bytes, i.e. the offset from the
2594 incoming stack pointer down to the new stack pointer. */
2595
2596 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2597 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2598
2599 long
2600 compute_frame_size (int size)
2601 {
2602 /* Add space for the incoming static chain value. */
2603 if (cfun->static_chain_decl != NULL)
2604 size += (1 * UNITS_PER_WORD);
2605
2606 xtensa_current_frame_size =
2607 XTENSA_STACK_ALIGN (size
2608 + crtl->outgoing_args_size
2609 + (WINDOW_SIZE * UNITS_PER_WORD));
2610 return xtensa_current_frame_size;
2611 }
2612
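/* Worked example, assuming the windowed ABI (WINDOW_SIZE == 8) and a
   128-bit STACK_BOUNDARY: 20 bytes of locals with no static chain and
   no outgoing arguments give 20 + 8 * 4 = 52 bytes, which
   XTENSA_STACK_ALIGN rounds up to 64.  */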
2613
2614 bool
2615 xtensa_frame_pointer_required (void)
2616 {
2617 /* The code to expand builtin_frame_addr and builtin_return_addr
2618 currently uses the hard_frame_pointer instead of frame_pointer.
2619 This seems wrong but maybe it's necessary for other architectures.
2620 This function is derived from the i386 code. */
2621
2622 if (cfun->machine->accesses_prev_frame)
2623 return true;
2624
2625 return false;
2626 }
2627
2628
2629 /* Minimum frame = reg save area (4 words) plus static chain (1 word),
2630 rounded up so the total size is a multiple of 128 bits: 8 words. */
2631 #define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2632
2633 void
2634 xtensa_expand_prologue (void)
2635 {
2636 HOST_WIDE_INT total_size;
2637 rtx size_rtx;
2638 rtx insn, note_rtx;
2639
2640 total_size = compute_frame_size (get_frame_size ());
2641 size_rtx = GEN_INT (total_size);
2642
2643 if (total_size < (1 << (12+3)))
2644 insn = emit_insn (gen_entry (size_rtx));
2645 else
2646 {
2647 /* Use a8 as a temporary since a0-a7 may be live. */
2648 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2649 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
2650 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2651 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2652 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
2653 }
2654
2655 if (frame_pointer_needed)
2656 {
2657 if (cfun->machine->set_frame_ptr_insn)
2658 {
2659 rtx first;
2660
2661 push_topmost_sequence ();
2662 first = get_insns ();
2663 pop_topmost_sequence ();
2664
2665 /* For all instructions prior to set_frame_ptr_insn, replace
2666 hard_frame_pointer references with stack_pointer. */
2667 for (insn = first;
2668 insn != cfun->machine->set_frame_ptr_insn;
2669 insn = NEXT_INSN (insn))
2670 {
2671 if (INSN_P (insn))
2672 {
2673 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2674 hard_frame_pointer_rtx,
2675 stack_pointer_rtx);
2676 df_insn_rescan (insn);
2677 }
2678 }
2679 }
2680 else
2681 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2682 stack_pointer_rtx));
2683 }
2684
2685 /* Create a note to describe the CFA. Because this is only used to set
2686 DW_AT_frame_base for debug info, don't bother tracking changes through
2687 each instruction in the prologue. It just takes up space. */
2688 note_rtx = gen_rtx_SET (VOIDmode, (frame_pointer_needed
2689 ? hard_frame_pointer_rtx
2690 : stack_pointer_rtx),
2691 plus_constant (Pmode, stack_pointer_rtx,
2692 -total_size));
2693 RTX_FRAME_RELATED_P (insn) = 1;
2694 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2695 }
2696
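/* The two prologue shapes emitted above, sketched as assembly.  The
   ENTRY immediate is an unsigned 12-bit field scaled by 8, hence the
   (1 << (12 + 3)) = 32 KB check:

	entry	sp, 64			- small frames: one instruction

	entry	sp, 32			- large frames: minimal frame,
	movi	a8, <total - 32>	  then adjust the stack pointer
	sub	a8, sp, a8
	movsp	sp, a8			*/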
2697
2698 /* Clear variables at function end. */
2699
2700 void
2701 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2702 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2703 {
2704 xtensa_current_frame_size = 0;
2705 }
2706
2707
2708 rtx
2709 xtensa_return_addr (int count, rtx frame)
2710 {
2711 rtx result, retaddr, curaddr, label;
2712
2713 if (count == -1)
2714 retaddr = gen_rtx_REG (Pmode, A0_REG);
2715 else
2716 {
2717 rtx addr = plus_constant (Pmode, frame, -4 * UNITS_PER_WORD);
2718 addr = memory_address (Pmode, addr);
2719 retaddr = gen_reg_rtx (Pmode);
2720 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2721 }
2722
2723 /* The 2 most-significant bits of the return address on Xtensa hold
2724 the register window size. To get the real return address, these
2725 bits must be replaced with the high bits from some address in the
2726 code. */
2727
2728 /* Get the 2 high bits of a local label in the code. */
2729 curaddr = gen_reg_rtx (Pmode);
2730 label = gen_label_rtx ();
2731 emit_label (label);
2732 LABEL_PRESERVE_P (label) = 1;
2733 emit_move_insn (curaddr, gen_rtx_LABEL_REF (Pmode, label));
2734 emit_insn (gen_lshrsi3 (curaddr, curaddr, GEN_INT (30)));
2735 emit_insn (gen_ashlsi3 (curaddr, curaddr, GEN_INT (30)));
2736
2737 /* Clear the 2 high bits of the return address. */
2738 result = gen_reg_rtx (Pmode);
2739 emit_insn (gen_ashlsi3 (result, retaddr, GEN_INT (2)));
2740 emit_insn (gen_lshrsi3 (result, result, GEN_INT (2)));
2741
2742 /* Combine them to get the result. */
2743 emit_insn (gen_iorsi3 (result, result, curaddr));
2744 return result;
2745 }
2746
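/* Worked example: a raw return address of 0x40012345 carries the
   window-increment bits in its top two bits.  Shifting left then
   right by 2 clears them, giving 0x00012345, and OR-ing in the top
   two bits of a nearby code address, e.g. 0x40000000 for code linked
   in that region, rebuilds the jumpable address 0x40012345.  */
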
2747 /* Disable the use of word-sized or smaller complex modes for structures,
2748 and for function arguments in particular, where they cause problems with
2749 register a7. The xtensa_copy_incoming_a7 function assumes that there is
2750 a single reference to an argument in a7, but with small complex modes the
2751 real and imaginary components may be extracted separately, leading to two
2752 uses of the register, only one of which would be replaced. */
2753
2754 static bool
2755 xtensa_member_type_forces_blk (const_tree, enum machine_mode mode)
2756 {
2757 return mode == CQImode || mode == CHImode;
2758 }
2759
2760 /* Create the va_list data type.
2761
2762 This structure is set up by __builtin_saveregs. The __va_reg field
2763 points to a stack-allocated region holding the contents of the
2764 incoming argument registers. The __va_ndx field is an index
2765 initialized to the position of the first unnamed (variable)
2766 argument. This same index is also used to address the arguments
2767 passed in memory. Thus, the __va_stk field is initialized to point
2768 to the position of the first argument in memory, offset to account
2769 for the arguments passed in registers and for the register area not
2770 being 16-byte aligned. E.g., there are 6 argument registers of 4
2771 bytes each (24 bytes total), but we want the __va_ndx for the first
2772 stack argument to have the maximal 16-byte alignment; the first such
2773 index past the registers is 32, so we offset the __va_stk address by
2774 32 bytes and __va_stk[32] references the first argument on the stack. */
2775
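#if 0
/* Illustration only: the C-level shape of the record built below.  */
struct __va_list_tag
{
  void *__va_stk;		/* arg pointer - 32: stack (overflow) args */
  void *__va_reg;		/* spilled a2..a7 from __builtin_saveregs */
  int __va_ndx;			/* byte index of the next anonymous arg */
};
#endif
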
2776 static tree
2777 xtensa_build_builtin_va_list (void)
2778 {
2779 tree f_stk, f_reg, f_ndx, record, type_decl;
2780
2781 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2782 type_decl = build_decl (BUILTINS_LOCATION,
2783 TYPE_DECL, get_identifier ("__va_list_tag"), record);
2784
2785 f_stk = build_decl (BUILTINS_LOCATION,
2786 FIELD_DECL, get_identifier ("__va_stk"),
2787 ptr_type_node);
2788 f_reg = build_decl (BUILTINS_LOCATION,
2789 FIELD_DECL, get_identifier ("__va_reg"),
2790 ptr_type_node);
2791 f_ndx = build_decl (BUILTINS_LOCATION,
2792 FIELD_DECL, get_identifier ("__va_ndx"),
2793 integer_type_node);
2794
2795 DECL_FIELD_CONTEXT (f_stk) = record;
2796 DECL_FIELD_CONTEXT (f_reg) = record;
2797 DECL_FIELD_CONTEXT (f_ndx) = record;
2798
2799 TYPE_STUB_DECL (record) = type_decl;
2800 TYPE_NAME (record) = type_decl;
2801 TYPE_FIELDS (record) = f_stk;
2802 DECL_CHAIN (f_stk) = f_reg;
2803 DECL_CHAIN (f_reg) = f_ndx;
2804
2805 layout_type (record);
2806 return record;
2807 }
2808
2809
2810 /* Save the incoming argument registers on the stack. Returns the
2811 address of the saved registers. */
2812
2813 static rtx
2814 xtensa_builtin_saveregs (void)
2815 {
2816 rtx gp_regs;
2817 int arg_words = crtl->args.info.arg_words;
2818 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2819
2820 if (gp_left <= 0)
2821 return const0_rtx;
2822
2823 /* Allocate the general-purpose register space. */
2824 gp_regs = assign_stack_local
2825 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2826 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2827
2828 /* Now store the incoming registers. */
2829 cfun->machine->need_a7_copy = true;
2830 cfun->machine->vararg_a7 = true;
2831 move_block_from_reg (GP_ARG_FIRST + arg_words,
2832 adjust_address (gp_regs, BLKmode,
2833 arg_words * UNITS_PER_WORD),
2834 gp_left);
2835 gcc_assert (cfun->machine->vararg_a7_copy != 0);
2836 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
2837
2838 return XEXP (gp_regs, 0);
2839 }
2840
2841
2842 /* Implement `va_start' for varargs and stdarg. We look at the
2843 current function to fill in an initial va_list. */
2844
2845 static void
2846 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2847 {
2848 tree f_stk, stk;
2849 tree f_reg, reg;
2850 tree f_ndx, ndx;
2851 tree t, u;
2852 int arg_words;
2853
2854 arg_words = crtl->args.info.arg_words;
2855
2856 f_stk = TYPE_FIELDS (va_list_type_node);
2857 f_reg = DECL_CHAIN (f_stk);
2858 f_ndx = DECL_CHAIN (f_reg);
2859
2860 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2861 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2862 f_reg, NULL_TREE);
2863 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2864 f_ndx, NULL_TREE);
2865
2866 /* Call __builtin_saveregs; save the result in __va_reg */
2867 u = make_tree (sizetype, expand_builtin_saveregs ());
2868 u = fold_convert (ptr_type_node, u);
2869 t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
2870 TREE_SIDE_EFFECTS (t) = 1;
2871 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2872
2873 /* Set the __va_stk member to ($arg_ptr - 32). */
2874 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2875 u = fold_build_pointer_plus_hwi (u, -32);
2876 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
2877 TREE_SIDE_EFFECTS (t) = 1;
2878 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2879
2880 /* Set the __va_ndx member. If the first variable argument is on
2881 the stack, adjust __va_ndx by 2 words to account for the extra
2882 alignment offset for __va_stk. */
2883 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2884 arg_words += 2;
2885 t = build2 (MODIFY_EXPR, integer_type_node, ndx,
2886 build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
2887 TREE_SIDE_EFFECTS (t) = 1;
2888 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2889 }
2890
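/* E.g., for f (int n, ...) with one named word, va_start leaves
   __va_ndx == 4, __va_reg pointing at the spilled a2..a7, and
   __va_stk at arg_ptr - 32, so the first va_arg reads __va_reg + 4.
   When all 6 words are named, arg_words is bumped by 2 and __va_ndx
   starts at 32, the first 16-byte-aligned index past the registers.  */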
2891
2892 /* Implement `va_arg'. */
2893
2894 static tree
2895 xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
2896 gimple_seq *post_p ATTRIBUTE_UNUSED)
2897 {
2898 tree f_stk, stk;
2899 tree f_reg, reg;
2900 tree f_ndx, ndx;
2901 tree type_size, array, orig_ndx, addr, size, va_size, t;
2902 tree lab_false, lab_over, lab_false2;
2903 bool indirect;
2904
2905 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2906 if (indirect)
2907 type = build_pointer_type (type);
2908
2909 /* Handle complex values as separate real and imaginary parts. */
2910 if (TREE_CODE (type) == COMPLEX_TYPE)
2911 {
2912 tree real_part, imag_part;
2913
2914 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2915 pre_p, NULL);
2916 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
2917
2918 imag_part = xtensa_gimplify_va_arg_expr (unshare_expr (valist),
2919 TREE_TYPE (type),
2920 pre_p, NULL);
2921 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
2922
2923 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
2924 }
2925
2926 f_stk = TYPE_FIELDS (va_list_type_node);
2927 f_reg = DECL_CHAIN (f_stk);
2928 f_ndx = DECL_CHAIN (f_reg);
2929
2930 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist,
2931 f_stk, NULL_TREE);
2932 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), unshare_expr (valist),
2933 f_reg, NULL_TREE);
2934 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), unshare_expr (valist),
2935 f_ndx, NULL_TREE);
2936
2937 type_size = size_in_bytes (type);
2938 va_size = round_up (type_size, UNITS_PER_WORD);
2939 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
2940
2941
2942 /* First align __va_ndx if necessary for this arg:
2943
2944 orig_ndx = (AP).__va_ndx;
2945 if (__alignof__ (TYPE) > 4)
2946 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
2947 & -__alignof__ (TYPE)); */
2948
2949 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2950
2951 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2952 {
2953 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
2954
2955 t = build2 (PLUS_EXPR, integer_type_node, unshare_expr (orig_ndx),
2956 build_int_cst (integer_type_node, align - 1));
2957 t = build2 (BIT_AND_EXPR, integer_type_node, t,
2958 build_int_cst (integer_type_node, -align));
2959 gimplify_assign (unshare_expr (orig_ndx), t, pre_p);
2960 }
2961
2962
2963 /* Increment __va_ndx to point past the argument:
2964
2965 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
2966
2967 t = fold_convert (integer_type_node, va_size);
2968 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
2969 gimplify_assign (unshare_expr (ndx), t, pre_p);
2970
2971
2972 /* Check if the argument is in registers:
2973
2974 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2975 && !must_pass_in_stack (type))
2976 __array = (AP).__va_reg; */
2977
2978 array = create_tmp_var (ptr_type_node, NULL);
2979
2980 lab_over = NULL;
2981 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
2982 {
2983 lab_false = create_artificial_label (UNKNOWN_LOCATION);
2984 lab_over = create_artificial_label (UNKNOWN_LOCATION);
2985
2986 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (ndx),
2987 build_int_cst (integer_type_node,
2988 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
2989 t = build3 (COND_EXPR, void_type_node, t,
2990 build1 (GOTO_EXPR, void_type_node, lab_false),
2991 NULL_TREE);
2992 gimplify_and_add (t, pre_p);
2993
2994 gimplify_assign (unshare_expr (array), reg, pre_p);
2995
2996 t = build1 (GOTO_EXPR, void_type_node, lab_over);
2997 gimplify_and_add (t, pre_p);
2998
2999 t = build1 (LABEL_EXPR, void_type_node, lab_false);
3000 gimplify_and_add (t, pre_p);
3001 }
3002
3003
3004 /* ...otherwise, the argument is on the stack (never split between
3005 registers and the stack -- change __va_ndx if necessary):
3006
3007 else
3008 {
3009 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
3010 (AP).__va_ndx = 32 + __va_size (TYPE);
3011 __array = (AP).__va_stk;
3012 } */
3013
3014 lab_false2 = create_artificial_label (UNKNOWN_LOCATION);
3015
3016 t = build2 (GT_EXPR, boolean_type_node, unshare_expr (orig_ndx),
3017 build_int_cst (integer_type_node,
3018 MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD));
3019 t = build3 (COND_EXPR, void_type_node, t,
3020 build1 (GOTO_EXPR, void_type_node, lab_false2),
3021 NULL_TREE);
3022 gimplify_and_add (t, pre_p);
3023
3024 t = size_binop (PLUS_EXPR, unshare_expr (va_size), size_int (32));
3025 t = fold_convert (integer_type_node, t);
3026 gimplify_assign (unshare_expr (ndx), t, pre_p);
3027
3028 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
3029 gimplify_and_add (t, pre_p);
3030
3031 gimplify_assign (array, stk, pre_p);
3032
3033 if (lab_over)
3034 {
3035 t = build1 (LABEL_EXPR, void_type_node, lab_over);
3036 gimplify_and_add (t, pre_p);
3037 }
3038
3039
3040 /* Given the base array pointer (__array) and index to the subsequent
3041 argument (__va_ndx), find the address:
3042
3043 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
3044 ? sizeof (TYPE)
3045 : __va_size (TYPE))
3046
3047 The results are endian-dependent because values smaller than one word
3048 are aligned differently. */
3049
3050
3051 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
3052 {
3053 t = fold_build2 (GE_EXPR, boolean_type_node, unshare_expr (type_size),
3054 size_int (PARM_BOUNDARY / BITS_PER_UNIT));
3055 t = fold_build3 (COND_EXPR, sizetype, t, unshare_expr (va_size),
3056 unshare_expr (type_size));
3057 size = t;
3058 }
3059 else
3060 size = unshare_expr (va_size);
3061
3062 t = fold_convert (sizetype, unshare_expr (ndx));
3063 t = build2 (MINUS_EXPR, sizetype, t, size);
3064 addr = fold_build_pointer_plus (unshare_expr (array), t);
3065
3066 addr = fold_convert (build_pointer_type (type), addr);
3067 if (indirect)
3068 addr = build_va_arg_indirect_ref (addr);
3069 return build_va_arg_indirect_ref (addr);
3070 }
3071
3072
3073 /* Builtins. */
3074
3075 enum xtensa_builtin
3076 {
3077 XTENSA_BUILTIN_UMULSIDI3,
3078 XTENSA_BUILTIN_THREAD_POINTER,
3079 XTENSA_BUILTIN_SET_THREAD_POINTER,
3080 XTENSA_BUILTIN_max
3081 };
3082
3083
3084 static void
3085 xtensa_init_builtins (void)
3086 {
3087 tree ftype, decl;
3088
3089 ftype = build_function_type_list (unsigned_intDI_type_node,
3090 unsigned_intSI_type_node,
3091 unsigned_intSI_type_node, NULL_TREE);
3092
3093 decl = add_builtin_function ("__builtin_umulsidi3", ftype,
3094 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
3095 "__umulsidi3", NULL_TREE);
3096 TREE_NOTHROW (decl) = 1;
3097 TREE_READONLY (decl) = 1;
3098
3099 if (TARGET_THREADPTR)
3100 {
3101 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
3102 decl = add_builtin_function ("__builtin_thread_pointer", ftype,
3103 XTENSA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
3104 NULL, NULL_TREE);
3105 TREE_READONLY (decl) = 1;
3106 TREE_NOTHROW (decl) = 1;
3107
3108 ftype = build_function_type_list (void_type_node, ptr_type_node,
3109 NULL_TREE);
3110 decl = add_builtin_function ("__builtin_set_thread_pointer", ftype,
3111 XTENSA_BUILTIN_SET_THREAD_POINTER,
3112 BUILT_IN_MD, NULL, NULL_TREE);
3113 TREE_NOTHROW (decl) = 1;
3114 }
3115 }
3116
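/* Typical source-level uses of the builtins declared above:

	unsigned long long p = __builtin_umulsidi3 (a, b);
	void *tp = __builtin_thread_pointer ();
	__builtin_set_thread_pointer (tp);

   The thread-pointer builtins exist only when TARGET_THREADPTR.  */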
3117
3118 static tree
3119 xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
3120 bool ignore ATTRIBUTE_UNUSED)
3121 {
3122 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3123 tree arg0, arg1;
3124
3125 switch (fcode)
3126 {
3127 case XTENSA_BUILTIN_UMULSIDI3:
3128 arg0 = args[0];
3129 arg1 = args[1];
3130 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3131 || TARGET_MUL32_HIGH)
3132 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
3133 fold_convert (unsigned_intDI_type_node, arg0),
3134 fold_convert (unsigned_intDI_type_node, arg1));
3135 break;
3136
3137 case XTENSA_BUILTIN_THREAD_POINTER:
3138 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3139 break;
3140
3141 default:
3142 internal_error ("bad builtin code");
3143 break;
3144 }
3145
3146 return NULL;
3147 }
3148
3149
3150 static rtx
3151 xtensa_expand_builtin (tree exp, rtx target,
3152 rtx subtarget ATTRIBUTE_UNUSED,
3153 enum machine_mode mode ATTRIBUTE_UNUSED,
3154 int ignore)
3155 {
3156 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3157 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3158 rtx arg;
3159
3160 switch (fcode)
3161 {
3162 case XTENSA_BUILTIN_UMULSIDI3:
3163 /* The umulsidi3 builtin just avoids calling the real __umulsidi3 when
3164 the configuration can do the multiply directly; xtensa_fold_builtin
3165 already folded that case away, so here we simply call the function. */
3166 return expand_call (exp, target, ignore);
3167
3168 case XTENSA_BUILTIN_THREAD_POINTER:
3169 if (!target || !register_operand (target, Pmode))
3170 target = gen_reg_rtx (Pmode);
3171 emit_insn (gen_load_tp (target));
3172 return target;
3173
3174 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3175 arg = expand_normal (CALL_EXPR_ARG (exp, 0));
3176 if (!register_operand (arg, Pmode))
3177 arg = copy_to_mode_reg (Pmode, arg);
3178 emit_insn (gen_set_tp (arg));
3179 return const0_rtx;
3180
3181 default:
3182 internal_error ("bad builtin code");
3183 }
3184 return NULL_RTX;
3185 }
3186
3187 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS. */
3188
3189 static reg_class_t
3190 xtensa_preferred_reload_class (rtx x, reg_class_t rclass)
3191 {
3192 if (CONSTANT_P (x) && CONST_DOUBLE_P (x))
3193 return NO_REGS;
3194
3195 /* Don't use the stack pointer or hard frame pointer for reloads!
3196 The hard frame pointer would normally be OK except that it may
3197 briefly hold an incoming argument in the prologue, and reload
3198 won't know that it is live because the hard frame pointer is
3199 treated specially. */
3200
3201 if (rclass == AR_REGS || rclass == GR_REGS)
3202 return RL_REGS;
3203
3204 return rclass;
3205 }
3206
3207 /* Worker function for TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
3208
3209 static reg_class_t
3210 xtensa_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
3211 reg_class_t rclass)
3212 {
3213 /* Don't use the stack pointer or hard frame pointer for reloads!
3214 The hard frame pointer would normally be OK except that it may
3215 briefly hold an incoming argument in the prologue, and reload
3216 won't know that it is live because the hard frame pointer is
3217 treated specially. */
3218
3219 if (rclass == AR_REGS || rclass == GR_REGS)
3220 return RL_REGS;
3221
3222 return rclass;
3223 }
3224
3225 /* Worker function for TARGET_SECONDARY_RELOAD. */
3226
3227 static reg_class_t
3228 xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
3229 enum machine_mode mode, secondary_reload_info *sri)
3230 {
3231 int regno;
3232
3233 if (in_p && constantpool_mem_p (x))
3234 {
3235 if (rclass == FP_REGS)
3236 return RL_REGS;
3237
3238 if (mode == QImode)
3239 sri->icode = CODE_FOR_reloadqi_literal;
3240 else if (mode == HImode)
3241 sri->icode = CODE_FOR_reloadhi_literal;
3242 }
3243
3244 regno = xt_true_regnum (x);
3245 if (ACC_REG_P (regno))
3246 return ((rclass == GR_REGS || rclass == RL_REGS) ? NO_REGS : RL_REGS);
3247 if (rclass == ACC_REG)
3248 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
3249
3250 return NO_REGS;
3251 }
3252
3253
3254 void
3255 order_regs_for_local_alloc (void)
3256 {
3257 if (!leaf_function_p ())
3258 {
3259 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
3260 FIRST_PSEUDO_REGISTER * sizeof (int));
3261 }
3262 else
3263 {
3264 int i, num_arg_regs;
3265 int nxt = 0;
3266
3267 /* Use the AR registers in increasing order (skipping a0 and a1)
3268 but save the incoming argument registers as a last resort. */
3269 num_arg_regs = crtl->args.info.arg_words;
3270 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
3271 num_arg_regs = MAX_ARGS_IN_REGISTERS;
3272 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
3273 reg_alloc_order[nxt++] = i + num_arg_regs;
3274 for (i = 0; i < num_arg_regs; i++)
3275 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
3276
3277 /* List the boolean (BR) registers in order. */
3278 for (i = 0; i < BR_REG_NUM; i++)
3279 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
3280
3281 /* List the FP registers in order for now. */
3282 for (i = 0; i < 16; i++)
3283 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
3284
3285 /* GCC requires that we list *all* the registers.... */
3286 reg_alloc_order[nxt++] = 0; /* a0 = return address */
3287 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
3288 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
3289 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
3290
3291 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
3292 }
3293 }
3294
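/* E.g., in a leaf function with two incoming argument words, the
   order starts a4, a5, ..., a15, then the argument registers a2 and
   a3, then the boolean and floating-point registers, and finally
   a0, a1, the two pseudo pointers and the MAC16 accumulator.  */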
3295
3296 /* Some Xtensa targets support multiple bss sections. If the section
3297 name ends with ".bss", add SECTION_BSS to the flags. */
3298
3299 static unsigned int
3300 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
3301 {
3302 unsigned int flags = default_section_type_flags (decl, name, reloc);
3303 const char *suffix;
3304
3305 suffix = strrchr (name, '.');
3306 if (suffix && strcmp (suffix, ".bss") == 0)
3307 {
3308 if (!decl || (TREE_CODE (decl) == VAR_DECL
3309 && DECL_INITIAL (decl) == NULL_TREE))
3310 flags |= SECTION_BSS; /* @nobits */
3311 else
3312 warning (0, "only uninitialized variables can be placed in a "
3313 ".bss section");
3314 }
3315
3316 return flags;
3317 }
3318
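/* E.g., given a linker script that provides it, the following puts an
   uninitialized variable in a separate bss-like section (the section
   name is an arbitrary example) and receives SECTION_BSS here:

	int stats[256] __attribute__ ((section (".stats.bss")));  */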
3319
3320 /* The literal pool stays with the function. */
3321
3322 static section *
3323 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
3324 rtx x ATTRIBUTE_UNUSED,
3325 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3326 {
3327 return function_section (current_function_decl);
3328 }
3329
3330 /* Worker function for TARGET_REGISTER_MOVE_COST. */
3331
3332 static int
3333 xtensa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
3334 reg_class_t from, reg_class_t to)
3335 {
3336 if (from == to && from != BR_REGS && to != BR_REGS)
3337 return 2;
3338 else if (reg_class_subset_p (from, AR_REGS)
3339 && reg_class_subset_p (to, AR_REGS))
3340 return 2;
3341 else if (reg_class_subset_p (from, AR_REGS) && to == ACC_REG)
3342 return 3;
3343 else if (from == ACC_REG && reg_class_subset_p (to, AR_REGS))
3344 return 3;
3345 else
3346 return 10;
3347 }
3348
3349 /* Worker function for TARGET_MEMORY_MOVE_COST. */
3350
3351 static int
3352 xtensa_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
3353 reg_class_t rclass ATTRIBUTE_UNUSED,
3354 bool in ATTRIBUTE_UNUSED)
3355 {
3356 return 4;
3357 }
3358
3359 /* Compute a (partial) cost for rtx X. Return true if the complete
3360 cost has been computed, and false if subexpressions should be
3361 scanned. In either case, *TOTAL contains the cost result. */
3362
3363 static bool
3364 xtensa_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
3365 int *total, bool speed ATTRIBUTE_UNUSED)
3366 {
3367 switch (code)
3368 {
3369 case CONST_INT:
3370 switch (outer_code)
3371 {
3372 case SET:
3373 if (xtensa_simm12b (INTVAL (x)))
3374 {
3375 *total = 4;
3376 return true;
3377 }
3378 break;
3379 case PLUS:
3380 if (xtensa_simm8 (INTVAL (x))
3381 || xtensa_simm8x256 (INTVAL (x)))
3382 {
3383 *total = 0;
3384 return true;
3385 }
3386 break;
3387 case AND:
3388 if (xtensa_mask_immediate (INTVAL (x)))
3389 {
3390 *total = 0;
3391 return true;
3392 }
3393 break;
3394 case COMPARE:
3395 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
3396 {
3397 *total = 0;
3398 return true;
3399 }
3400 break;
3401 case ASHIFT:
3402 case ASHIFTRT:
3403 case LSHIFTRT:
3404 case ROTATE:
3405 case ROTATERT:
3406 /* No way to tell if X is the 2nd operand so be conservative. */
3407 default: break;
3408 }
3409 if (xtensa_simm12b (INTVAL (x)))
3410 *total = 5;
3411 else if (TARGET_CONST16)
3412 *total = COSTS_N_INSNS (2);
3413 else
3414 *total = 6;
3415 return true;
3416
3417 case CONST:
3418 case LABEL_REF:
3419 case SYMBOL_REF:
3420 if (TARGET_CONST16)
3421 *total = COSTS_N_INSNS (2);
3422 else
3423 *total = 5;
3424 return true;
3425
3426 case CONST_DOUBLE:
3427 if (TARGET_CONST16)
3428 *total = COSTS_N_INSNS (4);
3429 else
3430 *total = 7;
3431 return true;
3432
3433 case MEM:
3434 {
3435 int num_words =
3436 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
3437
3438 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
3439 *total = COSTS_N_INSNS (num_words);
3440 else
3441 *total = COSTS_N_INSNS (2*num_words);
3442 return true;
3443 }
3444
3445 case FFS:
3446 case CTZ:
3447 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
3448 return true;
3449
3450 case CLZ:
3451 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
3452 return true;
3453
3454 case NOT:
3455 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
3456 return true;
3457
3458 case AND:
3459 case IOR:
3460 case XOR:
3461 if (GET_MODE (x) == DImode)
3462 *total = COSTS_N_INSNS (2);
3463 else
3464 *total = COSTS_N_INSNS (1);
3465 return true;
3466
3467 case ASHIFT:
3468 case ASHIFTRT:
3469 case LSHIFTRT:
3470 if (GET_MODE (x) == DImode)
3471 *total = COSTS_N_INSNS (50);
3472 else
3473 *total = COSTS_N_INSNS (1);
3474 return true;
3475
3476 case ABS:
3477 {
3478 enum machine_mode xmode = GET_MODE (x);
3479 if (xmode == SFmode)
3480 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3481 else if (xmode == DFmode)
3482 *total = COSTS_N_INSNS (50);
3483 else
3484 *total = COSTS_N_INSNS (4);
3485 return true;
3486 }
3487
3488 case PLUS:
3489 case MINUS:
3490 {
3491 enum machine_mode xmode = GET_MODE (x);
3492 if (xmode == SFmode)
3493 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3494 else if (xmode == DFmode || xmode == DImode)
3495 *total = COSTS_N_INSNS (50);
3496 else
3497 *total = COSTS_N_INSNS (1);
3498 return true;
3499 }
3500
3501 case NEG:
3502 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
3503 return true;
3504
3505 case MULT:
3506 {
3507 enum machine_mode xmode = GET_MODE (x);
3508 if (xmode == SFmode)
3509 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
3510 else if (xmode == DFmode)
3511 *total = COSTS_N_INSNS (50);
3512 else if (xmode == DImode)
3513 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3514 else if (TARGET_MUL32)
3515 *total = COSTS_N_INSNS (4);
3516 else if (TARGET_MAC16)
3517 *total = COSTS_N_INSNS (16);
3518 else if (TARGET_MUL16)
3519 *total = COSTS_N_INSNS (12);
3520 else
3521 *total = COSTS_N_INSNS (50);
3522 return true;
3523 }
3524
3525 case DIV:
3526 case MOD:
3527 {
3528 enum machine_mode xmode = GET_MODE (x);
3529 if (xmode == SFmode)
3530 {
3531 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3532 return true;
3533 }
3534 else if (xmode == DFmode)
3535 {
3536 *total = COSTS_N_INSNS (50);
3537 return true;
3538 }
3539 }
3540 /* Fall through. */
3541
3542 case UDIV:
3543 case UMOD:
3544 {
3545 enum machine_mode xmode = GET_MODE (x);
3546 if (xmode == DImode)
3547 *total = COSTS_N_INSNS (50);
3548 else if (TARGET_DIV32)
3549 *total = COSTS_N_INSNS (32);
3550 else
3551 *total = COSTS_N_INSNS (50);
3552 return true;
3553 }
3554
3555 case SQRT:
3556 if (GET_MODE (x) == SFmode)
3557 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3558 else
3559 *total = COSTS_N_INSNS (50);
3560 return true;
3561
3562 case SMIN:
3563 case UMIN:
3564 case SMAX:
3565 case UMAX:
3566 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3567 return true;
3568
3569 case SIGN_EXTRACT:
3570 case SIGN_EXTEND:
3571 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3572 return true;
3573
3574 case ZERO_EXTRACT:
3575 case ZERO_EXTEND:
3576 *total = COSTS_N_INSNS (1);
3577 return true;
3578
3579 default:
3580 return false;
3581 }
3582 }
3583
3584 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3585
3586 static bool
3587 xtensa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3588 {
3589 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3590 > 4 * UNITS_PER_WORD);
3591 }
3592
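/* E.g., a 16-byte struct (4 words) is returned in registers, while a
   20-byte struct is returned in memory via a hidden pointer.  */
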
3593 /* Worker function for TARGET_FUNCTION_VALUE. */
3594
3595 rtx
3596 xtensa_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
3597 bool outgoing)
3598 {
3599 return gen_rtx_REG ((INTEGRAL_TYPE_P (valtype)
3600 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
3601 ? SImode : TYPE_MODE (valtype),
3602 outgoing ? GP_OUTGOING_RETURN : GP_RETURN);
3603 }
3604
3605 /* Worker function for TARGET_LIBCALL_VALUE. */
3606
3607 static rtx
3608 xtensa_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3609 {
3610 return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT
3611 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3612 ? SImode : mode, GP_RETURN);
3613 }
3614
3615 /* Worker function TARGET_FUNCTION_VALUE_REGNO_P. */
3616
3617 static bool
3618 xtensa_function_value_regno_p (const unsigned int regno)
3619 {
3620 return (regno == GP_RETURN);
3621 }
3622
3623 /* The static chain is passed in memory. Provide an rtx giving the
3624 'mem' expression that denotes where it is stored. */
3625
3626 static rtx
3627 xtensa_static_chain (const_tree ARG_UNUSED (fndecl), bool incoming_p)
3628 {
3629 rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx;
3630 return gen_frame_mem (Pmode, plus_constant (Pmode, base,
3631 -5 * UNITS_PER_WORD));
3632 }
3633
3634
3635 /* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3636 instruction with a minimal stack frame in order to get some free
3637 registers. Once the actual call target is known, the proper stack frame
3638 size is extracted from the ENTRY instruction at the target and the
3639 current frame is adjusted to match. The trampoline then transfers
3640 control to the instruction following the ENTRY at the target. Note:
3641 this assumes that the target begins with an ENTRY instruction. */
3642
3643 static void
3644 xtensa_asm_trampoline_template (FILE *stream)
3645 {
3646 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3647
3648 fprintf (stream, "\t.begin no-transform\n");
3649 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);
3650
3651 if (use_call0)
3652 {
3653 /* Save the return address. */
3654 fprintf (stream, "\tmov\ta10, a0\n");
3655
3656 /* Use a CALL0 instruction to skip past the constants and in the
3657 process get the PC into A0. This allows PC-relative access to
3658 the constants without relying on L32R. */
3659 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3660 }
3661 else
3662 fprintf (stream, "\tj\t.Lskipconsts\n");
3663
3664 fprintf (stream, "\t.align\t4\n");
3665 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3666 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3667 fprintf (stream, ".Lskipconsts:\n");
3668
3669 /* Load the static chain and function address from the trampoline. */
3670 if (use_call0)
3671 {
3672 fprintf (stream, "\taddi\ta0, a0, 3\n");
3673 fprintf (stream, "\tl32i\ta9, a0, 0\n");
3674 fprintf (stream, "\tl32i\ta8, a0, 4\n");
3675 }
3676 else
3677 {
3678 fprintf (stream, "\tl32r\ta9, .Lchainval\n");
3679 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
3680 }
3681
3682 /* Store the static chain. */
3683 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);
3684
3685 /* Set the proper stack pointer value. */
3686 fprintf (stream, "\tl32i\ta9, a8, 0\n");
3687 fprintf (stream, "\textui\ta9, a9, %d, 12\n",
3688 TARGET_BIG_ENDIAN ? 8 : 12);
3689 fprintf (stream, "\tslli\ta9, a9, 3\n");
3690 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
3691 fprintf (stream, "\tsub\ta9, sp, a9\n");
3692 fprintf (stream, "\tmovsp\tsp, a9\n");
3693
3694 if (use_call0)
3695 /* Restore the return address. */
3696 fprintf (stream, "\tmov\ta0, a10\n");
3697
3698 /* Jump to the instruction following the ENTRY. */
3699 fprintf (stream, "\taddi\ta8, a8, 3\n");
3700 fprintf (stream, "\tjx\ta8\n");
3701
3702 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3703 if (use_call0)
3704 fprintf (stream, "\t.byte\t0\n");
3705 else
3706 fprintf (stream, "\tnop\n");
3707
3708 fprintf (stream, "\t.end no-transform\n");
3709 }
3710
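/* The chain_off/func_off values below mirror the template above.
   Assuming 3-byte core instructions (the no-transform region keeps
   the assembler from shrinking them): the windowed version is
   entry (3 bytes) + j (3 bytes), padded by ".align 4" to offset 8,
   so .Lchainval sits at byte 8 and .Lfnaddr at byte 12; the CALL0
   variant adds a mov before the call0, pushing them to 12 and 16.  */
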
3711 static void
3712 xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain)
3713 {
3714 rtx func = XEXP (DECL_RTL (fndecl), 0);
3715 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3716 int chain_off = use_call0 ? 12 : 8;
3717 int func_off = use_call0 ? 16 : 12;
3718
3719 emit_block_move (m_tramp, assemble_trampoline_template (),
3720 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3721
3722 emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain);
3723 emit_move_insn (adjust_address (m_tramp, SImode, func_off), func);
3724 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
3725 LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode);
3726 }
3727
3728 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3729
3730 static bool
3731 xtensa_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3732 {
3733 return !xtensa_tls_referenced_p (x);
3734 }
3735
3736 #include "gt-xtensa.h"