ba4782c74a53787827e480853f978261d0fa5ea8
[gcc.git] / gcc / config / lm32 / lm32.c
1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
3
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "predict.h"
30 #include "function.h"
31 #include "dominance.h"
32 #include "cfg.h"
33 #include "cfgrtl.h"
34 #include "cfganal.h"
35 #include "lcm.h"
36 #include "cfgbuild.h"
37 #include "cfgcleanup.h"
38 #include "basic-block.h"
39 #include "insn-config.h"
40 #include "conditions.h"
41 #include "insn-flags.h"
42 #include "insn-attr.h"
43 #include "insn-codes.h"
44 #include "recog.h"
45 #include "output.h"
46 #include "symtab.h"
47 #include "tree.h"
48 #include "fold-const.h"
49 #include "calls.h"
50 #include "flags.h"
51 #include "alias.h"
52 #include "expmed.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "reload.h"
60 #include "tm_p.h"
61 #include "diagnostic-core.h"
62 #include "optabs.h"
63 #include "libfuncs.h"
64 #include "target.h"
65 #include "target-def.h"
66 #include "langhooks.h"
67 #include "tm-constrs.h"
68 #include "df.h"
69 #include "builtins.h"
70
71 struct lm32_frame_info
72 {
73 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
74 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
75 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
76 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
77 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
78 unsigned int reg_save_mask; /* mask of saved registers. */
79 };
80
81 /* Prototypes for static functions. */
82 static rtx emit_add (rtx dest, rtx src0, rtx src1);
83 static void expand_save_restore (struct lm32_frame_info *info, int op);
84 static void stack_adjust (HOST_WIDE_INT amount);
85 static bool lm32_in_small_data_p (const_tree);
86 static void lm32_setup_incoming_varargs (cumulative_args_t cum,
87 machine_mode mode, tree type,
88 int *pretend_size, int no_rtl);
89 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
90 int *total, bool speed);
91 static bool lm32_can_eliminate (const int, const int);
92 static bool
93 lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
94 static HOST_WIDE_INT lm32_compute_frame_size (int size);
95 static void lm32_option_override (void);
96 static rtx lm32_function_arg (cumulative_args_t cum,
97 machine_mode mode, const_tree type,
98 bool named);
99 static void lm32_function_arg_advance (cumulative_args_t cum,
100 machine_mode mode,
101 const_tree type, bool named);
102
103 #undef TARGET_OPTION_OVERRIDE
104 #define TARGET_OPTION_OVERRIDE lm32_option_override
105 #undef TARGET_ADDRESS_COST
106 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
107 #undef TARGET_RTX_COSTS
108 #define TARGET_RTX_COSTS lm32_rtx_costs
109 #undef TARGET_IN_SMALL_DATA_P
110 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
111 #undef TARGET_PROMOTE_FUNCTION_MODE
112 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
113 #undef TARGET_SETUP_INCOMING_VARARGS
114 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
115 #undef TARGET_FUNCTION_ARG
116 #define TARGET_FUNCTION_ARG lm32_function_arg
117 #undef TARGET_FUNCTION_ARG_ADVANCE
118 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
119 #undef TARGET_PROMOTE_PROTOTYPES
120 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
121 #undef TARGET_MIN_ANCHOR_OFFSET
122 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
123 #undef TARGET_MAX_ANCHOR_OFFSET
124 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
125 #undef TARGET_CAN_ELIMINATE
126 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
127 #undef TARGET_LEGITIMATE_ADDRESS_P
128 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
129
130 struct gcc_target targetm = TARGET_INITIALIZER;
131
132 /* Current frame information calculated by lm32_compute_frame_size. */
133 static struct lm32_frame_info current_frame_info;
134
135 /* Return non-zero if the given return type should be returned in memory. */
136
137 int
138 lm32_return_in_memory (tree type)
139 {
140 HOST_WIDE_INT size;
141
142 if (!AGGREGATE_TYPE_P (type))
143 {
144 /* All simple types are returned in registers. */
145 return 0;
146 }
147
148 size = int_size_in_bytes (type);
149 if (size >= 0 && size <= UNITS_PER_WORD)
150 {
151 /* If it can fit in one register. */
152 return 0;
153 }
154
155 return 1;
156 }
157
158 /* Generate an emit a word sized add instruction. */
159
160 static rtx
161 emit_add (rtx dest, rtx src0, rtx src1)
162 {
163 rtx insn;
164 insn = emit_insn (gen_addsi3 (dest, src0, src1));
165 return insn;
166 }
167
/* Generate the code to compare (and possibly branch) two integer values
   TEST_CODE is the comparison code we are trying to emulate
     (or implement directly)
   RESULT is where to store the result of the comparison,
     or null to emit a branch
   CMP0 CMP1 are the two comparison operands
   DESTINATION is the destination of the branch, or null to only compare
   */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  /* Infer the mode from whichever operand carries one; CMP0 may be a
     bare CONST_INT, which has VOIDmode.  */
  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or compare.  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction:
	 (set pc (if_then_else (cond) (label) (pc))).  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
	 move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      /* Signed comparisons take a K-constraint immediate.  */
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      /* Unsigned comparisons take an L-constraint immediate.  */
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction, storing the condition result.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
269
270 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
271 and OPERAND[3]. Store the result in OPERANDS[0]. */
272
273 void
274 lm32_expand_scc (rtx operands[])
275 {
276 rtx target = operands[0];
277 enum rtx_code code = GET_CODE (operands[1]);
278 rtx op0 = operands[2];
279 rtx op1 = operands[3];
280
281 gen_int_relational (code, target, op0, op1, NULL_RTX);
282 }
283
284 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
285 CODE and jump to OPERANDS[3] if the condition holds. */
286
287 void
288 lm32_expand_conditional_branch (rtx operands[])
289 {
290 enum rtx_code code = GET_CODE (operands[0]);
291 rtx op0 = operands[1];
292 rtx op1 = operands[2];
293 rtx destination = operands[3];
294
295 gen_int_relational (code, NULL_RTX, op0, op1, destination);
296 }
297
/* Generate and emit RTL to save or restore callee save registers.
   INFO describes the current frame; OP is 0 to save the registers
   (prologue) and non-zero to restore them (epilogue).  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      /* The offset fits a 16-bit displacement, so address
		 sp+offset directly.  */
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  /* OP == 0: store register to stack; otherwise load it back.  */
	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* Only prologue instructions which set the sp fp or save a
	     register should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
352
353 static void
354 stack_adjust (HOST_WIDE_INT amount)
355 {
356 rtx insn;
357
358 if (!IN_RANGE (amount, -32776, 32768))
359 {
360 /* r10 is caller saved so it can be used as a temp reg. */
361 rtx r10;
362 r10 = gen_rtx_REG (word_mode, 10);
363 insn = emit_move_insn (r10, GEN_INT (amount));
364 if (amount < 0)
365 RTX_FRAME_RELATED_P (insn) = 1;
366 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
367 if (amount < 0)
368 RTX_FRAME_RELATED_P (insn) = 1;
369 }
370 else
371 {
372 insn = emit_add (stack_pointer_rtx,
373 stack_pointer_rtx, GEN_INT (amount));
374 if (amount < 0)
375 RTX_FRAME_RELATED_P (insn) = 1;
376 }
377 }
378
379
/* Create and emit instructions for a function's prologue: allocate the
   frame, save callee-saved registers, and set up the frame pointer.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  /* Recompute and cache the frame layout in current_frame_info.  */
  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Setup frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - Don't use total_size, as that includes pretend_size,
	     which isn't part of this frame?  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
418
419 /* Create an emit instructions for a functions epilogue. */
420 void
421 lm32_expand_epilogue (void)
422 {
423 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
424
425 lm32_compute_frame_size (get_frame_size ());
426
427 if (current_frame_info.total_size > 0)
428 {
429 /* Prevent stack code from being reordered. */
430 emit_insn (gen_blockage ());
431
432 /* Restore callee save registers. */
433 if (current_frame_info.reg_save_mask != 0)
434 expand_save_restore (&current_frame_info, 1);
435
436 /* Deallocate stack. */
437 stack_adjust (current_frame_info.total_size);
438
439 /* Return to calling function. */
440 emit_jump_insn (gen_return_internal (ra_rtx));
441 }
442 else
443 {
444 /* Return to calling function. */
445 emit_jump_insn (gen_return_internal (ra_rtx));
446 }
447 }
448
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  SIZE is the size of the local variable area.  The
   computed layout is also cached in current_frame_info.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  /* The return address must also be saved when it is live, or when the
     function is a non-leaf or compiled without optimization.  */
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  /* Save the frame pointer if one is needed and not already saved.  */
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary (4 bytes).  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
502
/* Print operand OP to FILE for the assembler output.  LETTER is the
   optional operand modifier ('z' prints register r0 for a zero
   constant).  Registers, condition codes, memory references, and
   constants are all handled here.  */
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  /* Look through a sign extension to the underlying operand.  */
  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    /* The 'z' modifier prints the zero register for a 0 constant.  */
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  /* Condition codes map to branch-instruction suffixes.
     NOTE(review): "geu" and "leu" lack the trailing space the other
     suffixes have — looks inconsistent; confirm against the .md
     templates that consume these before changing.  */
  else if (code == EQ)
    fprintf (file, "e ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
559
560 /* A C compound statement to output to stdio stream STREAM the
561 assembler syntax for an instruction operand that is a memory
562 reference whose address is ADDR. ADDR is an RTL expression.
563
564 On some machines, the syntax for a symbolic address depends on
565 the section that the address refers to. On these machines,
566 define the macro `ENCODE_SECTION_INFO' to store the information
567 into the `symbol_ref', and then check for it here. */
568
569 void
570 lm32_print_operand_address (FILE * file, rtx addr)
571 {
572 switch (GET_CODE (addr))
573 {
574 case REG:
575 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
576 break;
577
578 case MEM:
579 output_address (XEXP (addr, 0));
580 break;
581
582 case PLUS:
583 {
584 rtx arg0 = XEXP (addr, 0);
585 rtx arg1 = XEXP (addr, 1);
586
587 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
588 {
589 if (GET_CODE (arg1) == CONST_INT)
590 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
591 INTVAL (arg1));
592 else
593 {
594 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
595 output_addr_const (file, arg1);
596 fprintf (file, ")");
597 }
598 }
599 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
600 output_addr_const (file, addr);
601 else
602 fatal_insn ("bad operand", addr);
603 }
604 break;
605
606 case SYMBOL_REF:
607 if (SYMBOL_REF_SMALL_P (addr))
608 {
609 fprintf (file, "gp(");
610 output_addr_const (file, addr);
611 fprintf (file, ")");
612 }
613 else
614 fatal_insn ("can't use non gp relative absolute address", addr);
615 break;
616
617 default:
618 fatal_insn ("invalid addressing mode", addr);
619 break;
620 }
621 }
622
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  /* Unnamed (variadic) arguments, and arguments that would extend past
     the last argument register, go on the stack.  */
  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}
654
655 static void
656 lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
657 const_tree type, bool named ATTRIBUTE_UNUSED)
658 {
659 *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
660 }
661
662 HOST_WIDE_INT
663 lm32_compute_initial_elimination_offset (int from, int to)
664 {
665 HOST_WIDE_INT offset = 0;
666
667 switch (from)
668 {
669 case ARG_POINTER_REGNUM:
670 switch (to)
671 {
672 case FRAME_POINTER_REGNUM:
673 offset = 0;
674 break;
675 case STACK_POINTER_REGNUM:
676 offset =
677 lm32_compute_frame_size (get_frame_size ()) -
678 current_frame_info.pretend_size;
679 break;
680 default:
681 gcc_unreachable ();
682 }
683 break;
684 default:
685 gcc_unreachable ();
686 }
687
688 return offset;
689 }
690
/* Implement TARGET_SETUP_INCOMING_VARARGS: spill any argument registers
   that may hold anonymous (variadic) arguments to the stack, so they
   form a contiguous block with stack-passed arguments.  Sets
   *PRETEND_SIZE to the number of bytes spilled.  No RTL is emitted
   when NO_RTL is set.  */
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    /* For a true stdarg function all argument registers past the cursor
       may hold anonymous arguments.  */
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* this is the common case, we have been passed details setup
	 for the last named argument, we want to skip over the
	 registers, if any used in passing this named parameter in
	 order to determine which is the first registers used to pass
	 anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      /* Number of remaining argument registers to spill.  */
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
736
/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't, since
     sub-word values must be extended somehow.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}
745
746 /* Return nonzero if this function is known to have a null epilogue.
747 This allows the optimizer to omit jumps to jumps if no stack
748 was created. */
749 int
750 lm32_can_use_return (void)
751 {
752 if (!reload_completed)
753 return 0;
754
755 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
756 return 0;
757
758 if (lm32_compute_frame_size (get_frame_size ()) != 0)
759 return 0;
760
761 return 1;
762 }
763
764 /* Support function to determine the return address of the function
765 'count' frames back up the stack. */
766 rtx
767 lm32_return_addr_rtx (int count, rtx frame)
768 {
769 rtx r;
770 if (count == 0)
771 {
772 if (!df_regs_ever_live_p (RA_REGNUM))
773 r = gen_rtx_REG (Pmode, RA_REGNUM);
774 else
775 {
776 r = gen_rtx_MEM (Pmode,
777 gen_rtx_PLUS (Pmode, frame,
778 GEN_INT (-2 * UNITS_PER_WORD)));
779 set_mem_alias_set (r, get_frame_alias_set ());
780 }
781 }
782 else if (flag_omit_frame_pointer)
783 r = NULL_RTX;
784 else
785 {
786 r = gen_rtx_MEM (Pmode,
787 gen_rtx_PLUS (Pmode, frame,
788 GEN_INT (-2 * UNITS_PER_WORD)));
789 set_mem_alias_set (r, get_frame_alias_set ());
790 }
791 return r;
792 }
793
/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  /* Honor an explicit .sdata/.sbss section placement.  */
  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      /* Otherwise use the -G size threshold.  */
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
	 in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}
825
/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   ALIGNMENT is the known common alignment of both regions in bytes.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time, limited by alignment.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  All loads are emitted
     before any store so overlapping MEMs would still be read first.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes with a byte-wise tail copy.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
879
880 /* Expand string/block move operations.
881
882 operands[0] is the pointer to the destination.
883 operands[1] is the pointer to the source.
884 operands[2] is the number of bytes to move.
885 operands[3] is the alignment. */
886
887 int
888 lm32_expand_block_move (rtx * operands)
889 {
890 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
891 {
892 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
893 INTVAL (operands[3]));
894 return 1;
895 }
896 return 0;
897 }
898
/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  Recurses over X's sub-expressions.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  /* An UNSPEC wrapper marks the symbol as PIC-protected.  */
  if (GET_CODE (x) == UNSPEC)
    return 0;

  /* Walk every rtx ('e') and rtx-vector ('E') operand recursively.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
936
937 /* Compute a (partial) cost for rtx X. Return true if the complete
938 cost has been computed, and false if subexpressions should be
939 scanned. In either case, *TOTAL contains the cost result. */
940
941 static bool
942 lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
943 int *total, bool speed)
944 {
945 machine_mode mode = GET_MODE (x);
946 bool small_mode;
947
948 const int arithmetic_latency = 1;
949 const int shift_latency = 1;
950 const int compare_latency = 2;
951 const int multiply_latency = 3;
952 const int load_latency = 3;
953 const int libcall_size_cost = 5;
954
955 /* Determine if we can handle the given mode size in a single instruction. */
956 small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
957
958 switch (code)
959 {
960
961 case PLUS:
962 case MINUS:
963 case AND:
964 case IOR:
965 case XOR:
966 case NOT:
967 case NEG:
968 if (!speed)
969 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
970 else
971 *total =
972 COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
973 break;
974
975 case COMPARE:
976 if (small_mode)
977 {
978 if (!speed)
979 *total = COSTS_N_INSNS (1);
980 else
981 *total = COSTS_N_INSNS (compare_latency);
982 }
983 else
984 {
985 /* FIXME. Guessing here. */
986 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
987 }
988 break;
989
990 case ASHIFT:
991 case ASHIFTRT:
992 case LSHIFTRT:
993 if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
994 {
995 if (!speed)
996 *total = COSTS_N_INSNS (1);
997 else
998 *total = COSTS_N_INSNS (shift_latency);
999 }
1000 else if (TARGET_BARREL_SHIFT_ENABLED)
1001 {
1002 /* FIXME: Guessing here. */
1003 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
1004 }
1005 else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
1006 {
1007 *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
1008 }
1009 else
1010 {
1011 /* Libcall. */
1012 if (!speed)
1013 *total = COSTS_N_INSNS (libcall_size_cost);
1014 else
1015 *total = COSTS_N_INSNS (100);
1016 }
1017 break;
1018
1019 case MULT:
1020 if (TARGET_MULTIPLY_ENABLED && small_mode)
1021 {
1022 if (!speed)
1023 *total = COSTS_N_INSNS (1);
1024 else
1025 *total = COSTS_N_INSNS (multiply_latency);
1026 }
1027 else
1028 {
1029 /* Libcall. */
1030 if (!speed)
1031 *total = COSTS_N_INSNS (libcall_size_cost);
1032 else
1033 *total = COSTS_N_INSNS (100);
1034 }
1035 break;
1036
1037 case DIV:
1038 case MOD:
1039 case UDIV:
1040 case UMOD:
1041 if (TARGET_DIVIDE_ENABLED && small_mode)
1042 {
1043 if (!speed)
1044 *total = COSTS_N_INSNS (1);
1045 else
1046 {
1047 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1048 {
1049 int cycles = 0;
1050 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1051
1052 while (i)
1053 {
1054 i >>= 2;
1055 cycles++;
1056 }
1057 if (IN_RANGE (i, 0, 65536))
1058 *total = COSTS_N_INSNS (1 + 1 + cycles);
1059 else
1060 *total = COSTS_N_INSNS (2 + 1 + cycles);
1061 return true;
1062 }
1063 else if (GET_CODE (XEXP (x, 1)) == REG)
1064 {
1065 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1066 return true;
1067 }
1068 else
1069 {
1070 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1071 return false;
1072 }
1073 }
1074 }
1075 else
1076 {
1077 /* Libcall. */
1078 if (!speed)
1079 *total = COSTS_N_INSNS (libcall_size_cost);
1080 else
1081 *total = COSTS_N_INSNS (100);
1082 }
1083 break;
1084
1085 case HIGH:
1086 case LO_SUM:
1087 if (!speed)
1088 *total = COSTS_N_INSNS (1);
1089 else
1090 *total = COSTS_N_INSNS (arithmetic_latency);
1091 break;
1092
1093 case ZERO_EXTEND:
1094 if (MEM_P (XEXP (x, 0)))
1095 *total = COSTS_N_INSNS (0);
1096 else if (small_mode)
1097 {
1098 if (!speed)
1099 *total = COSTS_N_INSNS (1);
1100 else
1101 *total = COSTS_N_INSNS (arithmetic_latency);
1102 }
1103 else
1104 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1105 break;
1106
1107 case CONST_INT:
1108 {
1109 switch (outer_code)
1110 {
1111 case HIGH:
1112 case LO_SUM:
1113 *total = COSTS_N_INSNS (0);
1114 return true;
1115
1116 case AND:
1117 case XOR:
1118 case IOR:
1119 case ASHIFT:
1120 case ASHIFTRT:
1121 case LSHIFTRT:
1122 case ROTATE:
1123 case ROTATERT:
1124 if (satisfies_constraint_L (x))
1125 *total = COSTS_N_INSNS (0);
1126 else
1127 *total = COSTS_N_INSNS (2);
1128 return true;
1129
1130 case SET:
1131 case PLUS:
1132 case MINUS:
1133 case COMPARE:
1134 if (satisfies_constraint_K (x))
1135 *total = COSTS_N_INSNS (0);
1136 else
1137 *total = COSTS_N_INSNS (2);
1138 return true;
1139
1140 case MULT:
1141 if (TARGET_MULTIPLY_ENABLED)
1142 {
1143 if (satisfies_constraint_K (x))
1144 *total = COSTS_N_INSNS (0);
1145 else
1146 *total = COSTS_N_INSNS (2);
1147 return true;
1148 }
1149 /* Fall through. */
1150
1151 default:
1152 if (satisfies_constraint_K (x))
1153 *total = COSTS_N_INSNS (1);
1154 else
1155 *total = COSTS_N_INSNS (2);
1156 return true;
1157 }
1158 }
1159
1160 case SYMBOL_REF:
1161 case CONST:
1162 switch (outer_code)
1163 {
1164 case HIGH:
1165 case LO_SUM:
1166 *total = COSTS_N_INSNS (0);
1167 return true;
1168
1169 case MEM:
1170 case SET:
1171 if (g_switch_value)
1172 {
1173 *total = COSTS_N_INSNS (0);
1174 return true;
1175 }
1176 break;
1177 }
1178 /* Fall through. */
1179
1180 case LABEL_REF:
1181 case CONST_DOUBLE:
1182 *total = COSTS_N_INSNS (2);
1183 return true;
1184
1185 case SET:
1186 *total = COSTS_N_INSNS (1);
1187 break;
1188
1189 case MEM:
1190 if (!speed)
1191 *total = COSTS_N_INSNS (1);
1192 else
1193 *total = COSTS_N_INSNS (load_latency);
1194 break;
1195
1196 }
1197
1198 return false;
1199 }
1200
1201 /* Implemenent TARGET_CAN_ELIMINATE. */
1202
1203 bool
1204 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1205 {
1206 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1207 }
1208
1209 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1210
1211 static bool
1212 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1213 {
1214 /* (rM) */
1215 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1216 return true;
1217 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1218 return true;
1219
1220 /* (rM)+literal) */
1221 if (GET_CODE (x) == PLUS
1222 && REG_P (XEXP (x, 0))
1223 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1224 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1225 && GET_CODE (XEXP (x, 1)) == CONST_INT
1226 && satisfies_constraint_K (XEXP ((x), 1)))
1227 return true;
1228
1229 /* gp(sym) */
1230 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1231 return true;
1232
1233 return false;
1234 }
1235
1236 /* Check a move is not memory to memory. */
1237
1238 bool
1239 lm32_move_ok (machine_mode mode, rtx operands[2]) {
1240 if (memory_operand (operands[0], mode))
1241 return register_or_zero_operand (operands[1], mode);
1242 return true;
1243 }