1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
44
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
50
51 #ifdef PUSH_ROUNDING
52
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
56
57 #endif
58
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
66
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
71
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* Don't check memory usage, since code is being emitted to check memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
93
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
96
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
100 {
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
112 };
113
114 /* This structure is used by store_by_pieces to describe the store to
115 be performed. */
116
117 struct store_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
128 };
129
130 extern struct obstack permanent_obstack;
131
132 static rtx get_push_address PARAMS ((int));
133
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
180
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
183
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
186
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
195
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
202
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
208
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
210
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
214 \f
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
217
218 void
219 init_expr_once ()
220 {
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
225
226 start_sequence ();
227
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
236
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
239 {
240 int regno;
241 rtx reg;
242
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
246
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
249
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
254 {
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
257
258 reg = gen_rtx_REG (mode, regno);
259
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
264
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
269
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
274
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
279 }
280 }
281
282 end_sequence ();
283 }
284
285 /* This is run at the start of compiling a function. */
286
287 void
288 init_expr ()
289 {
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
291
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
299 }
300
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
304 {
305 if (p == NULL)
306 return;
307
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
311 }
312
313 void
314 free_expr_status (f)
315 struct function *f;
316 {
317 free (f->expr);
318 f->expr = NULL;
319 }
320
321 /* Small sanity check that the queue is empty at the end of a function. */
322
323 void
324 finish_expr_for_function ()
325 {
326 if (pending_chain)
327 abort ();
328 }
329 \f
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
332
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
336
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
339
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
343 {
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
347 }
348
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
355
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
359
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
363
364 rtx
365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
368 {
369 register RTX_CODE code = GET_CODE (x);
370
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
376
377 if (code != QUEUED)
378 {
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 {
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
389
390 MEM_COPY_ATTRIBUTES (new, x);
391
392 if (QUEUED_INSN (y))
393 {
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
396 QUEUED_INSN (y));
397 return temp;
398 }
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
402 return new;
403 }
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
407 {
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
410 {
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
413 }
414 }
415 else if (code == PLUS || code == MULT)
416 {
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 {
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
424 }
425 }
426 return x;
427 }
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
430 emit_queue. */
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433 /* If the increment has happened and a pre-increment copy exists,
434 use that copy. */
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
441 QUEUED_INSN (x));
442 return QUEUED_COPY (x);
443 }
444
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
449
450 int
451 queued_subexp_p (x)
452 rtx x;
453 {
454 register enum rtx_code code = GET_CODE (x);
455 switch (code)
456 {
457 case QUEUED:
458 return 1;
459 case MEM:
460 return queued_subexp_p (XEXP (x, 0));
461 case MULT:
462 case PLUS:
463 case MINUS:
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
466 default:
467 return 0;
468 }
469 }
470
471 /* Perform all the pending incrementations. */
472
473 void
474 emit_queue ()
475 {
476 register rtx p;
477 while ((p = pending_chain))
478 {
479 rtx body = QUEUED_BODY (p);
480
481 if (GET_CODE (body) == SEQUENCE)
482 {
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
485 }
486 else
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
489 }
490 }
491 \f
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
496
497 void
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
500 int unsignedp;
501 {
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
506 enum insn_code code;
507 rtx libcall;
508
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
511
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
514
515 if (to_real != from_real)
516 abort ();
517
518 /* If FROM is a SUBREG that indicates that we have already done at least
519 the required extension, strip it. We don't handle such SUBREGs as
520 TO here. */
521
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
527
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
529 abort ();
530
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 {
534 emit_move_insn (to, from);
535 return;
536 }
537
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
539 {
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
541 abort ();
542
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
545 else
546 to = gen_rtx_SUBREG (from_mode, to, 0);
547
548 emit_move_insn (to, from);
549 return;
550 }
551
552 if (to_real != from_real)
553 abort ();
554
555 if (to_real)
556 {
557 rtx value, insns;
558
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
560 {
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
563 != CODE_FOR_nothing)
564 {
565 emit_unop_insn (code, to, from, UNKNOWN);
566 return;
567 }
568 }
569
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
607 {
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
609 return;
610 }
611 #endif
612
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
643 {
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
672 {
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
694 {
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
708 {
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713
714 libcall = (rtx) 0;
715 switch (from_mode)
716 {
717 case SFmode:
718 switch (to_mode)
719 {
720 case DFmode:
721 libcall = extendsfdf2_libfunc;
722 break;
723
724 case XFmode:
725 libcall = extendsfxf2_libfunc;
726 break;
727
728 case TFmode:
729 libcall = extendsftf2_libfunc;
730 break;
731
732 default:
733 break;
734 }
735 break;
736
737 case DFmode:
738 switch (to_mode)
739 {
740 case SFmode:
741 libcall = truncdfsf2_libfunc;
742 break;
743
744 case XFmode:
745 libcall = extenddfxf2_libfunc;
746 break;
747
748 case TFmode:
749 libcall = extenddftf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case XFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = truncxfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = truncxfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 case TFmode:
774 switch (to_mode)
775 {
776 case SFmode:
777 libcall = trunctfsf2_libfunc;
778 break;
779
780 case DFmode:
781 libcall = trunctfdf2_libfunc;
782 break;
783
784 default:
785 break;
786 }
787 break;
788
789 default:
790 break;
791 }
792
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
795 abort ();
796
797 start_sequence ();
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
799 1, from, from_mode);
800 insns = get_insns ();
801 end_sequence ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
803 from));
804 return;
805 }
806
807 /* Now both modes are integers. */
808
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
812 {
813 rtx insns;
814 rtx lowpart;
815 rtx fill_value;
816 rtx lowfrom;
817 int i;
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
820
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
823 != CODE_FOR_nothing)
824 {
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
833 }
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
838 {
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
844 return;
845 }
846
847 /* No special multiword conversion insn; do it by hand. */
848 start_sequence ();
849
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
852
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
855
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
859 else
860 lowpart_mode = from_mode;
861
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
863
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
866
867 /* Compute the value to put in each remaining word. */
868 if (unsignedp)
869 fill_value = const0_rtx;
870 else
871 {
872 #ifdef HAVE_slt
873 if (HAVE_slt
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
876 {
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
878 lowpart_mode, 0, 0);
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
881 }
882 else
883 #endif
884 {
885 fill_value
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
888 NULL_RTX, 0);
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
890 }
891 }
892
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
895 {
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
898
899 if (subword == 0)
900 abort ();
901
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
904 }
905
906 insns = get_insns ();
907 end_sequence ();
908
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
911 return;
912 }
913
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
917 {
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
926 return;
927 }
928
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
931 {
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
934
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
937 {
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
939 return;
940 }
941 #endif /* HAVE_truncqipqi2 */
942 abort ();
943 }
944
945 if (from_mode == PQImode)
946 {
947 if (to_mode != QImode)
948 {
949 from = convert_to_mode (QImode, from, unsignedp);
950 from_mode = QImode;
951 }
952 else
953 {
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
956 {
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_extendpqiqi2 */
961 abort ();
962 }
963 }
964
965 if (to_mode == PSImode)
966 {
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
969
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
972 {
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
974 return;
975 }
976 #endif /* HAVE_truncsipsi2 */
977 abort ();
978 }
979
980 if (from_mode == PSImode)
981 {
982 if (to_mode != SImode)
983 {
984 from = convert_to_mode (SImode, from, unsignedp);
985 from_mode = SImode;
986 }
987 else
988 {
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
991 {
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
993 return;
994 }
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
998 {
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_zero_extendpsisi2 */
1003 abort ();
1004 }
1005 }
1006
1007 if (to_mode == PDImode)
1008 {
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1011
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1014 {
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1016 return;
1017 }
1018 #endif /* HAVE_truncdipdi2 */
1019 abort ();
1020 }
1021
1022 if (from_mode == PDImode)
1023 {
1024 if (to_mode != DImode)
1025 {
1026 from = convert_to_mode (DImode, from, unsignedp);
1027 from_mode = DImode;
1028 }
1029 else
1030 {
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1033 {
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1035 return;
1036 }
1037 #endif /* HAVE_extendpdidi2 */
1038 abort ();
1039 }
1040 }
1041
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1044
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1049 {
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1061 return;
1062 }
1063
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1066 {
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1070 {
1071 emit_unop_insn (code, to, from, equiv_code);
1072 return;
1073 }
1074 else
1075 {
1076 enum machine_mode intermediate;
1077 rtx tmp;
1078 tree shift_amount;
1079
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1090 {
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1093 return;
1094 }
1095
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1102 to, unsignedp);
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1104 to, unsignedp);
1105 if (tmp != to)
1106 emit_move_insn (to, tmp);
1107 return;
1108 }
1109 }
1110
1111 /* Support special truncate insns for certain modes. */
1112
1113 if (from_mode == DImode && to_mode == SImode)
1114 {
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == DImode && to_mode == HImode)
1127 {
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == DImode && to_mode == QImode)
1140 {
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1143 {
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == SImode && to_mode == HImode)
1153 {
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1156 {
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == SImode && to_mode == QImode)
1166 {
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1169 {
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == HImode && to_mode == QImode)
1179 {
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == DImode)
1192 {
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == TImode && to_mode == SImode)
1205 {
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1208 {
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == TImode && to_mode == HImode)
1218 {
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1221 {
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 if (from_mode == TImode && to_mode == QImode)
1231 {
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1234 {
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1236 return;
1237 }
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1241 }
1242
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 {
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1250 return;
1251 }
1252
1253 /* Mode combination is not recognized. */
1254 abort ();
1255 }
1256
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1263
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
1266
1267 rtx
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1270 rtx x;
1271 int unsignedp;
1272 {
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1274 }
1275
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1280
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1283
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1288
1289 rtx
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1292 rtx x;
1293 int unsignedp;
1294 {
1295 register rtx temp;
1296
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1299
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1304
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1307
1308 if (mode == oldmode)
1309 return x;
1310
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314 the wrong if the constant appears negative. What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1316
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 {
1321 HOST_WIDE_INT val = INTVAL (x);
1322
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 {
1326 int width = GET_MODE_BITSIZE (oldmode);
1327
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 }
1331
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1333 }
1334
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351 {
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 {
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1360
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 if (! unsignedp
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1367
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1369 }
1370
1371 return gen_lowpart (mode, x);
1372 }
1373
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1376 return temp;
1377 }
1378 \f
1379 /* This macro determines the largest unit size that
1380 move_by_pieces can use. */
1381
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1385
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1388 #endif
1389
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1394
1395 When TO is NULL, emit_single_push_insn is used to push FROM
1396 onto the stack.
1397
1398 ALIGN is maximum alignment we can assume. */
1399
1400 void
1401 move_by_pieces (to, from, len, align)
1402 rtx to, from;
1403 unsigned HOST_WIDE_INT len;
1404 unsigned int align;
1405 {
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1411
1412 data.offset = 0;
1413 data.from_addr = from_addr;
1414 if (to)
1415 {
1416 to_addr = XEXP (to, 0);
1417 data.to = to;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 }
1424 else
1425 {
1426 to_addr = NULL_RTX;
1427 data.to = NULL_RTX;
1428 data.autinc_to = 1;
1429 #ifdef STACK_GROWS_DOWNWARD
1430 data.reverse = 1;
1431 #else
1432 data.reverse = 0;
1433 #endif
1434 }
1435 data.to_addr = to_addr;
1436 data.from = from;
1437 data.autinc_from
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1441
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1445 data.len = len;
1446
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1452 {
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1457 mode = tmode;
1458
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1460 {
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1464 }
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1466 {
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1470 }
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1474 {
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1476 data.autinc_to = 1;
1477 data.explicit_inc_to = -1;
1478 }
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1480 {
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1484 }
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1487 }
1488
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1492
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1495
1496 while (max_size > 1)
1497 {
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1501 mode = tmode;
1502
1503 if (mode == VOIDmode)
1504 break;
1505
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1509
1510 max_size = GET_MODE_SIZE (mode);
1511 }
1512
1513 /* The code above should have handled everything. */
1514 if (data.len > 0)
1515 abort ();
1516 }
1517
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1520
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1524 unsigned int align;
1525 {
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1528
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1532
1533 while (max_size > 1)
1534 {
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1537
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1542
1543 if (mode == VOIDmode)
1544 break;
1545
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1549
1550 max_size = GET_MODE_SIZE (mode);
1551 }
1552
1553 if (l)
1554 abort ();
1555 return n_insns;
1556 }
1557
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1561
1562 static void
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1567 {
1568 unsigned int size = GET_MODE_SIZE (mode);
1569 rtx to1, from1;
1570
1571 while (data->len >= size)
1572 {
1573 if (data->reverse)
1574 data->offset -= size;
1575
1576 if (data->to)
1577 {
1578 if (data->autinc_to)
1579 {
1580 to1 = gen_rtx_MEM (mode, data->to_addr);
1581 MEM_COPY_ATTRIBUTES (to1, data->to);
1582 }
1583 else
1584 to1 = change_address (data->to, mode,
1585 plus_constant (data->to_addr, data->offset));
1586 }
1587
1588 if (data->autinc_from)
1589 {
1590 from1 = gen_rtx_MEM (mode, data->from_addr);
1591 MEM_COPY_ATTRIBUTES (from1, data->from);
1592 }
1593 else
1594 from1 = change_address (data->from, mode,
1595 plus_constant (data->from_addr, data->offset));
1596
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1601
1602 if (data->to)
1603 emit_insn ((*genfun) (to1, from1));
1604 else
1605 emit_single_push_insn (mode, from1, NULL);
1606
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1611
1612 if (! data->reverse)
1613 data->offset += size;
1614
1615 data->len -= size;
1616 }
1617 }
1618 \f
1619 /* Emit code to move a block Y to a block X.
1620 This may be done with string-move instructions,
1621 with multiple scalar move instructions, or with a library call.
1622
1623 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1624 with mode BLKmode.
1625 SIZE is an rtx that says how long they are.
1626 ALIGN is the maximum alignment we can assume they have.
1627
1628 Return the address of the new block, if memcpy is called and returns it,
1629 0 otherwise. */
1630
1631 rtx
1632 emit_block_move (x, y, size, align)
1633 rtx x, y;
1634 rtx size;
1635 unsigned int align;
1636 {
1637 rtx retval = 0;
1638 #ifdef TARGET_MEM_FUNCTIONS
1639 static tree fn;
1640 tree call_expr, arg_list;
1641 #endif
1642
1643 if (GET_MODE (x) != BLKmode)
1644 abort ();
1645
1646 if (GET_MODE (y) != BLKmode)
1647 abort ();
1648
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
1651 size = protect_from_queue (size, 0);
1652
1653 if (GET_CODE (x) != MEM)
1654 abort ();
1655 if (GET_CODE (y) != MEM)
1656 abort ();
1657 if (size == 0)
1658 abort ();
1659
1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1661 move_by_pieces (x, y, INTVAL (size), align);
1662 else
1663 {
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1667
1668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1669 enum machine_mode mode;
1670
1671 /* Since this is a move insn, we don't care about volatility. */
1672 volatile_ok = 1;
1673
1674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1675 mode = GET_MODE_WIDER_MODE (mode))
1676 {
1677 enum insn_code code = movstr_optab[(int) mode];
1678 insn_operand_predicate_fn pred;
1679
1680 if (code != CODE_FOR_nothing
1681 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
1683 returned by the macro, it will definitely be less than the
1684 actual mode mask. */
1685 && ((GET_CODE (size) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1687 <= (GET_MODE_MASK (mode) >> 1)))
1688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1690 || (*pred) (x, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1692 || (*pred) (y, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1694 || (*pred) (opalign, VOIDmode)))
1695 {
1696 rtx op2;
1697 rtx last = get_last_insn ();
1698 rtx pat;
1699
1700 op2 = convert_to_mode (mode, size, 1);
1701 pred = insn_data[(int) code].operand[2].predicate;
1702 if (pred != 0 && ! (*pred) (op2, mode))
1703 op2 = copy_to_mode_reg (mode, op2);
1704
1705 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1706 if (pat)
1707 {
1708 emit_insn (pat);
1709 volatile_ok = 0;
1710 return 0;
1711 }
1712 else
1713 delete_insns_since (last);
1714 }
1715 }
1716
1717 volatile_ok = 0;
1718
1719 /* X, Y, or SIZE may have been passed through protect_from_queue.
1720
1721 It is unsafe to save the value generated by protect_from_queue
1722 and reuse it later. Consider what happens if emit_queue is
1723 called before the return value from protect_from_queue is used.
1724
1725 Expansion of the CALL_EXPR below will call emit_queue before
1726 we are finished emitting RTL for argument setup. So if we are
1727 not careful we could get the wrong value for an argument.
1728
1729 To avoid this problem we go ahead and emit code to copy X, Y &
1730 SIZE into new pseudos. We can then place those new pseudos
1731 into an RTL_EXPR and use them later, even after a call to
1732 emit_queue.
1733
1734 Note this is not strictly needed for library calls since they
1735 do not call emit_queue before loading their arguments. However,
1736 we may need to have library calls call emit_queue in the future
1737 since failing to do so could cause problems for targets which
1738 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1739 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1740 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1741
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1744 #else
1745 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node));
1747 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1748 #endif
1749
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 /* It is incorrect to use the libcall calling conventions to call
1752 memcpy in this context.
1753
1754 This could be a user call to memcpy and the user may wish to
1755 examine the return value from memcpy.
1756
1757 For targets where libcalls and normal calls have different conventions
1758 for returning pointers, we could end up generating incorrect code.
1759
1760 So instead of using a libcall sequence we build up a suitable
1761 CALL_EXPR and expand the call in the normal fashion. */
1762 if (fn == NULL_TREE)
1763 {
1764 tree fntype;
1765
1766 /* This was copied from except.c; I don't know if all of this is
1767 necessary in this context or not. */
1768 fn = get_identifier ("memcpy");
1769 fntype = build_pointer_type (void_type_node);
1770 fntype = build_function_type (fntype, NULL_TREE);
1771 fn = build_decl (FUNCTION_DECL, fn, fntype);
1772 ggc_add_tree_root (&fn, 1);
1773 DECL_EXTERNAL (fn) = 1;
1774 TREE_PUBLIC (fn) = 1;
1775 DECL_ARTIFICIAL (fn) = 1;
1776 TREE_NOTHROW (fn) = 1;
1777 make_decl_rtl (fn, NULL);
1778 assemble_external (fn);
1779 }
1780
1781 /* We need to make an argument list for the function call.
1782
1783 memcpy has three arguments, the first two are void * addresses and
1784 the last is a size_t byte count for the copy. */
1785 arg_list
1786 = build_tree_list (NULL_TREE,
1787 make_tree (build_pointer_type (void_type_node), x));
1788 TREE_CHAIN (arg_list)
1789 = build_tree_list (NULL_TREE,
1790 make_tree (build_pointer_type (void_type_node), y));
1791 TREE_CHAIN (TREE_CHAIN (arg_list))
1792 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1793 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1794
1795 /* Now we have to build up the CALL_EXPR itself. */
1796 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1797 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1798 call_expr, arg_list, NULL_TREE);
1799 TREE_SIDE_EFFECTS (call_expr) = 1;
1800
1801 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1802 #else
1803 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1804 VOIDmode, 3, y, Pmode, x, Pmode,
1805 convert_to_mode (TYPE_MODE (integer_type_node), size,
1806 TREE_UNSIGNED (integer_type_node)),
1807 TYPE_MODE (integer_type_node));
1808 #endif
1809 }
1810
1811 return retval;
1812 }
1813 \f
1814 /* Copy all or part of a value X into registers starting at REGNO.
1815 The number of registers to be filled is NREGS. */
1816
1817 void
1818 move_block_to_reg (regno, x, nregs, mode)
1819 int regno;
1820 rtx x;
1821 int nregs;
1822 enum machine_mode mode;
1823 {
1824 int i;
1825 #ifdef HAVE_load_multiple
1826 rtx pat;
1827 rtx last;
1828 #endif
1829
1830 if (nregs == 0)
1831 return;
1832
1833 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1834 x = validize_mem (force_const_mem (mode, x));
1835
1836 /* See if the machine can do this with a load multiple insn. */
1837 #ifdef HAVE_load_multiple
1838 if (HAVE_load_multiple)
1839 {
1840 last = get_last_insn ();
1841 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1842 GEN_INT (nregs));
1843 if (pat)
1844 {
1845 emit_insn (pat);
1846 return;
1847 }
1848 else
1849 delete_insns_since (last);
1850 }
1851 #endif
1852
1853 for (i = 0; i < nregs; i++)
1854 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1855 operand_subword_force (x, i, mode));
1856 }
1857
1858 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1859 The number of registers to be filled is NREGS. SIZE indicates the number
1860 of bytes in the object X. */
1861
1862 void
1863 move_block_from_reg (regno, x, nregs, size)
1864 int regno;
1865 rtx x;
1866 int nregs;
1867 int size;
1868 {
1869 int i;
1870 #ifdef HAVE_store_multiple
1871 rtx pat;
1872 rtx last;
1873 #endif
1874 enum machine_mode mode;
1875
1876 if (nregs == 0)
1877 return;
1878
1879 /* If SIZE is that of a mode no bigger than a word, just use that
1880 mode's store operation. */
1881 if (size <= UNITS_PER_WORD
1882 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1883 {
1884 emit_move_insn (change_address (x, mode, NULL),
1885 gen_rtx_REG (mode, regno));
1886 return;
1887 }
1888
1889 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1890 to the left before storing to memory. Note that the previous test
1891 doesn't handle all cases (e.g. SIZE == 3). */
1892 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1893 {
1894 rtx tem = operand_subword (x, 0, 1, BLKmode);
1895 rtx shift;
1896
1897 if (tem == 0)
1898 abort ();
1899
1900 shift = expand_shift (LSHIFT_EXPR, word_mode,
1901 gen_rtx_REG (word_mode, regno),
1902 build_int_2 ((UNITS_PER_WORD - size)
1903 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1904 emit_move_insn (tem, shift);
1905 return;
1906 }
1907
1908 /* See if the machine can do this with a store multiple insn. */
1909 #ifdef HAVE_store_multiple
1910 if (HAVE_store_multiple)
1911 {
1912 last = get_last_insn ();
1913 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1914 GEN_INT (nregs));
1915 if (pat)
1916 {
1917 emit_insn (pat);
1918 return;
1919 }
1920 else
1921 delete_insns_since (last);
1922 }
1923 #endif
1924
1925 for (i = 0; i < nregs; i++)
1926 {
1927 rtx tem = operand_subword (x, i, 1, BLKmode);
1928
1929 if (tem == 0)
1930 abort ();
1931
1932 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933 }
1934 }
1935
1936 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1937 registers represented by a PARALLEL. SSIZE represents the total size of
1938 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1939 SRC in bits. */
1940 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1941 the balance will be in what would be the low-order memory addresses, i.e.
1942 left justified for big endian, right justified for little endian. This
1943 happens to be true for the targets currently using this support. If this
1944 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1945 would be needed. */
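
/* As an illustration (register numbers are hypothetical), a target that
   returns a 16-byte structure in two 8-byte registers might describe DST as

     gen_rtx_PARALLEL (BLKmode,
                       gen_rtvec (2,
                                  gen_rtx_EXPR_LIST (VOIDmode,
                                                     gen_rtx_REG (DImode, 3),
                                                     const0_rtx),
                                  gen_rtx_EXPR_LIST (VOIDmode,
                                                     gen_rtx_REG (DImode, 4),
                                                     GEN_INT (8))));

   and then call emit_group_load (dst, src_mem, 16, align) to extract each
   8-byte piece of ORIG_SRC into the corresponding register.  */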
1946
1947 void
1948 emit_group_load (dst, orig_src, ssize, align)
1949 rtx dst, orig_src;
1950 unsigned int align;
1951 int ssize;
1952 {
1953 rtx *tmps, src;
1954 int start, i;
1955
1956 if (GET_CODE (dst) != PARALLEL)
1957 abort ();
1958
1959 /* Check for a NULL entry, used to indicate that the parameter goes
1960 both on the stack and in registers. */
1961 if (XEXP (XVECEXP (dst, 0, 0), 0))
1962 start = 0;
1963 else
1964 start = 1;
1965
1966 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1967
1968 /* Process the pieces. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 {
1971 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1972 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1973 unsigned int bytelen = GET_MODE_SIZE (mode);
1974 int shift = 0;
1975
1976 /* Handle trailing fragments that run over the size of the struct. */
1977 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1978 {
1979 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1980 bytelen = ssize - bytepos;
1981 if (bytelen <= 0)
1982 abort ();
1983 }
1984
1985 /* If we won't be loading directly from memory, protect the real source
1986 from strange tricks we might play; but make sure that the source can
1987 be loaded directly into the destination. */
1988 src = orig_src;
1989 if (GET_CODE (orig_src) != MEM
1990 && (!CONSTANT_P (orig_src)
1991 || (GET_MODE (orig_src) != mode
1992 && GET_MODE (orig_src) != VOIDmode)))
1993 {
1994 if (GET_MODE (orig_src) == VOIDmode)
1995 src = gen_reg_rtx (mode);
1996 else
1997 src = gen_reg_rtx (GET_MODE (orig_src));
1998 emit_move_insn (src, orig_src);
1999 }
2000
2001 /* Optimize the access just a bit. */
2002 if (GET_CODE (src) == MEM
2003 && align >= GET_MODE_ALIGNMENT (mode)
2004 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2005 && bytelen == GET_MODE_SIZE (mode))
2006 {
2007 tmps[i] = gen_reg_rtx (mode);
2008 emit_move_insn (tmps[i],
2009 change_address (src, mode,
2010 plus_constant (XEXP (src, 0),
2011 bytepos)));
2012 }
2013 else if (GET_CODE (src) == CONCAT)
2014 {
2015 if (bytepos == 0
2016 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2017 tmps[i] = XEXP (src, 0);
2018 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2019 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2020 tmps[i] = XEXP (src, 1);
2021 else
2022 abort ();
2023 }
2024 else if (CONSTANT_P (src)
2025 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2026 tmps[i] = src;
2027 else
2028 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2029 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2030 mode, mode, align, ssize);
2031
2032 if (BYTES_BIG_ENDIAN && shift)
2033 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2034 tmps[i], 0, OPTAB_WIDEN);
2035 }
2036
2037 emit_queue ();
2038
2039 /* Copy the extracted pieces into the proper (probable) hard regs. */
2040 for (i = start; i < XVECLEN (dst, 0); i++)
2041 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2042 }
2043
2044 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2045 registers represented by a PARALLEL. SSIZE represents the total size of
2046 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
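
/* The mirror image of the sketch above for emit_group_load: with the same
   two-register PARALLEL as SRC, a call such as

     emit_group_store (dst_mem, parallel_src, 16, align);

   (names purely illustrative) copies each register piece back into the
   16-byte block DST_MEM, shifting any trailing fragment on big-endian
   targets.  */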
2047
2048 void
2049 emit_group_store (orig_dst, src, ssize, align)
2050 rtx orig_dst, src;
2051 int ssize;
2052 unsigned int align;
2053 {
2054 rtx *tmps, dst;
2055 int start, i;
2056
2057 if (GET_CODE (src) != PARALLEL)
2058 abort ();
2059
2060 /* Check for a NULL entry, used to indicate that the parameter goes
2061 both on the stack and in registers. */
2062 if (XEXP (XVECEXP (src, 0, 0), 0))
2063 start = 0;
2064 else
2065 start = 1;
2066
2067 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2068
2069 /* Copy the (probable) hard regs into pseudos. */
2070 for (i = start; i < XVECLEN (src, 0); i++)
2071 {
2072 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2073 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2074 emit_move_insn (tmps[i], reg);
2075 }
2076 emit_queue ();
2077
2078 /* If we won't be storing directly into memory, protect the real destination
2079 from strange tricks we might play. */
2080 dst = orig_dst;
2081 if (GET_CODE (dst) == PARALLEL)
2082 {
2083 rtx temp;
2084
2085 /* We can get a PARALLEL dst if there is a conditional expression in
2086 a return statement. In that case, the dst and src are the same,
2087 so no action is necessary. */
2088 if (rtx_equal_p (dst, src))
2089 return;
2090
2091 /* It is unclear if we can ever reach here, but we may as well handle
2092 it. Allocate a temporary, and split this into a store/load to/from
2093 the temporary. */
2094
2095 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2096 emit_group_store (temp, src, ssize, align);
2097 emit_group_load (dst, temp, ssize, align);
2098 return;
2099 }
2100 else if (GET_CODE (dst) != MEM)
2101 {
2102 dst = gen_reg_rtx (GET_MODE (orig_dst));
2103 /* Make life a bit easier for combine. */
2104 emit_move_insn (dst, const0_rtx);
2105 }
2106
2107 /* Process the pieces. */
2108 for (i = start; i < XVECLEN (src, 0); i++)
2109 {
2110 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2111 enum machine_mode mode = GET_MODE (tmps[i]);
2112 unsigned int bytelen = GET_MODE_SIZE (mode);
2113
2114 /* Handle trailing fragments that run over the size of the struct. */
2115 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2116 {
2117 if (BYTES_BIG_ENDIAN)
2118 {
2119 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2120 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2121 tmps[i], 0, OPTAB_WIDEN);
2122 }
2123 bytelen = ssize - bytepos;
2124 }
2125
2126 /* Optimize the access just a bit. */
2127 if (GET_CODE (dst) == MEM
2128 && align >= GET_MODE_ALIGNMENT (mode)
2129 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2130 && bytelen == GET_MODE_SIZE (mode))
2131 emit_move_insn (change_address (dst, mode,
2132 plus_constant (XEXP (dst, 0),
2133 bytepos)),
2134 tmps[i]);
2135 else
2136 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2137 mode, tmps[i], align, ssize);
2138 }
2139
2140 emit_queue ();
2141
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (GET_CODE (dst) == REG)
2144 emit_move_insn (orig_dst, dst);
2145 }
2146
2147 /* Generate code to copy a BLKmode object of TYPE out of a
2148 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2149 is null, a stack temporary is created. TGTBLK is returned.
2150
2151 The primary purpose of this routine is to handle functions
2152 that return BLKmode structures in registers. Some machines
2153 (the PA for example) want to return all small structures
2154 in registers regardless of the structure's alignment. */
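
/* Sketch of the typical use (context assumed): when expanding a call whose
   value is a small BLKmode structure returned in registers, the caller can
   write

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   and receive a freshly assigned, properly laid out stack temporary holding
   the bytes of the structure.  */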
2155
2156 rtx
2157 copy_blkmode_from_reg (tgtblk, srcreg, type)
2158 rtx tgtblk;
2159 rtx srcreg;
2160 tree type;
2161 {
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2166
2167 if (tgtblk == 0)
2168 {
2169 tgtblk = assign_temp (build_qualified_type (type,
2170 (TYPE_QUALS (type)
2171 | TYPE_QUAL_CONST)),
2172 0, 1, 1);
2173 preserve_temp_slots (tgtblk);
2174 }
2175
2176 /* This code assumes srcreg is at least a full word. If it isn't,
2177 copy it into a new pseudo which is a full word. */
2178 if (GET_MODE (srcreg) != BLKmode
2179 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2180 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2181
2182 /* Structures whose size is not a multiple of a word are aligned
2183 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2184 machine, this means we must skip the empty high order bytes when
2185 calculating the bit offset. */
2186 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2187 big_endian_correction
2188 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
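
  /* For example, on a 32-bit big-endian target (BITS_PER_WORD == 32,
     UNITS_PER_WORD == 4) a 6-byte structure gives a correction of
     32 - (6 % 4) * 8 == 16 bits, so extraction starts halfway into the
     first source word.  (Numbers are purely illustrative.)  */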
2189
2190   /* Copy the structure BITSIZE bits at a time.
2191
2192 We could probably emit more efficient code for machines which do not use
2193 strict alignment, but it doesn't seem worth the effort at the current
2194 time. */
2195 for (bitpos = 0, xbitpos = big_endian_correction;
2196 bitpos < bytes * BITS_PER_UNIT;
2197 bitpos += bitsize, xbitpos += bitsize)
2198 {
2199 /* We need a new source operand each time xbitpos is on a
2200 word boundary and when xbitpos == big_endian_correction
2201 (the first time through). */
2202 if (xbitpos % BITS_PER_WORD == 0
2203 || xbitpos == big_endian_correction)
2204 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205 GET_MODE (srcreg));
2206
2207 /* We need a new destination operand each time bitpos is on
2208 a word boundary. */
2209 if (bitpos % BITS_PER_WORD == 0)
2210 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2211
2212       /* Use xbitpos for the source extraction (right justified) and
2213          bitpos for the destination store (left justified).  */
2214 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2215 extract_bit_field (src, bitsize,
2216 xbitpos % BITS_PER_WORD, 1,
2217 NULL_RTX, word_mode, word_mode,
2218 bitsize, BITS_PER_WORD),
2219 bitsize, BITS_PER_WORD);
2220 }
2221
2222 return tgtblk;
2223 }
2224
2225 /* Add a USE expression for REG to the (possibly empty) list pointed
2226 to by CALL_FUSAGE. REG must denote a hard register. */
2227
2228 void
2229 use_reg (call_fusage, reg)
2230 rtx *call_fusage, reg;
2231 {
2232 if (GET_CODE (reg) != REG
2233 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2234 abort ();
2235
2236 *call_fusage
2237 = gen_rtx_EXPR_LIST (VOIDmode,
2238 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2239 }
2240
2241 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2242 starting at REGNO. All of these registers must be hard registers. */
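
/* For example (register number hypothetical), a call that passes an argument
   in three consecutive hard registers beginning at register 4 records that
   fact for its CALL_INSN_FUNCTION_USAGE list with

     use_regs (&call_fusage, 4, 3);  */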
2243
2244 void
2245 use_regs (call_fusage, regno, nregs)
2246 rtx *call_fusage;
2247 int regno;
2248 int nregs;
2249 {
2250 int i;
2251
2252 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2253 abort ();
2254
2255 for (i = 0; i < nregs; i++)
2256 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2257 }
2258
2259 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2260 PARALLEL REGS. This is for calls that pass values in multiple
2261 non-contiguous locations. The Irix 6 ABI has examples of this. */
2262
2263 void
2264 use_group_regs (call_fusage, regs)
2265 rtx *call_fusage;
2266 rtx regs;
2267 {
2268 int i;
2269
2270 for (i = 0; i < XVECLEN (regs, 0); i++)
2271 {
2272 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2273
2274 /* A NULL entry means the parameter goes both on the stack and in
2275 registers. This can also be a MEM for targets that pass values
2276 partially on the stack and partially in registers. */
2277 if (reg != 0 && GET_CODE (reg) == REG)
2278 use_reg (call_fusage, reg);
2279 }
2280 }
2281 \f
2282
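/* Determine whether the LEN bytes generated by CONSTFUN can be stored to
   memory using several move instructions.  CONSTFUNDATA is a pointer which
   will be passed as argument in every CONSTFUN call.  ALIGN is the maximum
   alignment we can assume.  Return nonzero if a call to store_by_pieces
   should succeed.  */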
2283 int
2284 can_store_by_pieces (len, constfun, constfundata, align)
2285 unsigned HOST_WIDE_INT len;
2286 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2287 PTR constfundata;
2288 unsigned int align;
2289 {
2290 unsigned HOST_WIDE_INT max_size, l;
2291 HOST_WIDE_INT offset = 0;
2292 enum machine_mode mode, tmode;
2293 enum insn_code icode;
2294 int reverse;
2295 rtx cst;
2296
2297 if (! MOVE_BY_PIECES_P (len, align))
2298 return 0;
2299
2300 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2301 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2302 align = MOVE_MAX * BITS_PER_UNIT;
2303
2304 /* We would first store what we can in the largest integer mode, then go to
2305 successively smaller modes. */
2306
2307 for (reverse = 0;
2308 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2309 reverse++)
2310 {
2311 l = len;
2312 mode = VOIDmode;
2313 max_size = MOVE_MAX_PIECES + 1;
2314 while (max_size > 1)
2315 {
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2319 mode = tmode;
2320
2321 if (mode == VOIDmode)
2322 break;
2323
2324 icode = mov_optab->handlers[(int) mode].insn_code;
2325 if (icode != CODE_FOR_nothing
2326 && align >= GET_MODE_ALIGNMENT (mode))
2327 {
2328 unsigned int size = GET_MODE_SIZE (mode);
2329
2330 while (l >= size)
2331 {
2332 if (reverse)
2333 offset -= size;
2334
2335 cst = (*constfun) (constfundata, offset, mode);
2336 if (!LEGITIMATE_CONSTANT_P (cst))
2337 return 0;
2338
2339 if (!reverse)
2340 offset += size;
2341
2342 l -= size;
2343 }
2344 }
2345
2346 max_size = GET_MODE_SIZE (mode);
2347 }
2348
2349 /* The code above should have handled everything. */
2350 if (l != 0)
2351 abort ();
2352 }
2353
2354 return 1;
2355 }
2356
2357 /* Generate several move instructions to store LEN bytes generated by
2358 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2359 pointer which will be passed as argument in every CONSTFUN call.
2360 ALIGN is maximum alignment we can assume. */
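
/* A worked example of the interface (sizes and alignment are illustrative):
   to zero a 32-bit-aligned 16-byte block one could write

     store_by_pieces (to, 16, clear_by_pieces_1, NULL, 32);

   clear_by_pieces below uses the same callback; CONSTFUN is consulted once
   per generated store, with the byte OFFSET and the mode of that store.  */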
2361
2362 void
2363 store_by_pieces (to, len, constfun, constfundata, align)
2364 rtx to;
2365 unsigned HOST_WIDE_INT len;
2366 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2367 PTR constfundata;
2368 unsigned int align;
2369 {
2370 struct store_by_pieces data;
2371
2372 if (! MOVE_BY_PIECES_P (len, align))
2373 abort ();
2374 to = protect_from_queue (to, 1);
2375 data.constfun = constfun;
2376 data.constfundata = constfundata;
2377 data.len = len;
2378 data.to = to;
2379 store_by_pieces_1 (&data, align);
2380 }
2381
2382 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2383 rtx with BLKmode). The caller must pass TO through protect_from_queue
2384 before calling. ALIGN is maximum alignment we can assume. */
2385
2386 static void
2387 clear_by_pieces (to, len, align)
2388 rtx to;
2389 unsigned HOST_WIDE_INT len;
2390 unsigned int align;
2391 {
2392 struct store_by_pieces data;
2393
2394 data.constfun = clear_by_pieces_1;
2395 data.constfundata = NULL;
2396 data.len = len;
2397 data.to = to;
2398 store_by_pieces_1 (&data, align);
2399 }
2400
2401 /* Callback routine for clear_by_pieces.
2402 Return const0_rtx unconditionally. */
2403
2404 static rtx
2405 clear_by_pieces_1 (data, offset, mode)
2406 PTR data ATTRIBUTE_UNUSED;
2407 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2408 enum machine_mode mode ATTRIBUTE_UNUSED;
2409 {
2410 return const0_rtx;
2411 }
2412
2413 /* Subroutine of clear_by_pieces and store_by_pieces.
2414 Generate several move instructions to store LEN bytes of block TO. (A MEM
2415 rtx with BLKmode). The caller must pass TO through protect_from_queue
2416 before calling. ALIGN is maximum alignment we can assume. */
2417
2418 static void
2419 store_by_pieces_1 (data, align)
2420 struct store_by_pieces *data;
2421 unsigned int align;
2422 {
2423 rtx to_addr = XEXP (data->to, 0);
2424 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2425 enum machine_mode mode = VOIDmode, tmode;
2426 enum insn_code icode;
2427
2428 data->offset = 0;
2429 data->to_addr = to_addr;
2430 data->autinc_to
2431 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2432 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2433
2434 data->explicit_inc_to = 0;
2435 data->reverse
2436 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2437 if (data->reverse)
2438 data->offset = data->len;
2439
2440 /* If storing requires more than two move insns,
2441 copy addresses to registers (to make displacements shorter)
2442 and use post-increment if available. */
2443 if (!data->autinc_to
2444 && move_by_pieces_ninsns (data->len, align) > 2)
2445 {
2446 /* Determine the main mode we'll be using. */
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2450 mode = tmode;
2451
2452 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2453 {
2454 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2455 data->autinc_to = 1;
2456 data->explicit_inc_to = -1;
2457 }
2458
2459 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2460 && ! data->autinc_to)
2461 {
2462 data->to_addr = copy_addr_to_reg (to_addr);
2463 data->autinc_to = 1;
2464 data->explicit_inc_to = 1;
2465 }
2466
2467 if ( !data->autinc_to && CONSTANT_P (to_addr))
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 }
2470
2471 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2472 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2473 align = MOVE_MAX * BITS_PER_UNIT;
2474
2475 /* First store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2477
2478 while (max_size > 1)
2479 {
2480 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2481 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2482 if (GET_MODE_SIZE (tmode) < max_size)
2483 mode = tmode;
2484
2485 if (mode == VOIDmode)
2486 break;
2487
2488 icode = mov_optab->handlers[(int) mode].insn_code;
2489 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2490 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491
2492 max_size = GET_MODE_SIZE (mode);
2493 }
2494
2495 /* The code above should have handled everything. */
2496 if (data->len != 0)
2497 abort ();
2498 }
2499
2500 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2501 with move instructions for mode MODE. GENFUN is the gen_... function
2502 to make a move insn for that mode. DATA has all the other info. */
2503
2504 static void
2505 store_by_pieces_2 (genfun, mode, data)
2506 rtx (*genfun) PARAMS ((rtx, ...));
2507 enum machine_mode mode;
2508 struct store_by_pieces *data;
2509 {
2510 unsigned int size = GET_MODE_SIZE (mode);
2511 rtx to1, cst;
2512
2513 while (data->len >= size)
2514 {
2515 if (data->reverse)
2516 data->offset -= size;
2517
2518 if (data->autinc_to)
2519 {
2520 to1 = gen_rtx_MEM (mode, data->to_addr);
2521 MEM_COPY_ATTRIBUTES (to1, data->to);
2522 }
2523 else
2524 to1 = change_address (data->to, mode,
2525 plus_constant (data->to_addr, data->offset));
2526
2527 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2528 emit_insn (gen_add2_insn (data->to_addr,
2529 GEN_INT (-(HOST_WIDE_INT) size)));
2530
2531 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2532 emit_insn ((*genfun) (to1, cst));
2533
2534 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536
2537 if (! data->reverse)
2538 data->offset += size;
2539
2540 data->len -= size;
2541 }
2542 }
2543 \f
2544 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2545    its length in bytes and ALIGN is the maximum alignment we can assume it has.
2546
2547 If we call a function that returns the length of the block, return it. */
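
/* Typical use (size and alignment merely illustrative): to zero a
   32-bit-aligned, 64-byte BLKmode object the expander can write

     clear_storage (object, GEN_INT (64), 32);

   Small constant sizes go through clear_by_pieces, larger or variable ones
   through a clrstr pattern or a call to memset/bzero.  */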
2548
2549 rtx
2550 clear_storage (object, size, align)
2551 rtx object;
2552 rtx size;
2553 unsigned int align;
2554 {
2555 #ifdef TARGET_MEM_FUNCTIONS
2556 static tree fn;
2557 tree call_expr, arg_list;
2558 #endif
2559 rtx retval = 0;
2560
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 else
2568 {
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2571
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2575 else
2576 {
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2580
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
2583
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2586 {
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2589
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
2603 {
2604 rtx op1;
2605 rtx last = get_last_insn ();
2606 rtx pat;
2607
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2612
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 if (pat)
2615 {
2616 emit_insn (pat);
2617 return 0;
2618 }
2619 else
2620 delete_insns_since (last);
2621 }
2622 }
2623
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2625
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2629
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2633
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2637 emit_queue.
2638
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2645
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648 #else
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2652 #endif
2653
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2657
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2660
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2663 incorrect code.
2664
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2668 {
2669 tree fntype;
2670
2671 /* This was copied from except.c, I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2684 }
2685
2686 /* We need to make an argument list for the function call.
2687
2688          memset has three arguments, the first is a void * address, the
2689          second an integer with the initialization value, the last is a
2690          size_t byte count for the copy.  */
2691 arg_list
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2694 object));
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2701
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2708
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2710 #else
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2714 #endif
2715 }
2716 }
2717
2718 return retval;
2719 }
2720
2721 /* Generate code to copy Y into X.
2722 Both Y and X must have the same mode, except that
2723 Y can be a constant with VOIDmode.
2724 This mode cannot be BLKmode; use emit_block_move for that.
2725
2726 Return the last instruction emitted. */
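
/* The canonical one-liner (the pseudo register is for illustration):

     emit_move_insn (gen_reg_rtx (SImode), GEN_INT (42));

   loads the constant 42 into a fresh SImode pseudo, forcing the constant
   into memory first if the target cannot use it as an immediate.  */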
2727
2728 rtx
2729 emit_move_insn (x, y)
2730 rtx x, y;
2731 {
2732 enum machine_mode mode = GET_MODE (x);
2733 rtx y_cst = NULL_RTX;
2734 rtx last_insn;
2735
2736 x = protect_from_queue (x, 1);
2737 y = protect_from_queue (y, 0);
2738
2739 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2740 abort ();
2741
2742 /* Never force constant_p_rtx to memory. */
2743 if (GET_CODE (y) == CONSTANT_P_RTX)
2744 ;
2745 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2746 {
2747 y_cst = y;
2748 y = force_const_mem (mode, y);
2749 }
2750
2751 /* If X or Y are memory references, verify that their addresses are valid
2752 for the machine. */
2753 if (GET_CODE (x) == MEM
2754 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2755 && ! push_operand (x, GET_MODE (x)))
2756 || (flag_force_addr
2757 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2758 x = change_address (x, VOIDmode, XEXP (x, 0));
2759
2760 if (GET_CODE (y) == MEM
2761 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2762 || (flag_force_addr
2763 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2764 y = change_address (y, VOIDmode, XEXP (y, 0));
2765
2766 if (mode == BLKmode)
2767 abort ();
2768
2769 last_insn = emit_move_insn_1 (x, y);
2770
2771 if (y_cst && GET_CODE (x) == REG)
2772 REG_NOTES (last_insn)
2773 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2774
2775 return last_insn;
2776 }
2777
2778 /* Low level part of emit_move_insn.
2779 Called just like emit_move_insn, but assumes X and Y
2780 are basically valid. */
2781
2782 rtx
2783 emit_move_insn_1 (x, y)
2784 rtx x, y;
2785 {
2786 enum machine_mode mode = GET_MODE (x);
2787 enum machine_mode submode;
2788 enum mode_class class = GET_MODE_CLASS (mode);
2789 unsigned int i;
2790
2791 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2792 abort ();
2793
2794 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2795 return
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2797
2798 /* Expand complex moves by moving real part and imag part, if possible. */
2799 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2800 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2801 * BITS_PER_UNIT),
2802 (class == MODE_COMPLEX_INT
2803 ? MODE_INT : MODE_FLOAT),
2804 0))
2805 && (mov_optab->handlers[(int) submode].insn_code
2806 != CODE_FOR_nothing))
2807 {
2808 /* Don't split destination if it is a stack push. */
2809 int stack = push_operand (x, GET_MODE (x));
2810
2811 #ifdef PUSH_ROUNDING
2812       /* In case we output to the stack, but the size is smaller than what
2813          the machine can push exactly, we need to use move instructions.  */
2814 if (stack
2815 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2816 {
2817 rtx temp;
2818 int offset1, offset2;
2819
2820 /* Do not use anti_adjust_stack, since we don't want to update
2821 stack_pointer_delta. */
2822 temp = expand_binop (Pmode,
2823 #ifdef STACK_GROWS_DOWNWARD
2824 sub_optab,
2825 #else
2826 add_optab,
2827 #endif
2828 stack_pointer_rtx,
2829 GEN_INT
2830 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 stack_pointer_rtx,
2832 0,
2833 OPTAB_LIB_WIDEN);
2834 if (temp != stack_pointer_rtx)
2835 emit_move_insn (stack_pointer_rtx, temp);
2836 #ifdef STACK_GROWS_DOWNWARD
2837 offset1 = 0;
2838 offset2 = GET_MODE_SIZE (submode);
2839 #else
2840 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2841 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2842 + GET_MODE_SIZE (submode));
2843 #endif
2844 emit_move_insn (change_address (x, submode,
2845 gen_rtx_PLUS (Pmode,
2846 stack_pointer_rtx,
2847 GEN_INT (offset1))),
2848 gen_realpart (submode, y));
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2851 stack_pointer_rtx,
2852 GEN_INT (offset2))),
2853 gen_imagpart (submode, y));
2854 }
2855 else
2856 #endif
2857 /* If this is a stack, push the highpart first, so it
2858 will be in the argument order.
2859
2860 In that case, change_address is used only to convert
2861 the mode, not to change the address. */
2862 if (stack)
2863 {
2864 /* Note that the real part always precedes the imag part in memory
2865 regardless of machine's endianness. */
2866 #ifdef STACK_GROWS_DOWNWARD
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2871 (gen_rtx_MEM (submode, XEXP (x, 0)),
2872 gen_realpart (submode, y)));
2873 #else
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_realpart (submode, y)));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
2879 gen_imagpart (submode, y)));
2880 #endif
2881 }
2882 else
2883 {
2884 rtx realpart_x, realpart_y;
2885 rtx imagpart_x, imagpart_y;
2886
2887 /* If this is a complex value with each part being smaller than a
2888 word, the usual calling sequence will likely pack the pieces into
2889 a single register. Unfortunately, SUBREG of hard registers only
2890 deals in terms of words, so we have a problem converting input
2891 arguments to the CONCAT of two registers that is used elsewhere
2892 for complex values. If this is before reload, we can copy it into
2893 memory and reload. FIXME, we should see about using extract and
2894 insert on integer registers, but complex short and complex char
2895 variables should be rarely used. */
2896 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2897 && (reload_in_progress | reload_completed) == 0)
2898 {
2899 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2900 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2901
2902 if (packed_dest_p || packed_src_p)
2903 {
2904 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2905 ? MODE_FLOAT : MODE_INT);
2906
2907 enum machine_mode reg_mode
2908 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2909
2910 if (reg_mode != BLKmode)
2911 {
2912 rtx mem = assign_stack_temp (reg_mode,
2913 GET_MODE_SIZE (mode), 0);
2914 rtx cmem = change_address (mem, mode, NULL_RTX);
2915
2916 cfun->cannot_inline
2917 = N_("function using short complex types cannot be inline");
2918
2919 if (packed_dest_p)
2920 {
2921 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2922 emit_move_insn_1 (cmem, y);
2923 return emit_move_insn_1 (sreg, mem);
2924 }
2925 else
2926 {
2927 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2928 emit_move_insn_1 (mem, sreg);
2929 return emit_move_insn_1 (x, cmem);
2930 }
2931 }
2932 }
2933 }
2934
2935 realpart_x = gen_realpart (submode, x);
2936 realpart_y = gen_realpart (submode, y);
2937 imagpart_x = gen_imagpart (submode, x);
2938 imagpart_y = gen_imagpart (submode, y);
2939
2940 /* Show the output dies here. This is necessary for SUBREGs
2941 of pseudos since we cannot track their lifetimes correctly;
2942 hard regs shouldn't appear here except as return values.
2943 We never want to emit such a clobber after reload. */
2944 if (x != y
2945 && ! (reload_in_progress || reload_completed)
2946 && (GET_CODE (realpart_x) == SUBREG
2947 || GET_CODE (imagpart_x) == SUBREG))
2948 {
2949 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2950 }
2951
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (realpart_x, realpart_y));
2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2955 (imagpart_x, imagpart_y));
2956 }
2957
2958 return get_last_insn ();
2959 }
2960
2961 /* This will handle any multi-word mode that lacks a move_insn pattern.
2962 However, you will get better code if you define such patterns,
2963 even if they must turn into multiple assembler instructions. */
2964 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2965 {
2966 rtx last_insn = 0;
2967 rtx seq, inner;
2968 int need_clobber;
2969
2970 #ifdef PUSH_ROUNDING
2971
2972 /* If X is a push on the stack, do the push now and replace
2973 X with a reference to the stack pointer. */
2974 if (push_operand (x, GET_MODE (x)))
2975 {
2976 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2977 x = change_address (x, VOIDmode, stack_pointer_rtx);
2978 }
2979 #endif
2980
2981 /* If we are in reload, see if either operand is a MEM whose address
2982 is scheduled for replacement. */
2983 if (reload_in_progress && GET_CODE (x) == MEM
2984 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2985 {
2986 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2987
2988 MEM_COPY_ATTRIBUTES (new, x);
2989 x = new;
2990 }
2991 if (reload_in_progress && GET_CODE (y) == MEM
2992 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2993 {
2994 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2995
2996 MEM_COPY_ATTRIBUTES (new, y);
2997 y = new;
2998 }
2999
3000 start_sequence ();
3001
3002 need_clobber = 0;
3003 for (i = 0;
3004 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3005 i++)
3006 {
3007 rtx xpart = operand_subword (x, i, 1, mode);
3008 rtx ypart = operand_subword (y, i, 1, mode);
3009
3010 /* If we can't get a part of Y, put Y into memory if it is a
3011 constant. Otherwise, force it into a register. If we still
3012 can't get a part of Y, abort. */
3013 if (ypart == 0 && CONSTANT_P (y))
3014 {
3015 y = force_const_mem (mode, y);
3016 ypart = operand_subword (y, i, 1, mode);
3017 }
3018 else if (ypart == 0)
3019 ypart = operand_subword_force (y, i, mode);
3020
3021 if (xpart == 0 || ypart == 0)
3022 abort ();
3023
3024 need_clobber |= (GET_CODE (xpart) == SUBREG);
3025
3026 last_insn = emit_move_insn (xpart, ypart);
3027 }
3028
3029 seq = gen_sequence ();
3030 end_sequence ();
3031
3032 /* Show the output dies here. This is necessary for SUBREGs
3033 of pseudos since we cannot track their lifetimes correctly;
3034 hard regs shouldn't appear here except as return values.
3035 We never want to emit such a clobber after reload. */
3036 if (x != y
3037 && ! (reload_in_progress || reload_completed)
3038 && need_clobber != 0)
3039 {
3040 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3041 }
3042
3043 emit_insn (seq);
3044
3045 return last_insn;
3046 }
3047 else
3048 abort ();
3049 }
3050 \f
3051 /* Pushing data onto the stack. */
3052
3053 /* Push a block of length SIZE (perhaps variable)
3054 and return an rtx to address the beginning of the block.
3055 Note that it is not possible for the value returned to be a QUEUED.
3056 The value may be virtual_outgoing_args_rtx.
3057
3058 EXTRA is the number of bytes of padding to push in addition to SIZE.
3059 BELOW nonzero means this padding comes at low addresses;
3060 otherwise, the padding comes at high addresses. */
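
/* For illustration, making room for a 24-byte BLKmode argument with 4 bytes
   of padding below it would be

     rtx addr = push_block (GEN_INT (24), 4, 1);

   ADDR then addresses the beginning of the newly allocated block.  */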
3061
3062 rtx
3063 push_block (size, extra, below)
3064 rtx size;
3065 int extra, below;
3066 {
3067 register rtx temp;
3068
3069 size = convert_modes (Pmode, ptr_mode, size, 1);
3070 if (CONSTANT_P (size))
3071 anti_adjust_stack (plus_constant (size, extra));
3072 else if (GET_CODE (size) == REG && extra == 0)
3073 anti_adjust_stack (size);
3074 else
3075 {
3076 temp = copy_to_mode_reg (Pmode, size);
3077 if (extra != 0)
3078 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3079 temp, 0, OPTAB_LIB_WIDEN);
3080 anti_adjust_stack (temp);
3081 }
3082
3083 #ifndef STACK_GROWS_DOWNWARD
3084 #ifdef ARGS_GROW_DOWNWARD
3085 if (!ACCUMULATE_OUTGOING_ARGS)
3086 #else
3087 if (0)
3088 #endif
3089 #else
3090 if (1)
3091 #endif
3092 {
3093 /* Return the lowest stack address when STACK or ARGS grow downward and
3094          we are not accumulating outgoing arguments (the c4x port uses such
3095 conventions). */
3096 temp = virtual_outgoing_args_rtx;
3097 if (extra != 0 && below)
3098 temp = plus_constant (temp, extra);
3099 }
3100 else
3101 {
3102 if (GET_CODE (size) == CONST_INT)
3103 temp = plus_constant (virtual_outgoing_args_rtx,
3104 -INTVAL (size) - (below ? 0 : extra));
3105 else if (extra != 0 && !below)
3106 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3107 negate_rtx (Pmode, plus_constant (size, extra)));
3108 else
3109 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3110 negate_rtx (Pmode, size));
3111 }
3112
3113 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3114 }
3115
3116
3117 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3118 block of SIZE bytes. */
3119
3120 static rtx
3121 get_push_address (size)
3122 int size;
3123 {
3124 register rtx temp;
3125
3126 if (STACK_PUSH_CODE == POST_DEC)
3127 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3128 else if (STACK_PUSH_CODE == POST_INC)
3129 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3130 else
3131 temp = stack_pointer_rtx;
3132
3133 return copy_to_reg (temp);
3134 }
3135
3136 /* Emit single push insn. */
3137 static void
3138 emit_single_push_insn (mode, x, type)
3139 rtx x;
3140 enum machine_mode mode;
3141 tree type;
3142 {
3143 #ifdef PUSH_ROUNDING
3144 rtx dest_addr;
3145 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3146 rtx dest;
3147
3148 if (GET_MODE_SIZE (mode) == rounded_size)
3149 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3150 else
3151 {
3152 #ifdef STACK_GROWS_DOWNWARD
3153 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3154 GEN_INT (-rounded_size));
3155 #else
3156 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3157 GEN_INT (rounded_size));
3158 #endif
3159 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3160 }
3161
3162 dest = gen_rtx_MEM (mode, dest_addr);
3163
3164 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3165
3166 if (type != 0)
3167 {
3168 set_mem_attributes (dest, type, 1);
3169 /* Function incoming arguments may overlap with sibling call
3170 outgoing arguments and we cannot allow reordering of reads
3171 from function arguments with stores to outgoing arguments
3172 of sibling calls. */
3173 MEM_ALIAS_SET (dest) = 0;
3174 }
3175 emit_move_insn (dest, x);
3176 #else
3177   abort ();
3178 #endif
3179 }
3180
3181 /* Generate code to push X onto the stack, assuming it has mode MODE and
3182 type TYPE.
3183 MODE is redundant except when X is a CONST_INT (since they don't
3184 carry mode info).
3185 SIZE is an rtx for the size of data to be copied (in bytes),
3186 needed only if X is BLKmode.
3187
3188 ALIGN (in bits) is maximum alignment we can assume.
3189
3190 If PARTIAL and REG are both nonzero, then copy that many of the first
3191 words of X into registers starting with REG, and push the rest of X.
3192 The amount of space pushed is decreased by PARTIAL words,
3193 rounded *down* to a multiple of PARM_BOUNDARY.
3194 REG must be a hard register in this case.
3195    If REG is zero but PARTIAL is not, take all other actions for an
3196 argument partially in registers, but do not actually load any
3197 registers.
3198
3199 EXTRA is the amount in bytes of extra space to leave next to this arg.
3200 This is ignored if an argument block has already been allocated.
3201
3202 On a machine that lacks real push insns, ARGS_ADDR is the address of
3203 the bottom of the argument block for this call. We use indexing off there
3204    to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3205 argument block has not been preallocated.
3206
3207 ARGS_SO_FAR is the size of args previously pushed for this call.
3208
3209 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3210 for arguments passed in registers. If nonzero, it will be the number
3211 of bytes required. */
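
/* As a simple illustration (argument values assumed), pushing a SImode
   scalar X as an ordinary word-aligned stack argument, with no partial
   register part and no preallocated argument block, looks like

     emit_push_insn (x, SImode, NULL_TREE, NULL_RTX, BITS_PER_WORD,
                     0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   The BLKmode and partial-in-registers cases below wrap block copies and
   register loads around this basic pattern.  */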
3212
3213 void
3214 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3215 args_addr, args_so_far, reg_parm_stack_space,
3216 alignment_pad)
3217 register rtx x;
3218 enum machine_mode mode;
3219 tree type;
3220 rtx size;
3221 unsigned int align;
3222 int partial;
3223 rtx reg;
3224 int extra;
3225 rtx args_addr;
3226 rtx args_so_far;
3227 int reg_parm_stack_space;
3228 rtx alignment_pad;
3229 {
3230 rtx xinner;
3231 enum direction stack_direction
3232 #ifdef STACK_GROWS_DOWNWARD
3233 = downward;
3234 #else
3235 = upward;
3236 #endif
3237
3238 /* Decide where to pad the argument: `downward' for below,
3239 `upward' for above, or `none' for don't pad it.
3240 Default is below for small data on big-endian machines; else above. */
3241 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3242
3243 /* Invert direction if stack is post-update. */
3244 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3245 if (where_pad != none)
3246 where_pad = (where_pad == downward ? upward : downward);
3247
3248 xinner = x = protect_from_queue (x, 0);
3249
3250 if (mode == BLKmode)
3251 {
3252 /* Copy a block into the stack, entirely or partially. */
3253
3254 register rtx temp;
3255 int used = partial * UNITS_PER_WORD;
3256 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3257 int skip;
3258
3259 if (size == 0)
3260 abort ();
3261
3262 used -= offset;
3263
3264 /* USED is now the # of bytes we need not copy to the stack
3265 because registers will take care of them. */
3266
3267 if (partial != 0)
3268 xinner = change_address (xinner, BLKmode,
3269 plus_constant (XEXP (xinner, 0), used));
3270
3271 /* If the partial register-part of the arg counts in its stack size,
3272 skip the part of stack space corresponding to the registers.
3273 Otherwise, start copying to the beginning of the stack space,
3274 by setting SKIP to 0. */
3275 skip = (reg_parm_stack_space == 0) ? 0 : used;
3276
3277 #ifdef PUSH_ROUNDING
3278 /* Do it with several push insns if that doesn't take lots of insns
3279 and if there is no difficulty with push insns that skip bytes
3280 on the stack for alignment purposes. */
3281 if (args_addr == 0
3282 && PUSH_ARGS
3283 && GET_CODE (size) == CONST_INT
3284 && skip == 0
3285 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3286 /* Here we avoid the case of a structure whose weak alignment
3287 forces many pushes of a small amount of data,
3288 and such small pushes do rounding that causes trouble. */
3289 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3290 || align >= BIGGEST_ALIGNMENT
3291 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3292 == (align / BITS_PER_UNIT)))
3293 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3294 {
3295 /* Push padding now if padding above and stack grows down,
3296 or if padding below and stack grows up.
3297 But if space already allocated, this has already been done. */
3298 if (extra && args_addr == 0
3299 && where_pad != none && where_pad != stack_direction)
3300 anti_adjust_stack (GEN_INT (extra));
3301
3302 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3303
3304 if (current_function_check_memory_usage && ! in_check_memory_usage)
3305 {
3306 rtx temp;
3307
3308 in_check_memory_usage = 1;
3309 temp = get_push_address (INTVAL (size) - used);
3310 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3311 emit_library_call (chkr_copy_bitmap_libfunc,
3312 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3313 Pmode, XEXP (xinner, 0), Pmode,
3314 GEN_INT (INTVAL (size) - used),
3315 TYPE_MODE (sizetype));
3316 else
3317 emit_library_call (chkr_set_right_libfunc,
3318 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3319 Pmode, GEN_INT (INTVAL (size) - used),
3320 TYPE_MODE (sizetype),
3321 GEN_INT (MEMORY_USE_RW),
3322 TYPE_MODE (integer_type_node));
3323 in_check_memory_usage = 0;
3324 }
3325 }
3326 else
3327 #endif /* PUSH_ROUNDING */
3328 {
3329 rtx target;
3330
3331 /* Otherwise make space on the stack and copy the data
3332 to the address of that space. */
3333
3334 /* Deduct words put into registers from the size we must copy. */
3335 if (partial != 0)
3336 {
3337 if (GET_CODE (size) == CONST_INT)
3338 size = GEN_INT (INTVAL (size) - used);
3339 else
3340 size = expand_binop (GET_MODE (size), sub_optab, size,
3341 GEN_INT (used), NULL_RTX, 0,
3342 OPTAB_LIB_WIDEN);
3343 }
3344
3345 /* Get the address of the stack space.
3346 In this case, we do not deal with EXTRA separately.
3347 A single stack adjust will do. */
3348 if (! args_addr)
3349 {
3350 temp = push_block (size, extra, where_pad == downward);
3351 extra = 0;
3352 }
3353 else if (GET_CODE (args_so_far) == CONST_INT)
3354 temp = memory_address (BLKmode,
3355 plus_constant (args_addr,
3356 skip + INTVAL (args_so_far)));
3357 else
3358 temp = memory_address (BLKmode,
3359 plus_constant (gen_rtx_PLUS (Pmode,
3360 args_addr,
3361 args_so_far),
3362 skip));
3363 if (current_function_check_memory_usage && ! in_check_memory_usage)
3364 {
3365 in_check_memory_usage = 1;
3366 target = copy_to_reg (temp);
3367 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3368 emit_library_call (chkr_copy_bitmap_libfunc,
3369 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3370 target, Pmode,
3371 XEXP (xinner, 0), Pmode,
3372 size, TYPE_MODE (sizetype));
3373 else
3374 emit_library_call (chkr_set_right_libfunc,
3375 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3376 target, Pmode,
3377 size, TYPE_MODE (sizetype),
3378 GEN_INT (MEMORY_USE_RW),
3379 TYPE_MODE (integer_type_node));
3380 in_check_memory_usage = 0;
3381 }
3382
3383 target = gen_rtx_MEM (BLKmode, temp);
3384
3385 if (type != 0)
3386 {
3387 set_mem_attributes (target, type, 1);
3388 /* Function incoming arguments may overlap with sibling call
3389 outgoing arguments and we cannot allow reordering of reads
3390 from function arguments with stores to outgoing arguments
3391 of sibling calls. */
3392 MEM_ALIAS_SET (target) = 0;
3393 }
3394
3395 /* TEMP is the address of the block. Copy the data there. */
3396 if (GET_CODE (size) == CONST_INT
3397 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3398 {
3399 move_by_pieces (target, xinner, INTVAL (size), align);
3400 goto ret;
3401 }
3402 else
3403 {
3404 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3405 enum machine_mode mode;
3406
3407 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3408 mode != VOIDmode;
3409 mode = GET_MODE_WIDER_MODE (mode))
3410 {
3411 enum insn_code code = movstr_optab[(int) mode];
3412 insn_operand_predicate_fn pred;
3413
3414 if (code != CODE_FOR_nothing
3415 && ((GET_CODE (size) == CONST_INT
3416 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3417 <= (GET_MODE_MASK (mode) >> 1)))
3418 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3419 && (!(pred = insn_data[(int) code].operand[0].predicate)
3420 || ((*pred) (target, BLKmode)))
3421 && (!(pred = insn_data[(int) code].operand[1].predicate)
3422 || ((*pred) (xinner, BLKmode)))
3423 && (!(pred = insn_data[(int) code].operand[3].predicate)
3424 || ((*pred) (opalign, VOIDmode))))
3425 {
3426 rtx op2 = convert_to_mode (mode, size, 1);
3427 rtx last = get_last_insn ();
3428 rtx pat;
3429
3430 pred = insn_data[(int) code].operand[2].predicate;
3431 if (pred != 0 && ! (*pred) (op2, mode))
3432 op2 = copy_to_mode_reg (mode, op2);
3433
3434 pat = GEN_FCN ((int) code) (target, xinner,
3435 op2, opalign);
3436 if (pat)
3437 {
3438 emit_insn (pat);
3439 goto ret;
3440 }
3441 else
3442 delete_insns_since (last);
3443 }
3444 }
3445 }
3446
3447 if (!ACCUMULATE_OUTGOING_ARGS)
3448 {
3449 /* If the source is referenced relative to the stack pointer,
3450 copy it to another register to stabilize it. We do not need
3451 to do this if we know that we won't be changing sp. */
3452
3453 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3454 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3455 temp = copy_to_reg (temp);
3456 }
3457
3458 /* Make inhibit_defer_pop nonzero around the library call
3459 to force it to pop the bcopy-arguments right away. */
3460 NO_DEFER_POP;
3461 #ifdef TARGET_MEM_FUNCTIONS
3462 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3463 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3464 convert_to_mode (TYPE_MODE (sizetype),
3465 size, TREE_UNSIGNED (sizetype)),
3466 TYPE_MODE (sizetype));
3467 #else
3468 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3469 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3470 convert_to_mode (TYPE_MODE (integer_type_node),
3471 size,
3472 TREE_UNSIGNED (integer_type_node)),
3473 TYPE_MODE (integer_type_node));
3474 #endif
3475 OK_DEFER_POP;
3476 }
3477 }
3478 else if (partial > 0)
3479 {
3480 /* Scalar partly in registers. */
3481
3482 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483 int i;
3484 int not_stack;
3485 /* # words of start of argument
3486 that we must make space for but need not store. */
3487 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3488 int args_offset = INTVAL (args_so_far);
3489 int skip;
3490
3491 /* Push padding now if padding above and stack grows down,
3492 or if padding below and stack grows up.
3493 But if space already allocated, this has already been done. */
3494 if (extra && args_addr == 0
3495 && where_pad != none && where_pad != stack_direction)
3496 anti_adjust_stack (GEN_INT (extra));
3497
3498 /* If we make space by pushing it, we might as well push
3499 the real data. Otherwise, we can leave OFFSET nonzero
3500 and leave the space uninitialized. */
3501 if (args_addr == 0)
3502 offset = 0;
3503
3504 /* Now NOT_STACK gets the number of words that we don't need to
3505 allocate on the stack. */
3506 not_stack = partial - offset;
3507
3508 /* If the partial register-part of the arg counts in its stack size,
3509 skip the part of stack space corresponding to the registers.
3510 Otherwise, start copying to the beginning of the stack space,
3511 by setting SKIP to 0. */
3512 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3513
3514 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3515 x = validize_mem (force_const_mem (mode, x));
3516
3517 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3518 SUBREGs of such registers are not allowed. */
3519 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3520 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3521 x = copy_to_reg (x);
3522
3523 /* Loop over all the words allocated on the stack for this arg. */
3524 /* We can do it by words, because any scalar bigger than a word
3525 has a size a multiple of a word. */
3526 #ifndef PUSH_ARGS_REVERSED
3527 for (i = not_stack; i < size; i++)
3528 #else
3529 for (i = size - 1; i >= not_stack; i--)
3530 #endif
3531 if (i >= not_stack + offset)
3532 emit_push_insn (operand_subword_force (x, i, mode),
3533 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3534 0, args_addr,
3535 GEN_INT (args_offset + ((i - not_stack + skip)
3536 * UNITS_PER_WORD)),
3537 reg_parm_stack_space, alignment_pad);
3538 }
3539 else
3540 {
3541 rtx addr;
3542 rtx target = NULL_RTX;
3543 rtx dest;
3544
3545 /* Push padding now if padding above and stack grows down,
3546 or if padding below and stack grows up.
3547 But if space already allocated, this has already been done. */
3548 if (extra && args_addr == 0
3549 && where_pad != none && where_pad != stack_direction)
3550 anti_adjust_stack (GEN_INT (extra));
3551
3552 #ifdef PUSH_ROUNDING
3553 if (args_addr == 0 && PUSH_ARGS)
3554 emit_single_push_insn (mode, x, type);
3555 else
3556 #endif
3557 {
3558 if (GET_CODE (args_so_far) == CONST_INT)
3559 addr
3560 = memory_address (mode,
3561 plus_constant (args_addr,
3562 INTVAL (args_so_far)));
3563 else
3564 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565 args_so_far));
3566 target = addr;
3567 dest = gen_rtx_MEM (mode, addr);
3568 if (type != 0)
3569 {
3570 set_mem_attributes (dest, type, 1);
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 MEM_ALIAS_SET (dest) = 0;
3576 }
3577
3578 emit_move_insn (dest, x);
3579
3580 }
3581
3582 if (current_function_check_memory_usage && ! in_check_memory_usage)
3583 {
3584 in_check_memory_usage = 1;
3585 if (target == 0)
3586 target = get_push_address (GET_MODE_SIZE (mode));
3587
3588 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3589 emit_library_call (chkr_copy_bitmap_libfunc,
3590 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3591 Pmode, XEXP (x, 0), Pmode,
3592 GEN_INT (GET_MODE_SIZE (mode)),
3593 TYPE_MODE (sizetype));
3594 else
3595 emit_library_call (chkr_set_right_libfunc,
3596 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3597 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3598 TYPE_MODE (sizetype),
3599 GEN_INT (MEMORY_USE_RW),
3600 TYPE_MODE (integer_type_node));
3601 in_check_memory_usage = 0;
3602 }
3603 }
3604
3605 ret:
3606 /* If part should go in registers, copy that part
3607 into the appropriate registers. Do this now, at the end,
3608 since mem-to-mem copies above may do function calls. */
3609 if (partial > 0 && reg != 0)
3610 {
3611 /* Handle calls that pass values in multiple non-contiguous locations.
3612 The Irix 6 ABI has examples of this. */
3613 if (GET_CODE (reg) == PARALLEL)
3614 emit_group_load (reg, x, -1, align); /* ??? size? */
3615 else
3616 move_block_to_reg (REGNO (reg), x, partial, mode);
3617 }
3618
3619 if (extra && args_addr == 0 && where_pad == stack_direction)
3620 anti_adjust_stack (GEN_INT (extra));
3621
3622 if (alignment_pad && args_addr == 0)
3623 anti_adjust_stack (alignment_pad);
3624 }
3625 \f
3626 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3627 operations. */
3628
3629 static rtx
3630 get_subtarget (x)
3631 rtx x;
3632 {
3633 return ((x == 0
3634 /* Only registers can be subtargets. */
3635 || GET_CODE (x) != REG
3636 /* If the register is readonly, it can't be set more than once. */
3637 || RTX_UNCHANGING_P (x)
3638 /* Don't use hard regs to avoid extending their life. */
3639 || REGNO (x) < FIRST_PSEUDO_REGISTER
3640 /* Avoid subtargets inside loops,
3641 since they hide some invariant expressions. */
3642 || preserve_subexpressions_p ())
3643 ? 0 : x);
3644 }
3645
3646 /* Expand an assignment that stores the value of FROM into TO.
3647 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3648 (This may contain a QUEUED rtx;
3649 if the value is constant, this rtx is a constant.)
3650 Otherwise, the returned value is NULL_RTX.
3651
3652 SUGGEST_REG is no longer actually used.
3653 It used to mean, copy the value through a register
3654 and return that register, if that is possible.
3655 We now use WANT_VALUE to decide whether to do this. */
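
/* In outline (illustrative): for a C assignment such as `s.f = n', the
   front end hands this function TO = the COMPONENT_REF for `s.f' and
   FROM = the expression for `n', roughly

     expand_assignment (to_tree, from_tree, /*want_value=*/0, 0);

   and the component/bit-field handling below decides whether a plain move
   or a store_bit_field is needed.  */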
3656
3657 rtx
3658 expand_assignment (to, from, want_value, suggest_reg)
3659 tree to, from;
3660 int want_value;
3661 int suggest_reg ATTRIBUTE_UNUSED;
3662 {
3663 register rtx to_rtx = 0;
3664 rtx result;
3665
3666 /* Don't crash if the lhs of the assignment was erroneous. */
3667
3668 if (TREE_CODE (to) == ERROR_MARK)
3669 {
3670 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3671 return want_value ? result : NULL_RTX;
3672 }
3673
3674 /* Assignment of a structure component needs special treatment
3675 if the structure component's rtx is not simply a MEM.
3676 Assignment of an array element at a constant index, and assignment of
3677 an array element in an unaligned packed structure field, have the same
3678 problem. */
3679
3680 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3681 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3682 {
3683 enum machine_mode mode1;
3684 HOST_WIDE_INT bitsize, bitpos;
3685 tree offset;
3686 int unsignedp;
3687 int volatilep = 0;
3688 tree tem;
3689 unsigned int alignment;
3690
3691 push_temp_slots ();
3692 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3693 &unsignedp, &volatilep, &alignment);
3694
3695 /* If we are going to use store_bit_field and extract_bit_field,
3696 make sure to_rtx will be safe for multiple use. */
3697
3698 if (mode1 == VOIDmode && want_value)
3699 tem = stabilize_reference (tem);
3700
3701 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3702 if (offset != 0)
3703 {
3704 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3705
3706 if (GET_CODE (to_rtx) != MEM)
3707 abort ();
3708
3709 if (GET_MODE (offset_rtx) != ptr_mode)
3710 {
3711 #ifdef POINTERS_EXTEND_UNSIGNED
3712 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3713 #else
3714 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3715 #endif
3716 }
3717
3718 /* A constant address in TO_RTX can have VOIDmode; we must not try
3719 to call force_reg for that case. Avoid that case. */
3720 if (GET_CODE (to_rtx) == MEM
3721 && GET_MODE (to_rtx) == BLKmode
3722 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3723 && bitsize
3724 && (bitpos % bitsize) == 0
3725 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3726 && alignment == GET_MODE_ALIGNMENT (mode1))
3727 {
3728 rtx temp = change_address (to_rtx, mode1,
3729 plus_constant (XEXP (to_rtx, 0),
3730 (bitpos /
3731 BITS_PER_UNIT)));
3732 if (GET_CODE (XEXP (temp, 0)) == REG)
3733 to_rtx = temp;
3734 else
3735 to_rtx = change_address (to_rtx, mode1,
3736 force_reg (GET_MODE (XEXP (temp, 0)),
3737 XEXP (temp, 0)));
3738 bitpos = 0;
3739 }
3740
3741 to_rtx = change_address (to_rtx, VOIDmode,
3742 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3743 force_reg (ptr_mode,
3744 offset_rtx)));
3745 }
3746
3747 if (volatilep)
3748 {
3749 if (GET_CODE (to_rtx) == MEM)
3750 {
3751 /* When the offset is zero, to_rtx is the address of the
3752 structure we are storing into, and hence may be shared.
3753 We must make a new MEM before setting the volatile bit. */
3754 if (offset == 0)
3755 to_rtx = copy_rtx (to_rtx);
3756
3757 MEM_VOLATILE_P (to_rtx) = 1;
3758 }
3759 #if 0 /* This was turned off because, when a field is volatile
3760 in an object which is not volatile, the object may be in a register,
3761 and then we would abort over here. */
3762 else
3763 abort ();
3764 #endif
3765 }
3766
3767 if (TREE_CODE (to) == COMPONENT_REF
3768 && TREE_READONLY (TREE_OPERAND (to, 1)))
3769 {
3770 if (offset == 0)
3771 to_rtx = copy_rtx (to_rtx);
3772
3773 RTX_UNCHANGING_P (to_rtx) = 1;
3774 }
3775
3776 /* Check the access. */
3777 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3778 {
3779 rtx to_addr;
3780 int size;
3781 int best_mode_size;
3782 enum machine_mode best_mode;
3783
3784 best_mode = get_best_mode (bitsize, bitpos,
3785 TYPE_ALIGN (TREE_TYPE (tem)),
3786 mode1, volatilep);
3787 if (best_mode == VOIDmode)
3788 best_mode = QImode;
3789
3790 best_mode_size = GET_MODE_BITSIZE (best_mode);
3791 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3792 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3793 size *= GET_MODE_SIZE (best_mode);
3794
3795 /* Check the access right of the pointer. */
3796 in_check_memory_usage = 1;
3797 if (size)
3798 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3799 VOIDmode, 3, to_addr, Pmode,
3800 GEN_INT (size), TYPE_MODE (sizetype),
3801 GEN_INT (MEMORY_USE_WO),
3802 TYPE_MODE (integer_type_node));
3803 in_check_memory_usage = 0;
3804 }
3805
3806 /* If this is a varying-length object, we must get the address of
3807 the source and do an explicit block move. */
3808 if (bitsize < 0)
3809 {
3810 unsigned int from_align;
3811 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3812 rtx inner_to_rtx
3813 = change_address (to_rtx, VOIDmode,
3814 plus_constant (XEXP (to_rtx, 0),
3815 bitpos / BITS_PER_UNIT));
3816
3817 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3818 MIN (alignment, from_align));
3819 free_temp_slots ();
3820 pop_temp_slots ();
3821 return to_rtx;
3822 }
3823 else
3824 {
3825 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3826 (want_value
3827 /* Spurious cast for HPUX compiler. */
3828 ? ((enum machine_mode)
3829 TYPE_MODE (TREE_TYPE (to)))
3830 : VOIDmode),
3831 unsignedp,
3832 alignment,
3833 int_size_in_bytes (TREE_TYPE (tem)),
3834 get_alias_set (to));
3835
3836 preserve_temp_slots (result);
3837 free_temp_slots ();
3838 pop_temp_slots ();
3839
3840 /* If the value is meaningful, convert RESULT to the proper mode.
3841 Otherwise, return nothing. */
3842 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3843 TYPE_MODE (TREE_TYPE (from)),
3844 result,
3845 TREE_UNSIGNED (TREE_TYPE (to)))
3846 : NULL_RTX);
3847 }
3848 }
3849
3850 /* If the rhs is a function call and its value is not an aggregate,
3851 call the function before we start to compute the lhs.
3852 This is needed for correct code for cases such as
3853 val = setjmp (buf) on machines where reference to val
3854 requires loading up part of an address in a separate insn.
3855
3856 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3857 since it might be a promoted variable where the zero- or sign-extension
3858 needs to be done. Handling this in the normal way is safe because no
3859 computation is done before the call. */
3860 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3861 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3862 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3863 && GET_CODE (DECL_RTL (to)) == REG))
3864 {
3865 rtx value;
3866
3867 push_temp_slots ();
3868 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3869 if (to_rtx == 0)
3870 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3871
3872 /* Handle calls that return values in multiple non-contiguous locations.
3873 The Irix 6 ABI has examples of this. */
3874 if (GET_CODE (to_rtx) == PARALLEL)
3875 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3876 TYPE_ALIGN (TREE_TYPE (from)));
3877 else if (GET_MODE (to_rtx) == BLKmode)
3878 emit_block_move (to_rtx, value, expr_size (from),
3879 TYPE_ALIGN (TREE_TYPE (from)));
3880 else
3881 {
3882 #ifdef POINTERS_EXTEND_UNSIGNED
3883 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3884 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3885 value = convert_memory_address (GET_MODE (to_rtx), value);
3886 #endif
3887 emit_move_insn (to_rtx, value);
3888 }
3889 preserve_temp_slots (to_rtx);
3890 free_temp_slots ();
3891 pop_temp_slots ();
3892 return want_value ? to_rtx : NULL_RTX;
3893 }
3894
3895 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3896 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3897
3898 if (to_rtx == 0)
3899 {
3900 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3901 if (GET_CODE (to_rtx) == MEM)
3902 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3903 }
3904
3905 /* Don't move directly into a return register. */
3906 if (TREE_CODE (to) == RESULT_DECL
3907 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3908 {
3909 rtx temp;
3910
3911 push_temp_slots ();
3912 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3913
3914 if (GET_CODE (to_rtx) == PARALLEL)
3915 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3916 TYPE_ALIGN (TREE_TYPE (from)));
3917 else
3918 emit_move_insn (to_rtx, temp);
3919
3920 preserve_temp_slots (to_rtx);
3921 free_temp_slots ();
3922 pop_temp_slots ();
3923 return want_value ? to_rtx : NULL_RTX;
3924 }
3925
3926 /* In case we are returning the contents of an object which overlaps
3927 the place the value is being stored, use a safe function when copying
3928 a value through a pointer into a structure value return block. */
3929 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3930 && current_function_returns_struct
3931 && !current_function_returns_pcc_struct)
3932 {
3933 rtx from_rtx, size;
3934
3935 push_temp_slots ();
3936 size = expr_size (from);
3937 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3938 EXPAND_MEMORY_USE_DONT);
3939
3940 /* Copy the rights of the bitmap. */
3941 if (current_function_check_memory_usage)
3942 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3944 XEXP (from_rtx, 0), Pmode,
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3948
3949 #ifdef TARGET_MEM_FUNCTIONS
3950 emit_library_call (memmove_libfunc, LCT_NORMAL,
3951 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3952 XEXP (from_rtx, 0), Pmode,
3953 convert_to_mode (TYPE_MODE (sizetype),
3954 size, TREE_UNSIGNED (sizetype)),
3955 TYPE_MODE (sizetype));
3956 #else
3957 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3959 XEXP (to_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (integer_type_node),
3961 size, TREE_UNSIGNED (integer_type_node)),
3962 TYPE_MODE (integer_type_node));
3963 #endif
3964
3965 preserve_temp_slots (to_rtx);
3966 free_temp_slots ();
3967 pop_temp_slots ();
3968 return want_value ? to_rtx : NULL_RTX;
3969 }
3970
3971 /* Compute FROM and store the value in the rtx we got. */
3972
3973 push_temp_slots ();
3974 result = store_expr (from, to_rtx, want_value);
3975 preserve_temp_slots (result);
3976 free_temp_slots ();
3977 pop_temp_slots ();
3978 return want_value ? result : NULL_RTX;
3979 }
3980
3981 /* Generate code for computing expression EXP,
3982 and storing the value into TARGET.
3983 TARGET may contain a QUEUED rtx.
3984
3985 If WANT_VALUE is nonzero, return a copy of the value
3986 not in TARGET, so that we can be sure to use the proper
3987 value in a containing expression even if TARGET has something
3988 else stored in it. If possible, we copy the value through a pseudo
3989 and return that pseudo. Or, if the value is constant, we try to
3990 return the constant. In some cases, we return a pseudo
3991 copied *from* TARGET.
3992
3993 If the mode is BLKmode then we may return TARGET itself.
3994 It turns out that in BLKmode it doesn't cause a problem,
3995 because C has no operators that could combine two different
3996 assignments into the same BLKmode object with different values
3997 with no sequence point. Will other languages need this to
3998 be more thorough?
3999
4000 If WANT_VALUE is 0, we return NULL, to make sure
4001 to catch quickly any cases where the caller uses the value
4002 and fails to set WANT_VALUE. */
4003
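/* Editorial sketch, not part of the original source: the WANT_VALUE
   protocol is easiest to see from the caller's side.  For a chained
   assignment such as

       int a, b;

       void
       chain (int x)
       {
         a = b = x;
       }

   the inner store of `b = x' is expanded with WANT_VALUE nonzero so that
   a safe copy of the stored value (a pseudo, or the constant itself) is
   returned and becomes the source of the outer store, rather than
   re-reading B, which might be volatile or overlap A.  With WANT_VALUE
   zero the function deliberately returns NULL_RTX so that a caller which
   forgot to ask for the value fails quickly.  */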
4004 rtx
4005 store_expr (exp, target, want_value)
4006 register tree exp;
4007 register rtx target;
4008 int want_value;
4009 {
4010 register rtx temp;
4011 int dont_return_target = 0;
4012 int dont_store_target = 0;
4013
4014 if (TREE_CODE (exp) == COMPOUND_EXPR)
4015 {
4016 /* Perform first part of compound expression, then assign from second
4017 part. */
4018 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4019 emit_queue ();
4020 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4021 }
4022 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4023 {
4024 /* For conditional expression, get safe form of the target. Then
4025 test the condition, doing the appropriate assignment on either
4026 side. This avoids the creation of unnecessary temporaries.
4027 For non-BLKmode, it is more efficient not to do this. */
4028
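      /* Editorial example, not part of the original source: for a
         BLKmode target such as

             struct big { char data[64]; } a, b, flag_dest;
             int flag;

             void
             pick (void)
             {
               flag_dest = flag ? a : b;
             }

         each arm is stored directly into the target under its own label,
         so no 64-byte temporary is created and then copied.  */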
4029 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4030
4031 emit_queue ();
4032 target = protect_from_queue (target, 1);
4033
4034 do_pending_stack_adjust ();
4035 NO_DEFER_POP;
4036 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4037 start_cleanup_deferral ();
4038 store_expr (TREE_OPERAND (exp, 1), target, 0);
4039 end_cleanup_deferral ();
4040 emit_queue ();
4041 emit_jump_insn (gen_jump (lab2));
4042 emit_barrier ();
4043 emit_label (lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 2), target, 0);
4046 end_cleanup_deferral ();
4047 emit_queue ();
4048 emit_label (lab2);
4049 OK_DEFER_POP;
4050
4051 return want_value ? target : NULL_RTX;
4052 }
4053 else if (queued_subexp_p (target))
4054 /* If target contains a postincrement, let's not risk
4055 using it as the place to generate the rhs. */
4056 {
4057 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4058 {
4059 /* Expand EXP into a new pseudo. */
4060 temp = gen_reg_rtx (GET_MODE (target));
4061 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4062 }
4063 else
4064 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4065
4066 /* If target is volatile, ANSI requires accessing the value
4067 *from* the target, if it is accessed. So make that happen.
4068 In no case return the target itself. */
4069 if (! MEM_VOLATILE_P (target) && want_value)
4070 dont_return_target = 1;
4071 }
4072 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4073 && GET_MODE (target) != BLKmode)
4074 /* If target is in memory and caller wants value in a register instead,
4075 arrange that. Pass TARGET as target for expand_expr so that,
4076 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4077 We know expand_expr will not use the target in that case.
4078 Don't do this if TARGET is volatile because we are supposed
4079 to write it and then read it. */
4080 {
4081 temp = expand_expr (exp, target, GET_MODE (target), 0);
4082 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4083 {
4084 /* If TEMP is already in the desired TARGET, only copy it from
4085 memory and don't store it there again. */
4086 if (temp == target
4087 || (rtx_equal_p (temp, target)
4088 && ! side_effects_p (temp) && ! side_effects_p (target)))
4089 dont_store_target = 1;
4090 temp = copy_to_reg (temp);
4091 }
4092 dont_return_target = 1;
4093 }
4094 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4095 /* If this is a scalar in a register that is stored in a wider mode
4096 than the declared mode, compute the result into its declared mode
4097 and then convert to the wider mode. Our value is the computed
4098 expression. */
4099 {
4100 /* If we don't want a value, we can do the conversion inside EXP,
4101 which will often result in some optimizations. Do the conversion
4102 in two steps: first change the signedness, if needed, then
4103 the extend. But don't do this if the type of EXP is a subtype
4104 of something else since then the conversion might involve
4105 more than just converting modes. */
4106 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4107 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4108 {
4109 if (TREE_UNSIGNED (TREE_TYPE (exp))
4110 != SUBREG_PROMOTED_UNSIGNED_P (target))
4111 exp
4112 = convert
4113 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4114 TREE_TYPE (exp)),
4115 exp);
4116
4117 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4118 SUBREG_PROMOTED_UNSIGNED_P (target)),
4119 exp);
4120 }
4121
4122 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4123
4124 /* If TEMP is a volatile MEM and we want a result value, make
4125 the access now so it gets done only once. Likewise if
4126 it contains TARGET. */
4127 if (GET_CODE (temp) == MEM && want_value
4128 && (MEM_VOLATILE_P (temp)
4129 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4130 temp = copy_to_reg (temp);
4131
4132 /* If TEMP is a VOIDmode constant, use convert_modes to make
4133 sure that we properly convert it. */
4134 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4135 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4136 TYPE_MODE (TREE_TYPE (exp)), temp,
4137 SUBREG_PROMOTED_UNSIGNED_P (target));
4138
4139 convert_move (SUBREG_REG (target), temp,
4140 SUBREG_PROMOTED_UNSIGNED_P (target));
4141
4142 /* If we promoted a constant, change the mode back down to match
4143 target. Otherwise, the caller might get confused by a result whose
4144 mode is larger than expected. */
4145
4146 if (want_value && GET_MODE (temp) != GET_MODE (target)
4147 && GET_MODE (temp) != VOIDmode)
4148 {
4149 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4150 SUBREG_PROMOTED_VAR_P (temp) = 1;
4151 SUBREG_PROMOTED_UNSIGNED_P (temp)
4152 = SUBREG_PROMOTED_UNSIGNED_P (target);
4153 }
4154
4155 return want_value ? temp : NULL_RTX;
4156 }
4157 else
4158 {
4159 temp = expand_expr (exp, target, GET_MODE (target), 0);
4160 /* Return TARGET if it's a specified hardware register.
4161 If TARGET is a volatile mem ref, either return TARGET
4162 or return a reg copied *from* TARGET; ANSI requires this.
4163
4164 Otherwise, if TEMP is not TARGET, return TEMP
4165 if it is constant (for efficiency),
4166 or if we really want the correct value. */
4167 if (!(target && GET_CODE (target) == REG
4168 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4169 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4170 && ! rtx_equal_p (temp, target)
4171 && (CONSTANT_P (temp) || want_value))
4172 dont_return_target = 1;
4173 }
4174
4175 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4176 the same as that of TARGET, adjust the constant. This is needed, for
4177 example, in case it is a CONST_DOUBLE and we want only a word-sized
4178 value. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4180 && TREE_CODE (exp) != ERROR_MARK
4181 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4182 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4183 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4184
4185 if (current_function_check_memory_usage
4186 && GET_CODE (target) == MEM
4187 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4188 {
4189 in_check_memory_usage = 1;
4190 if (GET_CODE (temp) == MEM)
4191 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4192 VOIDmode, 3, XEXP (target, 0), Pmode,
4193 XEXP (temp, 0), Pmode,
4194 expr_size (exp), TYPE_MODE (sizetype));
4195 else
4196 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4197 VOIDmode, 3, XEXP (target, 0), Pmode,
4198 expr_size (exp), TYPE_MODE (sizetype),
4199 GEN_INT (MEMORY_USE_WO),
4200 TYPE_MODE (integer_type_node));
4201 in_check_memory_usage = 0;
4202 }
4203
4204 /* If value was not generated in the target, store it there.
4205 Convert the value to TARGET's type first if necessary. */
4206 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4207 one or both of them are volatile memory refs, we have to distinguish
4208 two cases:
4209 - expand_expr has used TARGET. In this case, we must not generate
4210 another copy. This can be detected by TARGET being equal according
4211 to == .
4212 - expand_expr has not used TARGET - that means that the source just
4213 happens to have the same RTX form. Since temp will have been created
4214 by expand_expr, it will compare unequal according to == .
4215 We must generate a copy in this case, to reach the correct number
4216 of volatile memory references. */
4217
4218 if ((! rtx_equal_p (temp, target)
4219 || (temp != target && (side_effects_p (temp)
4220 || side_effects_p (target))))
4221 && TREE_CODE (exp) != ERROR_MARK
4222 && ! dont_store_target)
4223 {
4224 target = protect_from_queue (target, 1);
4225 if (GET_MODE (temp) != GET_MODE (target)
4226 && GET_MODE (temp) != VOIDmode)
4227 {
4228 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4229 if (dont_return_target)
4230 {
4231 /* In this case, we will return TEMP,
4232 so make sure it has the proper mode.
4233 But don't forget to store the value into TARGET. */
4234 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4235 emit_move_insn (target, temp);
4236 }
4237 else
4238 convert_move (target, temp, unsignedp);
4239 }
4240
4241 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4242 {
4243 /* Handle copying a string constant into an array.
4244 The string constant may be shorter than the array.
4245 So copy just the string's actual length, and clear the rest. */
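          /* Editorial example, not part of the original source: for

                 char buf[8] = "hi";

             expr_size of the target is 8 while TREE_STRING_LENGTH is 3
             (the two characters plus the terminating NUL), so the code
             below copies 3 bytes and clears the remaining 5 bytes with
             clear_storage.  */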
4246 rtx size;
4247 rtx addr;
4248
4249 /* Get the size of the data type of the string,
4250 which is actually the size of the target. */
4251 size = expr_size (exp);
4252 if (GET_CODE (size) == CONST_INT
4253 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4254 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4255 else
4256 {
4257 /* Compute the size of the data to copy from the string. */
4258 tree copy_size
4259 = size_binop (MIN_EXPR,
4260 make_tree (sizetype, size),
4261 size_int (TREE_STRING_LENGTH (exp)));
4262 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4263 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4264 VOIDmode, 0);
4265 rtx label = 0;
4266
4267 /* Copy that much. */
4268 emit_block_move (target, temp, copy_size_rtx,
4269 TYPE_ALIGN (TREE_TYPE (exp)));
4270
4271 /* Figure out how much is left in TARGET that we have to clear.
4272 Do all calculations in ptr_mode. */
4273
4274 addr = XEXP (target, 0);
4275 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4276
4277 if (GET_CODE (copy_size_rtx) == CONST_INT)
4278 {
4279 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4280 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4281 align = MIN (align,
4282 (unsigned int) (BITS_PER_UNIT
4283 * (INTVAL (copy_size_rtx)
4284 & - INTVAL (copy_size_rtx))));
4285 }
4286 else
4287 {
4288 addr = force_reg (ptr_mode, addr);
4289 addr = expand_binop (ptr_mode, add_optab, addr,
4290 copy_size_rtx, NULL_RTX, 0,
4291 OPTAB_LIB_WIDEN);
4292
4293 size = expand_binop (ptr_mode, sub_optab, size,
4294 copy_size_rtx, NULL_RTX, 0,
4295 OPTAB_LIB_WIDEN);
4296
4297 align = BITS_PER_UNIT;
4298 label = gen_label_rtx ();
4299 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4300 GET_MODE (size), 0, 0, label);
4301 }
4302 align = MIN (align, expr_align (copy_size));
4303
4304 if (size != const0_rtx)
4305 {
4306 rtx dest = gen_rtx_MEM (BLKmode, addr);
4307
4308 MEM_COPY_ATTRIBUTES (dest, target);
4309
4310 /* Be sure we can write on ADDR. */
4311 in_check_memory_usage = 1;
4312 if (current_function_check_memory_usage)
4313 emit_library_call (chkr_check_addr_libfunc,
4314 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4315 addr, Pmode,
4316 size, TYPE_MODE (sizetype),
4317 GEN_INT (MEMORY_USE_WO),
4318 TYPE_MODE (integer_type_node));
4319 in_check_memory_usage = 0;
4320 clear_storage (dest, size, align);
4321 }
4322
4323 if (label)
4324 emit_label (label);
4325 }
4326 }
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
4330 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4331 TYPE_ALIGN (TREE_TYPE (exp)));
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp),
4334 TYPE_ALIGN (TREE_TYPE (exp)));
4335 else
4336 emit_move_insn (target, temp);
4337 }
4338
4339 /* If we don't want a value, return NULL_RTX. */
4340 if (! want_value)
4341 return NULL_RTX;
4342
4343 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4344 ??? The latter test doesn't seem to make sense. */
4345 else if (dont_return_target && GET_CODE (temp) != MEM)
4346 return temp;
4347
4348 /* Return TARGET itself if it is a hard register. */
4349 else if (want_value && GET_MODE (target) != BLKmode
4350 && ! (GET_CODE (target) == REG
4351 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4352 return copy_to_reg (target);
4353
4354 else
4355 return target;
4356 }
4357 \f
4358 /* Return 1 if EXP just contains zeros. */
4359
4360 static int
4361 is_zeros_p (exp)
4362 tree exp;
4363 {
4364 tree elt;
4365
4366 switch (TREE_CODE (exp))
4367 {
4368 case CONVERT_EXPR:
4369 case NOP_EXPR:
4370 case NON_LVALUE_EXPR:
4371 return is_zeros_p (TREE_OPERAND (exp, 0));
4372
4373 case INTEGER_CST:
4374 return integer_zerop (exp);
4375
4376 case COMPLEX_CST:
4377 return
4378 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4379
4380 case REAL_CST:
4381 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4382
4383 case CONSTRUCTOR:
4384 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4385 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4386 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4387 if (! is_zeros_p (TREE_VALUE (elt)))
4388 return 0;
4389
4390 return 1;
4391
4392 default:
4393 return 0;
4394 }
4395 }
4396
4397 /* Return 1 if EXP contains mostly (3/4) zeros. */
4398
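/* Editorial note, not part of the original source: "mostly" means at
   least three quarters, written as 4 * zeros >= 3 * elts to stay in
   integer arithmetic.  For instance, 6 zero values out of 8 listed
   elements qualifies (24 >= 24), while 5 out of 8 does not (20 < 24).  */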
4399 static int
4400 mostly_zeros_p (exp)
4401 tree exp;
4402 {
4403 if (TREE_CODE (exp) == CONSTRUCTOR)
4404 {
4405 int elts = 0, zeros = 0;
4406 tree elt = CONSTRUCTOR_ELTS (exp);
4407 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4408 {
4409 /* If there are no ranges of true bits, it is all zero. */
4410 return elt == NULL_TREE;
4411 }
4412 for (; elt; elt = TREE_CHAIN (elt))
4413 {
4414 /* We do not handle the case where the index is a RANGE_EXPR,
4415 so the statistic will be somewhat inaccurate.
4416 We do make a more accurate count in store_constructor itself,
4417 and since this function is only used for nested array elements,
4418 this should be close enough. */
4419 if (mostly_zeros_p (TREE_VALUE (elt)))
4420 zeros++;
4421 elts++;
4422 }
4423
4424 return 4 * zeros >= 3 * elts;
4425 }
4426
4427 return is_zeros_p (exp);
4428 }
4429 \f
4430 /* Helper function for store_constructor.
4431 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4432 TYPE is the type of the CONSTRUCTOR, not the element type.
4433 ALIGN and CLEARED are as for store_constructor.
4434 ALIAS_SET is the alias set to use for any stores.
4435
4436 This provides a recursive shortcut back to store_constructor when it isn't
4437 necessary to go through store_field. This is so that we can pass through
4438 the cleared field to let store_constructor know that we may not have to
4439 clear a substructure if the outer structure has already been cleared. */
4440
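/* Editorial sketch, not part of the original source: the shortcut
   matters for nested aggregate initializers, e.g.

       struct point { int x, y; };
       struct rect  { struct point lo, hi; };

       struct rect r = { { 0, 0 }, { 100, 100 } };

   Once store_constructor has cleared R as a whole, the recursive call
   below passes CLEARED along, so the all-zero `lo' member is skipped
   entirely instead of being stored field by field.  */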
4441 static void
4442 store_constructor_field (target, bitsize, bitpos,
4443 mode, exp, type, align, cleared, alias_set)
4444 rtx target;
4445 unsigned HOST_WIDE_INT bitsize;
4446 HOST_WIDE_INT bitpos;
4447 enum machine_mode mode;
4448 tree exp, type;
4449 unsigned int align;
4450 int cleared;
4451 int alias_set;
4452 {
4453 if (TREE_CODE (exp) == CONSTRUCTOR
4454 && bitpos % BITS_PER_UNIT == 0
4455 /* If we have a non-zero bitpos for a register target, then we just
4456 let store_field do the bitfield handling. This is unlikely to
4457 generate unnecessary clear instructions anyway. */
4458 && (bitpos == 0 || GET_CODE (target) == MEM))
4459 {
4460 if (bitpos != 0)
4461 target
4462 = change_address (target,
4463 GET_MODE (target) == BLKmode
4464 || 0 != (bitpos
4465 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4466 ? BLKmode : VOIDmode,
4467 plus_constant (XEXP (target, 0),
4468 bitpos / BITS_PER_UNIT));
4469
4470
4471 /* Show the alignment may no longer be what it was and update the alias
4472 set, if required. */
4473 if (bitpos != 0)
4474 align = MIN (align, (unsigned int) bitpos & - bitpos);
4475 if (GET_CODE (target) == MEM)
4476 MEM_ALIAS_SET (target) = alias_set;
4477
4478 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4479 }
4480 else
4481 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4482 int_size_in_bytes (type), alias_set);
4483 }
4484
4485 /* Store the value of constructor EXP into the rtx TARGET.
4486 TARGET is either a REG or a MEM.
4487 ALIGN is the maximum known alignment for TARGET.
4488 CLEARED is true if TARGET is known to have been zero'd.
4489 SIZE is the number of bytes of TARGET we are allowed to modify: this
4490 may not be the same as the size of EXP if we are assigning to a field
4491 which has been packed to exclude padding bits. */
4492
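/* Editorial note, not part of the original source: a worked example of
   the clear-first decisions made below.  For an array initializer that
   lists only some elements, e.g.

       int sparse[16] = { 7 };

   the element count (1) is smaller than the array bound (16), so
   need_to_clear is set, the whole object is cleared with a single
   clear_storage call, and only the nonzero element is stored afterwards.
   A fully listed initializer with at least 3/4 zero elements
   (4 * zero_count >= 3 * count) takes the same path.  */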
4493 static void
4494 store_constructor (exp, target, align, cleared, size)
4495 tree exp;
4496 rtx target;
4497 unsigned int align;
4498 int cleared;
4499 HOST_WIDE_INT size;
4500 {
4501 tree type = TREE_TYPE (exp);
4502 #ifdef WORD_REGISTER_OPERATIONS
4503 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4504 #endif
4505
4506 /* We know our target cannot conflict, since safe_from_p has been called. */
4507 #if 0
4508 /* Don't try copying piece by piece into a hard register
4509 since that is vulnerable to being clobbered by EXP.
4510 Instead, construct in a pseudo register and then copy it all. */
4511 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4512 {
4513 rtx temp = gen_reg_rtx (GET_MODE (target));
4514 store_constructor (exp, temp, align, cleared, size);
4515 emit_move_insn (target, temp);
4516 return;
4517 }
4518 #endif
4519
4520 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4521 || TREE_CODE (type) == QUAL_UNION_TYPE)
4522 {
4523 register tree elt;
4524
4525 /* Inform later passes that the whole union value is dead. */
4526 if ((TREE_CODE (type) == UNION_TYPE
4527 || TREE_CODE (type) == QUAL_UNION_TYPE)
4528 && ! cleared)
4529 {
4530 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4531
4532 /* If the constructor is empty, clear the union. */
4533 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4534 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4535 }
4536
4537 /* If we are building a static constructor into a register,
4538 set the initial value as zero so we can fold the value into
4539 a constant. But if more than one register is involved,
4540 this probably loses. */
4541 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4542 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4543 {
4544 if (! cleared)
4545 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4546
4547 cleared = 1;
4548 }
4549
4550 /* If the constructor has fewer fields than the structure
4551 or if we are initializing the structure to mostly zeros,
4552 clear the whole structure first. Don't do this if TARGET is a
4553 register whose mode size isn't equal to SIZE since clear_storage
4554 can't handle this case. */
4555 else if (size > 0
4556 && ((list_length (CONSTRUCTOR_ELTS (exp))
4557 != fields_length (type))
4558 || mostly_zeros_p (exp))
4559 && (GET_CODE (target) != REG
4560 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4561 {
4562 if (! cleared)
4563 clear_storage (target, GEN_INT (size), align);
4564
4565 cleared = 1;
4566 }
4567 else if (! cleared)
4568 /* Inform later passes that the old value is dead. */
4569 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4570
4571 /* Store each element of the constructor into
4572 the corresponding field of TARGET. */
4573
4574 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4575 {
4576 register tree field = TREE_PURPOSE (elt);
4577 #ifdef WORD_REGISTER_OPERATIONS
4578 tree value = TREE_VALUE (elt);
4579 #endif
4580 register enum machine_mode mode;
4581 HOST_WIDE_INT bitsize;
4582 HOST_WIDE_INT bitpos = 0;
4583 int unsignedp;
4584 tree offset;
4585 rtx to_rtx = target;
4586
4587 /* Just ignore missing fields.
4588 We cleared the whole structure, above,
4589 if any fields are missing. */
4590 if (field == 0)
4591 continue;
4592
4593 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4594 continue;
4595
4596 if (host_integerp (DECL_SIZE (field), 1))
4597 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4598 else
4599 bitsize = -1;
4600
4601 unsignedp = TREE_UNSIGNED (field);
4602 mode = DECL_MODE (field);
4603 if (DECL_BIT_FIELD (field))
4604 mode = VOIDmode;
4605
4606 offset = DECL_FIELD_OFFSET (field);
4607 if (host_integerp (offset, 0)
4608 && host_integerp (bit_position (field), 0))
4609 {
4610 bitpos = int_bit_position (field);
4611 offset = 0;
4612 }
4613 else
4614 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4615
4616 if (offset)
4617 {
4618 rtx offset_rtx;
4619
4620 if (contains_placeholder_p (offset))
4621 offset = build (WITH_RECORD_EXPR, sizetype,
4622 offset, make_tree (TREE_TYPE (exp), target));
4623
4624 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4625 if (GET_CODE (to_rtx) != MEM)
4626 abort ();
4627
4628 if (GET_MODE (offset_rtx) != ptr_mode)
4629 {
4630 #ifdef POINTERS_EXTEND_UNSIGNED
4631 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4632 #else
4633 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4634 #endif
4635 }
4636
4637 to_rtx
4638 = change_address (to_rtx, VOIDmode,
4639 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4640 force_reg (ptr_mode,
4641 offset_rtx)));
4642 align = DECL_OFFSET_ALIGN (field);
4643 }
4644
4645 if (TREE_READONLY (field))
4646 {
4647 if (GET_CODE (to_rtx) == MEM)
4648 to_rtx = copy_rtx (to_rtx);
4649
4650 RTX_UNCHANGING_P (to_rtx) = 1;
4651 }
4652
4653 #ifdef WORD_REGISTER_OPERATIONS
4654 /* If this initializes a field that is smaller than a word, at the
4655 start of a word, try to widen it to a full word.
4656 This special case allows us to output C++ member function
4657 initializations in a form that the optimizers can understand. */
4658 if (GET_CODE (target) == REG
4659 && bitsize < BITS_PER_WORD
4660 && bitpos % BITS_PER_WORD == 0
4661 && GET_MODE_CLASS (mode) == MODE_INT
4662 && TREE_CODE (value) == INTEGER_CST
4663 && exp_size >= 0
4664 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4665 {
4666 tree type = TREE_TYPE (value);
4667 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4668 {
4669 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4670 value = convert (type, value);
4671 }
4672 if (BYTES_BIG_ENDIAN)
4673 value
4674 = fold (build (LSHIFT_EXPR, type, value,
4675 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4676 bitsize = BITS_PER_WORD;
4677 mode = word_mode;
4678 }
4679 #endif
4680 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4681 TREE_VALUE (elt), type, align, cleared,
4682 (DECL_NONADDRESSABLE_P (field)
4683 && GET_CODE (to_rtx) == MEM)
4684 ? MEM_ALIAS_SET (to_rtx)
4685 : get_alias_set (TREE_TYPE (field)));
4686 }
4687 }
4688 else if (TREE_CODE (type) == ARRAY_TYPE)
4689 {
4690 register tree elt;
4691 register int i;
4692 int need_to_clear;
4693 tree domain = TYPE_DOMAIN (type);
4694 tree elttype = TREE_TYPE (type);
4695 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4696 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4697 HOST_WIDE_INT minelt;
4698 HOST_WIDE_INT maxelt;
4699
4700 /* If we have constant bounds for the range of the type, get them. */
4701 if (const_bounds_p)
4702 {
4703 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4704 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4705 }
4706
4707 /* If the constructor has fewer elements than the array,
4708 clear the whole array first. Similarly if this is
4709 a static constructor of a non-BLKmode object. */
4710 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4711 need_to_clear = 1;
4712 else
4713 {
4714 HOST_WIDE_INT count = 0, zero_count = 0;
4715 need_to_clear = ! const_bounds_p;
4716
4717 /* This loop is a more accurate version of the loop in
4718 mostly_zeros_p (it handles RANGE_EXPR in an index).
4719 It is also needed to check for missing elements. */
4720 for (elt = CONSTRUCTOR_ELTS (exp);
4721 elt != NULL_TREE && ! need_to_clear;
4722 elt = TREE_CHAIN (elt))
4723 {
4724 tree index = TREE_PURPOSE (elt);
4725 HOST_WIDE_INT this_node_count;
4726
4727 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4728 {
4729 tree lo_index = TREE_OPERAND (index, 0);
4730 tree hi_index = TREE_OPERAND (index, 1);
4731
4732 if (! host_integerp (lo_index, 1)
4733 || ! host_integerp (hi_index, 1))
4734 {
4735 need_to_clear = 1;
4736 break;
4737 }
4738
4739 this_node_count = (tree_low_cst (hi_index, 1)
4740 - tree_low_cst (lo_index, 1) + 1);
4741 }
4742 else
4743 this_node_count = 1;
4744
4745 count += this_node_count;
4746 if (mostly_zeros_p (TREE_VALUE (elt)))
4747 zero_count += this_node_count;
4748 }
4749
4750 /* Clear the entire array first if there are any missing elements,
4751 or if the incidence of zero elements is >= 75%. */
4752 if (! need_to_clear
4753 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4754 need_to_clear = 1;
4755 }
4756
4757 if (need_to_clear && size > 0)
4758 {
4759 if (! cleared)
4760 clear_storage (target, GEN_INT (size), align);
4761 cleared = 1;
4762 }
4763 else
4764 /* Inform later passes that the old value is dead. */
4765 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4766
4767 /* Store each element of the constructor into
4768 the corresponding element of TARGET, determined
4769 by counting the elements. */
4770 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4771 elt;
4772 elt = TREE_CHAIN (elt), i++)
4773 {
4774 register enum machine_mode mode;
4775 HOST_WIDE_INT bitsize;
4776 HOST_WIDE_INT bitpos;
4777 int unsignedp;
4778 tree value = TREE_VALUE (elt);
4779 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4780 tree index = TREE_PURPOSE (elt);
4781 rtx xtarget = target;
4782
4783 if (cleared && is_zeros_p (value))
4784 continue;
4785
4786 unsignedp = TREE_UNSIGNED (elttype);
4787 mode = TYPE_MODE (elttype);
4788 if (mode == BLKmode)
4789 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4790 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4791 : -1);
4792 else
4793 bitsize = GET_MODE_BITSIZE (mode);
4794
4795 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4796 {
4797 tree lo_index = TREE_OPERAND (index, 0);
4798 tree hi_index = TREE_OPERAND (index, 1);
4799 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4800 struct nesting *loop;
4801 HOST_WIDE_INT lo, hi, count;
4802 tree position;
4803
4804 /* If the range is constant and "small", unroll the loop. */
4805 if (const_bounds_p
4806 && host_integerp (lo_index, 0)
4807 && host_integerp (hi_index, 0)
4808 && (lo = tree_low_cst (lo_index, 0),
4809 hi = tree_low_cst (hi_index, 0),
4810 count = hi - lo + 1,
4811 (GET_CODE (target) != MEM
4812 || count <= 2
4813 || (host_integerp (TYPE_SIZE (elttype), 1)
4814 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4815 <= 40 * 8)))))
4816 {
4817 lo -= minelt; hi -= minelt;
4818 for (; lo <= hi; lo++)
4819 {
4820 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4821 store_constructor_field
4822 (target, bitsize, bitpos, mode, value, type, align,
4823 cleared,
4824 TYPE_NONALIASED_COMPONENT (type)
4825 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4826 }
4827 }
4828 else
4829 {
4830 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4831 loop_top = gen_label_rtx ();
4832 loop_end = gen_label_rtx ();
4833
4834 unsignedp = TREE_UNSIGNED (domain);
4835
4836 index = build_decl (VAR_DECL, NULL_TREE, domain);
4837
4838 index_r
4839 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4840 &unsignedp, 0));
4841 SET_DECL_RTL (index, index_r);
4842 if (TREE_CODE (value) == SAVE_EXPR
4843 && SAVE_EXPR_RTL (value) == 0)
4844 {
4845 /* Make sure value gets expanded once before the
4846 loop. */
4847 expand_expr (value, const0_rtx, VOIDmode, 0);
4848 emit_queue ();
4849 }
4850 store_expr (lo_index, index_r, 0);
4851 loop = expand_start_loop (0);
4852
4853 /* Assign value to element index. */
4854 position
4855 = convert (ssizetype,
4856 fold (build (MINUS_EXPR, TREE_TYPE (index),
4857 index, TYPE_MIN_VALUE (domain))));
4858 position = size_binop (MULT_EXPR, position,
4859 convert (ssizetype,
4860 TYPE_SIZE_UNIT (elttype)));
4861
4862 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4863 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4864 xtarget = change_address (target, mode, addr);
4865 if (TREE_CODE (value) == CONSTRUCTOR)
4866 store_constructor (value, xtarget, align, cleared,
4867 bitsize / BITS_PER_UNIT);
4868 else
4869 store_expr (value, xtarget, 0);
4870
4871 expand_exit_loop_if_false (loop,
4872 build (LT_EXPR, integer_type_node,
4873 index, hi_index));
4874
4875 expand_increment (build (PREINCREMENT_EXPR,
4876 TREE_TYPE (index),
4877 index, integer_one_node), 0, 0);
4878 expand_end_loop ();
4879 emit_label (loop_end);
4880 }
4881 }
4882 else if ((index != 0 && ! host_integerp (index, 0))
4883 || ! host_integerp (TYPE_SIZE (elttype), 1))
4884 {
4885 rtx pos_rtx, addr;
4886 tree position;
4887
4888 if (index == 0)
4889 index = ssize_int (1);
4890
4891 if (minelt)
4892 index = convert (ssizetype,
4893 fold (build (MINUS_EXPR, index,
4894 TYPE_MIN_VALUE (domain))));
4895
4896 position = size_binop (MULT_EXPR, index,
4897 convert (ssizetype,
4898 TYPE_SIZE_UNIT (elttype)));
4899 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4900 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4901 xtarget = change_address (target, mode, addr);
4902 store_expr (value, xtarget, 0);
4903 }
4904 else
4905 {
4906 if (index != 0)
4907 bitpos = ((tree_low_cst (index, 0) - minelt)
4908 * tree_low_cst (TYPE_SIZE (elttype), 1));
4909 else
4910 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4911
4912 store_constructor_field (target, bitsize, bitpos, mode, value,
4913 type, align, cleared,
4914 TYPE_NONALIASED_COMPONENT (type)
4915 && GET_CODE (target) == MEM
4916 ? MEM_ALIAS_SET (target) :
4917 get_alias_set (elttype));
4918
4919 }
4920 }
4921 }
4922
4923 /* Set constructor assignments. */
4924 else if (TREE_CODE (type) == SET_TYPE)
4925 {
4926 tree elt = CONSTRUCTOR_ELTS (exp);
4927 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4928 tree domain = TYPE_DOMAIN (type);
4929 tree domain_min, domain_max, bitlength;
4930
4931 /* The default implementation strategy is to extract the constant
4932 parts of the constructor, use that to initialize the target,
4933 and then "or" in whatever non-constant ranges we need in addition.
4934
4935 If a large set is all zero or all ones, it is
4936 probably better to set it using memset (if available) or bzero.
4937 Also, if a large set has just a single range, it may also be
4938 better to first clear the whole set (using bzero/memset), and
4939 then set the bits we want.
4940
4941 /* Check for all zeros. */
4942 if (elt == NULL_TREE && size > 0)
4943 {
4944 if (!cleared)
4945 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4946 return;
4947 }
4948
4949 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4950 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4951 bitlength = size_binop (PLUS_EXPR,
4952 size_diffop (domain_max, domain_min),
4953 ssize_int (1));
4954
4955 nbits = tree_low_cst (bitlength, 1);
4956
4957 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4958 are "complicated" (more than one range), initialize (the
4959 constant parts) by copying from a constant. */
4960 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4961 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4962 {
4963 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4964 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4965 char *bit_buffer = (char *) alloca (nbits);
4966 HOST_WIDE_INT word = 0;
4967 unsigned int bit_pos = 0;
4968 unsigned int ibit = 0;
4969 unsigned int offset = 0; /* In bytes from beginning of set. */
4970
4971 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4972 for (;;)
4973 {
4974 if (bit_buffer[ibit])
4975 {
4976 if (BYTES_BIG_ENDIAN)
4977 word |= (1 << (set_word_size - 1 - bit_pos));
4978 else
4979 word |= 1 << bit_pos;
4980 }
4981
4982 bit_pos++; ibit++;
4983 if (bit_pos >= set_word_size || ibit == nbits)
4984 {
4985 if (word != 0 || ! cleared)
4986 {
4987 rtx datum = GEN_INT (word);
4988 rtx to_rtx;
4989
4990 /* The assumption here is that it is safe to use
4991 XEXP if the set is multi-word, but not if
4992 it's single-word. */
4993 if (GET_CODE (target) == MEM)
4994 {
4995 to_rtx = plus_constant (XEXP (target, 0), offset);
4996 to_rtx = change_address (target, mode, to_rtx);
4997 }
4998 else if (offset == 0)
4999 to_rtx = target;
5000 else
5001 abort ();
5002 emit_move_insn (to_rtx, datum);
5003 }
5004
5005 if (ibit == nbits)
5006 break;
5007 word = 0;
5008 bit_pos = 0;
5009 offset += set_word_size / BITS_PER_UNIT;
5010 }
5011 }
5012 }
5013 else if (!cleared)
5014 /* Don't bother clearing storage if the set is all ones. */
5015 if (TREE_CHAIN (elt) != NULL_TREE
5016 || (TREE_PURPOSE (elt) == NULL_TREE
5017 ? nbits != 1
5018 : ( ! host_integerp (TREE_VALUE (elt), 0)
5019 || ! host_integerp (TREE_PURPOSE (elt), 0)
5020 || (tree_low_cst (TREE_VALUE (elt), 0)
5021 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5022 != (HOST_WIDE_INT) nbits))))
5023 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5024
5025 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5026 {
5027 /* Start of range of element or NULL. */
5028 tree startbit = TREE_PURPOSE (elt);
5029 /* End of range of element, or element value. */
5030 tree endbit = TREE_VALUE (elt);
5031 #ifdef TARGET_MEM_FUNCTIONS
5032 HOST_WIDE_INT startb, endb;
5033 #endif
5034 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5035
5036 bitlength_rtx = expand_expr (bitlength,
5037 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5038
5039 /* Handle non-range tuple element like [ expr ]. */
5040 if (startbit == NULL_TREE)
5041 {
5042 startbit = save_expr (endbit);
5043 endbit = startbit;
5044 }
5045
5046 startbit = convert (sizetype, startbit);
5047 endbit = convert (sizetype, endbit);
5048 if (! integer_zerop (domain_min))
5049 {
5050 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5051 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5052 }
5053 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5054 EXPAND_CONST_ADDRESS);
5055 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5056 EXPAND_CONST_ADDRESS);
5057
5058 if (REG_P (target))
5059 {
5060 targetx
5061 = assign_temp
5062 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5063 TYPE_QUAL_CONST)),
5064 0, 1, 1);
5065 emit_move_insn (targetx, target);
5066 }
5067
5068 else if (GET_CODE (target) == MEM)
5069 targetx = target;
5070 else
5071 abort ();
5072
5073 #ifdef TARGET_MEM_FUNCTIONS
5074 /* Optimization: If startbit and endbit are
5075 constants divisible by BITS_PER_UNIT,
5076 call memset instead. */
5077 if (TREE_CODE (startbit) == INTEGER_CST
5078 && TREE_CODE (endbit) == INTEGER_CST
5079 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5080 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5081 {
5082 emit_library_call (memset_libfunc, LCT_NORMAL,
5083 VOIDmode, 3,
5084 plus_constant (XEXP (targetx, 0),
5085 startb / BITS_PER_UNIT),
5086 Pmode,
5087 constm1_rtx, TYPE_MODE (integer_type_node),
5088 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5089 TYPE_MODE (sizetype));
5090 }
5091 else
5092 #endif
5093 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5094 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5095 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5096 startbit_rtx, TYPE_MODE (sizetype),
5097 endbit_rtx, TYPE_MODE (sizetype));
5098
5099 if (REG_P (target))
5100 emit_move_insn (target, targetx);
5101 }
5102 }
5103
5104 else
5105 abort ();
5106 }
5107
5108 /* Store the value of EXP (an expression tree)
5109 into a subfield of TARGET which has mode MODE and occupies
5110 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5111 If MODE is VOIDmode, it means that we are storing into a bit-field.
5112
5113 If VALUE_MODE is VOIDmode, return nothing in particular.
5114 UNSIGNEDP is not used in this case.
5115
5116 Otherwise, return an rtx for the value stored. This rtx
5117 has mode VALUE_MODE if that is convenient to do.
5118 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5119
5120 ALIGN is the alignment that TARGET is known to have.
5121 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5122
5123 ALIAS_SET is the alias set for the destination. This value will
5124 (in general) be different from that for TARGET, since TARGET is a
5125 reference to the containing structure. */
5126
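/* Editorial sketch, not part of the original source: the bit-field
   path below (MODE == VOIDmode) corresponds to stores such as

       struct s { unsigned a : 3; unsigned b : 5; } v;

       void
       set_b (unsigned n)
       {
         v.b = n;
       }

   Assuming the usual layout, `b' occupies 5 bits starting at bit 3, so
   the value is expanded into a temporary and store_bit_field emits a
   masked read-modify-write.  When VALUE_MODE is not VOIDmode the just
   stored value is also returned, recovered either by masking/shifting
   the temporary or by re-extracting the bit-field, as coded below.  */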
5127 static rtx
5128 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5129 unsignedp, align, total_size, alias_set)
5130 rtx target;
5131 HOST_WIDE_INT bitsize;
5132 HOST_WIDE_INT bitpos;
5133 enum machine_mode mode;
5134 tree exp;
5135 enum machine_mode value_mode;
5136 int unsignedp;
5137 unsigned int align;
5138 HOST_WIDE_INT total_size;
5139 int alias_set;
5140 {
5141 HOST_WIDE_INT width_mask = 0;
5142
5143 if (TREE_CODE (exp) == ERROR_MARK)
5144 return const0_rtx;
5145
5146 /* If we have nothing to store, do nothing unless the expression has
5147 side-effects. */
5148 if (bitsize == 0)
5149 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5150
5151 if (bitsize < HOST_BITS_PER_WIDE_INT)
5152 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5153
5154 /* If we are storing into an unaligned field of an aligned union that is
5155 in a register, we may have the mode of TARGET being an integer mode but
5156 MODE == BLKmode. In that case, get an aligned object whose size and
5157 alignment are the same as TARGET and store TARGET into it (we can avoid
5158 the store if the field being stored is the entire width of TARGET). Then
5159 call ourselves recursively to store the field into a BLKmode version of
5160 that object. Finally, load from the object into TARGET. This is not
5161 very efficient in general, but should only be slightly more expensive
5162 than the otherwise-required unaligned accesses. Perhaps this can be
5163 cleaned up later. */
5164
5165 if (mode == BLKmode
5166 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5167 {
5168 rtx object
5169 = assign_temp
5170 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5171 TYPE_QUAL_CONST),
5172 0, 1, 1);
5173 rtx blk_object = copy_rtx (object);
5174
5175 PUT_MODE (blk_object, BLKmode);
5176
5177 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5178 emit_move_insn (object, target);
5179
5180 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5181 align, total_size, alias_set);
5182
5183 /* Even though we aren't returning target, we need to
5184 give it the updated value. */
5185 emit_move_insn (target, object);
5186
5187 return blk_object;
5188 }
5189
5190 if (GET_CODE (target) == CONCAT)
5191 {
5192 /* We're storing into a struct containing a single __complex. */
5193
5194 if (bitpos != 0)
5195 abort ();
5196 return store_expr (exp, target, 0);
5197 }
5198
5199 /* If the structure is in a register or if the component
5200 is a bit field, we cannot use addressing to access it.
5201 Use bit-field techniques or SUBREG to store in it. */
5202
5203 if (mode == VOIDmode
5204 || (mode != BLKmode && ! direct_store[(int) mode]
5205 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5206 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5207 || GET_CODE (target) == REG
5208 || GET_CODE (target) == SUBREG
5209 /* If the field isn't aligned enough to store as an ordinary memref,
5210 store it as a bit field. */
5211 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5212 && (align < GET_MODE_ALIGNMENT (mode)
5213 || bitpos % GET_MODE_ALIGNMENT (mode)))
5214 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5215 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5216 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5217 /* If the RHS and field are a constant size and the size of the
5218 RHS isn't the same size as the bitfield, we must use bitfield
5219 operations. */
5220 || (bitsize >= 0
5221 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5222 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5223 {
5224 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5225
5226 /* If BITSIZE is narrower than the size of the type of EXP
5227 we will be narrowing TEMP. Normally, what's wanted are the
5228 low-order bits. However, if EXP's type is a record and this is
5229 a big-endian machine, we want the upper BITSIZE bits. */
5230 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5231 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5232 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5233 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5234 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5235 - bitsize),
5236 temp, 1);
5237
5238 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5239 MODE. */
5240 if (mode != VOIDmode && mode != BLKmode
5241 && mode != TYPE_MODE (TREE_TYPE (exp)))
5242 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5243
5244 /* If the modes of TARGET and TEMP are both BLKmode, both
5245 must be in memory and BITPOS must be aligned on a byte
5246 boundary. If so, we simply do a block copy. */
5247 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5248 {
5249 unsigned int exp_align = expr_align (exp);
5250
5251 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5252 || bitpos % BITS_PER_UNIT != 0)
5253 abort ();
5254
5255 target = change_address (target, VOIDmode,
5256 plus_constant (XEXP (target, 0),
5257 bitpos / BITS_PER_UNIT));
5258
5259 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5260 align = MIN (exp_align, align);
5261
5262 /* Find an alignment that is consistent with the bit position. */
5263 while ((bitpos % align) != 0)
5264 align >>= 1;
5265
5266 emit_block_move (target, temp,
5267 bitsize == -1 ? expr_size (exp)
5268 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5269 / BITS_PER_UNIT),
5270 align);
5271
5272 return value_mode == VOIDmode ? const0_rtx : target;
5273 }
5274
5275 /* Store the value in the bitfield. */
5276 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5277 if (value_mode != VOIDmode)
5278 {
5279 /* The caller wants an rtx for the value. */
5280 /* If possible, avoid refetching from the bitfield itself. */
5281 if (width_mask != 0
5282 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5283 {
5284 tree count;
5285 enum machine_mode tmode;
5286
5287 if (unsignedp)
5288 return expand_and (temp,
5289 GEN_INT
5290 (trunc_int_for_mode
5291 (width_mask,
5292 GET_MODE (temp) == VOIDmode
5293 ? value_mode
5294 : GET_MODE (temp))), NULL_RTX);
5295 tmode = GET_MODE (temp);
5296 if (tmode == VOIDmode)
5297 tmode = value_mode;
5298 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5299 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5300 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 }
5302 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5303 NULL_RTX, value_mode, 0, align,
5304 total_size);
5305 }
5306 return const0_rtx;
5307 }
5308 else
5309 {
5310 rtx addr = XEXP (target, 0);
5311 rtx to_rtx;
5312
5313 /* If a value is wanted, it must be the lhs;
5314 so make the address stable for multiple use. */
5315
5316 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5317 && ! CONSTANT_ADDRESS_P (addr)
5318 /* A frame-pointer reference is already stable. */
5319 && ! (GET_CODE (addr) == PLUS
5320 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5321 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5322 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5323 addr = copy_to_reg (addr);
5324
5325 /* Now build a reference to just the desired component. */
5326
5327 to_rtx = copy_rtx (change_address (target, mode,
5328 plus_constant (addr,
5329 (bitpos
5330 / BITS_PER_UNIT))));
5331 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5332 /* If the address of the structure varies, then it might be on
5333 the stack. And, stack slots may be shared across scopes.
5334 So, two different structures, of different types, can end up
5335 at the same location. We will give the structures alias set
5336 zero; here we must be careful not to give non-zero alias sets
5337 to their fields. */
5338 if (!rtx_varies_p (addr, /*for_alias=*/0))
5339 MEM_ALIAS_SET (to_rtx) = alias_set;
5340 else
5341 MEM_ALIAS_SET (to_rtx) = 0;
5342
5343 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5344 }
5345 }
5346 \f
5347 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5348 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5349 codes and find the ultimate containing object, which we return.
5350
5351 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5352 bit position, and *PUNSIGNEDP to the signedness of the field.
5353 If the position of the field is variable, we store a tree
5354 giving the variable offset (in units) in *POFFSET.
5355 This offset is in addition to the bit position.
5356 If the position is not variable, we store 0 in *POFFSET.
5357 We set *PALIGNMENT to the alignment of the address that will be
5358 computed. This is the alignment of the thing we return if *POFFSET
5359 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5360
5361 If any of the extraction expressions is volatile,
5362 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5363
5364 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5365 is a mode that can be used to access the field. In that case, *PBITSIZE
5366 is redundant.
5367
5368 If the field describes a variable-sized object, *PMODE is set to
5369 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5370 this case, but the address of the object can be found. */
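/* A minimal usage sketch (illustrative only; EXP and the local variables
   are assumed to be supplied by the caller, as in the COMPONENT_REF
   handling in expand_expr below):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     unsigned int alignment;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep,
				      &alignment);  */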
5371
5372 tree
5373 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5374 punsignedp, pvolatilep, palignment)
5375 tree exp;
5376 HOST_WIDE_INT *pbitsize;
5377 HOST_WIDE_INT *pbitpos;
5378 tree *poffset;
5379 enum machine_mode *pmode;
5380 int *punsignedp;
5381 int *pvolatilep;
5382 unsigned int *palignment;
5383 {
5384 tree size_tree = 0;
5385 enum machine_mode mode = VOIDmode;
5386 tree offset = size_zero_node;
5387 tree bit_offset = bitsize_zero_node;
5388 unsigned int alignment = BIGGEST_ALIGNMENT;
5389 tree tem;
5390
5391 /* First get the mode, signedness, and size. We do this from just the
5392 outermost expression. */
5393 if (TREE_CODE (exp) == COMPONENT_REF)
5394 {
5395 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5396 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5397 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5398
5399 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5400 }
5401 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5402 {
5403 size_tree = TREE_OPERAND (exp, 1);
5404 *punsignedp = TREE_UNSIGNED (exp);
5405 }
5406 else
5407 {
5408 mode = TYPE_MODE (TREE_TYPE (exp));
5409 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5410
5411 if (mode == BLKmode)
5412 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5413 else
5414 *pbitsize = GET_MODE_BITSIZE (mode);
5415 }
5416
5417 if (size_tree != 0)
5418 {
5419 if (! host_integerp (size_tree, 1))
5420 mode = BLKmode, *pbitsize = -1;
5421 else
5422 *pbitsize = tree_low_cst (size_tree, 1);
5423 }
5424
5425 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5426 and find the ultimate containing object. */
5427 while (1)
5428 {
5429 if (TREE_CODE (exp) == BIT_FIELD_REF)
5430 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5431 else if (TREE_CODE (exp) == COMPONENT_REF)
5432 {
5433 tree field = TREE_OPERAND (exp, 1);
5434 tree this_offset = DECL_FIELD_OFFSET (field);
5435
5436 /* If this field hasn't been filled in yet, don't go
5437 past it. This should only happen when folding expressions
5438 made during type construction. */
5439 if (this_offset == 0)
5440 break;
5441 else if (! TREE_CONSTANT (this_offset)
5442 && contains_placeholder_p (this_offset))
5443 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5444
5445 offset = size_binop (PLUS_EXPR, offset, this_offset);
5446 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5447 DECL_FIELD_BIT_OFFSET (field));
5448
5449 if (! host_integerp (offset, 0))
5450 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5451 }
5452
5453 else if (TREE_CODE (exp) == ARRAY_REF
5454 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5455 {
5456 tree index = TREE_OPERAND (exp, 1);
5457 tree array = TREE_OPERAND (exp, 0);
5458 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5459 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5460 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5461
5462 /* We assume all arrays have sizes that are a multiple of a byte.
5463 First subtract the lower bound, if any, in the type of the
5464 index, then convert to sizetype and multiply by the size of the
5465 array element. */
5466 if (low_bound != 0 && ! integer_zerop (low_bound))
5467 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5468 index, low_bound));
5469
5470 /* If the index has a self-referential type, pass it to a
5471 WITH_RECORD_EXPR; if the component size is self-referential,
5472 wrap it in one that refers to our array.  */
5473 if (! TREE_CONSTANT (index)
5474 && contains_placeholder_p (index))
5475 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5476 if (! TREE_CONSTANT (unit_size)
5477 && contains_placeholder_p (unit_size))
5478 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5479
5480 offset = size_binop (PLUS_EXPR, offset,
5481 size_binop (MULT_EXPR,
5482 convert (sizetype, index),
5483 unit_size));
5484 }
5485
5486 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5487 && ! ((TREE_CODE (exp) == NOP_EXPR
5488 || TREE_CODE (exp) == CONVERT_EXPR)
5489 && (TYPE_MODE (TREE_TYPE (exp))
5490 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5491 break;
5492
5493 /* If any reference in the chain is volatile, the effect is volatile. */
5494 if (TREE_THIS_VOLATILE (exp))
5495 *pvolatilep = 1;
5496
5497 /* If the offset is non-constant already, then we can't assume any
5498 alignment more than the alignment here. */
5499 if (! TREE_CONSTANT (offset))
5500 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5501
5502 exp = TREE_OPERAND (exp, 0);
5503 }
5504
5505 if (DECL_P (exp))
5506 alignment = MIN (alignment, DECL_ALIGN (exp));
5507 else if (TREE_TYPE (exp) != 0)
5508 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5509
5510 /* If OFFSET is constant, see if we can return the whole thing as a
5511 constant bit position. Otherwise, split it up. */
5512 if (host_integerp (offset, 0)
5513 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5514 bitsize_unit_node))
5515 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5516 && host_integerp (tem, 0))
5517 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5518 else
5519 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5520
5521 *pmode = mode;
5522 *palignment = alignment;
5523 return exp;
5524 }
5525
5526 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5527
5528 static enum memory_use_mode
5529 get_memory_usage_from_modifier (modifier)
5530 enum expand_modifier modifier;
5531 {
5532 switch (modifier)
5533 {
5534 case EXPAND_NORMAL:
5535 case EXPAND_SUM:
5536 return MEMORY_USE_RO;
5537 break;
5538 case EXPAND_MEMORY_USE_WO:
5539 return MEMORY_USE_WO;
5540 break;
5541 case EXPAND_MEMORY_USE_RW:
5542 return MEMORY_USE_RW;
5543 break;
5544 case EXPAND_MEMORY_USE_DONT:
5545 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5546 MEMORY_USE_DONT, because they are modifiers to a call of
5547 expand_expr in the ADDR_EXPR case of expand_expr. */
5548 case EXPAND_CONST_ADDRESS:
5549 case EXPAND_INITIALIZER:
5550 return MEMORY_USE_DONT;
5551 case EXPAND_MEMORY_USE_BAD:
5552 default:
5553 abort ();
5554 }
5555 }
5556 \f
5557 /* Given an rtx VALUE that may contain additions and multiplications, return
5558 an equivalent value that just refers to a register, memory, or constant.
5559 This is done by generating instructions to perform the arithmetic and
5560 returning a pseudo-register containing the value.
5561
5562 The returned value may be a REG, SUBREG, MEM or constant. */
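/* A minimal usage sketch (illustrative only; BASE_REG stands for some
   existing pseudo register):

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (12));
     rtx tem = force_operand (addr, NULL_RTX);

   TEM then holds BASE_REG + 12, with any needed add insns already
   emitted.  */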
5563
5564 rtx
5565 force_operand (value, target)
5566 rtx value, target;
5567 {
5568 register optab binoptab = 0;
5569 /* Use a temporary to force order of execution of calls to
5570 `force_operand'. */
5571 rtx tmp;
5572 register rtx op2;
5573 /* Use subtarget as the target for operand 0 of a binary operation. */
5574 register rtx subtarget = get_subtarget (target);
5575
5576 /* Check for a PIC address load. */
5577 if (flag_pic
5578 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5579 && XEXP (value, 0) == pic_offset_table_rtx
5580 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5581 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5582 || GET_CODE (XEXP (value, 1)) == CONST))
5583 {
5584 if (!subtarget)
5585 subtarget = gen_reg_rtx (GET_MODE (value));
5586 emit_move_insn (subtarget, value);
5587 return subtarget;
5588 }
5589
5590 if (GET_CODE (value) == PLUS)
5591 binoptab = add_optab;
5592 else if (GET_CODE (value) == MINUS)
5593 binoptab = sub_optab;
5594 else if (GET_CODE (value) == MULT)
5595 {
5596 op2 = XEXP (value, 1);
5597 if (!CONSTANT_P (op2)
5598 && !(GET_CODE (op2) == REG && op2 != subtarget))
5599 subtarget = 0;
5600 tmp = force_operand (XEXP (value, 0), subtarget);
5601 return expand_mult (GET_MODE (value), tmp,
5602 force_operand (op2, NULL_RTX),
5603 target, 1);
5604 }
5605
5606 if (binoptab)
5607 {
5608 op2 = XEXP (value, 1);
5609 if (!CONSTANT_P (op2)
5610 && !(GET_CODE (op2) == REG && op2 != subtarget))
5611 subtarget = 0;
5612 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5613 {
5614 binoptab = add_optab;
5615 op2 = negate_rtx (GET_MODE (value), op2);
5616 }
5617
5618 /* Check for an addition with OP2 a constant integer and our first
5619 operand a PLUS of a virtual register and something else. In that
5620 case, we want to emit the sum of the virtual register and the
5621 constant first and then add the other value. This allows virtual
5622 register instantiation to simply modify the constant rather than
5623 creating another one around this addition. */
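	 /* For example, given (plus (plus (reg virtual-stack-vars) (reg i))
	    (const_int 8)) we first form virtual-stack-vars + 8, which
	    instantiation can later fold into a single frame-pointer offset,
	    and only then add the register term.  */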
5624 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5625 && GET_CODE (XEXP (value, 0)) == PLUS
5626 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5627 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5628 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5629 {
5630 rtx temp = expand_binop (GET_MODE (value), binoptab,
5631 XEXP (XEXP (value, 0), 0), op2,
5632 subtarget, 0, OPTAB_LIB_WIDEN);
5633 return expand_binop (GET_MODE (value), binoptab, temp,
5634 force_operand (XEXP (XEXP (value, 0), 1), 0),
5635 target, 0, OPTAB_LIB_WIDEN);
5636 }
5637
5638 tmp = force_operand (XEXP (value, 0), subtarget);
5639 return expand_binop (GET_MODE (value), binoptab, tmp,
5640 force_operand (op2, NULL_RTX),
5641 target, 0, OPTAB_LIB_WIDEN);
5642 /* We give UNSIGNEDP = 0 to expand_binop
5643 because the only operations we are expanding here are signed ones. */
5644 }
5645 return value;
5646 }
5647 \f
5648 /* Subroutine of expand_expr:
5649 save the non-copied parts (LIST) of an expr (LHS), and return a list
5650 which can restore these values to their previous values,
5651 should something modify their storage. */
5652
5653 static tree
5654 save_noncopied_parts (lhs, list)
5655 tree lhs;
5656 tree list;
5657 {
5658 tree tail;
5659 tree parts = 0;
5660
5661 for (tail = list; tail; tail = TREE_CHAIN (tail))
5662 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5663 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5664 else
5665 {
5666 tree part = TREE_VALUE (tail);
5667 tree part_type = TREE_TYPE (part);
5668 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5669 rtx target
5670 = assign_temp (build_qualified_type (part_type,
5671 (TYPE_QUALS (part_type)
5672 | TYPE_QUAL_CONST)),
5673 0, 1, 1);
5674
5675 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5676 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5677 parts = tree_cons (to_be_saved,
5678 build (RTL_EXPR, part_type, NULL_TREE,
5679 (tree) target),
5680 parts);
5681 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5682 }
5683 return parts;
5684 }
5685
5686 /* Subroutine of expand_expr:
5687 record the non-copied parts (LIST) of an expr (LHS), and return a list
5688 which specifies the initial values of these parts. */
5689
5690 static tree
5691 init_noncopied_parts (lhs, list)
5692 tree lhs;
5693 tree list;
5694 {
5695 tree tail;
5696 tree parts = 0;
5697
5698 for (tail = list; tail; tail = TREE_CHAIN (tail))
5699 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5700 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5701 else if (TREE_PURPOSE (tail))
5702 {
5703 tree part = TREE_VALUE (tail);
5704 tree part_type = TREE_TYPE (part);
5705 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5706 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5707 }
5708 return parts;
5709 }
5710
5711 /* Subroutine of expand_expr: return nonzero iff there is no way that
5712 EXP can reference X, which is being modified. TOP_P is nonzero if this
5713 call is going to be used to determine whether we need a temporary
5714 for EXP, as opposed to a recursive call to this function.
5715
5716 It is always safe for this routine to return zero since it merely
5717 searches for optimization opportunities. */
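/* The typical idiom (sketch; TARGET, EXP and TYPE as in the callers in
   this file): reuse TARGET only when EXP cannot reference it, otherwise
   grab a fresh temporary:

     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = assign_temp (type, 0, 1, 1);  */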
5718
5719 int
5720 safe_from_p (x, exp, top_p)
5721 rtx x;
5722 tree exp;
5723 int top_p;
5724 {
5725 rtx exp_rtl = 0;
5726 int i, nops;
5727 static tree save_expr_list;
5728
5729 if (x == 0
5730 /* If EXP has varying size, we MUST use a target since we currently
5731 have no way of allocating temporaries of variable size
5732 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5733 So we assume here that something at a higher level has prevented a
5734 clash. This is somewhat bogus, but the best we can do. Only
5735 do this when X is BLKmode and when we are at the top level. */
5736 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5737 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5738 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5739 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5740 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5741 != INTEGER_CST)
5742 && GET_MODE (x) == BLKmode)
5743 /* If X is in the outgoing argument area, it is always safe. */
5744 || (GET_CODE (x) == MEM
5745 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5746 || (GET_CODE (XEXP (x, 0)) == PLUS
5747 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5748 return 1;
5749
5750 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5751 find the underlying pseudo. */
5752 if (GET_CODE (x) == SUBREG)
5753 {
5754 x = SUBREG_REG (x);
5755 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5756 return 0;
5757 }
5758
5759 /* A SAVE_EXPR might appear many times in the expression passed to the
5760 top-level safe_from_p call, and if it has a complex subexpression,
5761 examining it multiple times could result in a combinatorial explosion.
5762 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5763 with optimization took about 28 minutes to compile -- even though it was
5764 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5765 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5766 we have processed. Note that the only test of top_p was above. */
5767
5768 if (top_p)
5769 {
5770 int rtn;
5771 tree t;
5772
5773 save_expr_list = 0;
5774
5775 rtn = safe_from_p (x, exp, 0);
5776
5777 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5778 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5779
5780 return rtn;
5781 }
5782
5783 /* Now look at our tree code and possibly recurse. */
5784 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5785 {
5786 case 'd':
5787 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5788 break;
5789
5790 case 'c':
5791 return 1;
5792
5793 case 'x':
5794 if (TREE_CODE (exp) == TREE_LIST)
5795 return ((TREE_VALUE (exp) == 0
5796 || safe_from_p (x, TREE_VALUE (exp), 0))
5797 && (TREE_CHAIN (exp) == 0
5798 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5799 else if (TREE_CODE (exp) == ERROR_MARK)
5800 return 1; /* An already-visited SAVE_EXPR? */
5801 else
5802 return 0;
5803
5804 case '1':
5805 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5806
5807 case '2':
5808 case '<':
5809 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5810 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5811
5812 case 'e':
5813 case 'r':
5814 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5815 the expression. If it is set, we conflict iff we are that rtx or
5816 both are in memory. Otherwise, we check all operands of the
5817 expression recursively. */
5818
5819 switch (TREE_CODE (exp))
5820 {
5821 case ADDR_EXPR:
5822 return (staticp (TREE_OPERAND (exp, 0))
5823 || TREE_STATIC (exp)
5824 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5825
5826 case INDIRECT_REF:
5827 if (GET_CODE (x) == MEM
5828 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5829 get_alias_set (exp)))
5830 return 0;
5831 break;
5832
5833 case CALL_EXPR:
5834 /* Assume that the call will clobber all hard registers and
5835 all of memory. */
5836 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5837 || GET_CODE (x) == MEM)
5838 return 0;
5839 break;
5840
5841 case RTL_EXPR:
5842 /* If a sequence exists, we would have to scan every instruction
5843 in the sequence to see if it was safe. This is probably not
5844 worthwhile. */
5845 if (RTL_EXPR_SEQUENCE (exp))
5846 return 0;
5847
5848 exp_rtl = RTL_EXPR_RTL (exp);
5849 break;
5850
5851 case WITH_CLEANUP_EXPR:
5852 exp_rtl = RTL_EXPR_RTL (exp);
5853 break;
5854
5855 case CLEANUP_POINT_EXPR:
5856 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5857
5858 case SAVE_EXPR:
5859 exp_rtl = SAVE_EXPR_RTL (exp);
5860 if (exp_rtl)
5861 break;
5862
5863 /* If we've already scanned this, don't do it again. Otherwise,
5864 show we've scanned it and record for clearing the flag if we're
5865 going on. */
5866 if (TREE_PRIVATE (exp))
5867 return 1;
5868
5869 TREE_PRIVATE (exp) = 1;
5870 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5871 {
5872 TREE_PRIVATE (exp) = 0;
5873 return 0;
5874 }
5875
5876 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5877 return 1;
5878
5879 case BIND_EXPR:
5880 /* The only operand we look at is operand 1. The rest aren't
5881 part of the expression. */
5882 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5883
5884 case METHOD_CALL_EXPR:
5885 /* This takes a rtx argument, but shouldn't appear here. */
5886 abort ();
5887
5888 default:
5889 break;
5890 }
5891
5892 /* If we have an rtx, we do not need to scan our operands. */
5893 if (exp_rtl)
5894 break;
5895
5896 nops = first_rtl_op (TREE_CODE (exp));
5897 for (i = 0; i < nops; i++)
5898 if (TREE_OPERAND (exp, i) != 0
5899 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5900 return 0;
5901
5902 /* If this is a language-specific tree code, it may require
5903 special handling. */
5904 if ((unsigned int) TREE_CODE (exp)
5905 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5906 && lang_safe_from_p
5907 && !(*lang_safe_from_p) (x, exp))
5908 return 0;
5909 }
5910
5911 /* If we have an rtl, find any enclosed object. Then see if we conflict
5912 with it. */
5913 if (exp_rtl)
5914 {
5915 if (GET_CODE (exp_rtl) == SUBREG)
5916 {
5917 exp_rtl = SUBREG_REG (exp_rtl);
5918 if (GET_CODE (exp_rtl) == REG
5919 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5920 return 0;
5921 }
5922
5923 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5924 are memory and they conflict. */
5925 return ! (rtx_equal_p (x, exp_rtl)
5926 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5927 && true_dependence (exp_rtl, GET_MODE (x), x,
5928 rtx_addr_varies_p)));
5929 }
5930
5931 /* If we reach here, it is safe. */
5932 return 1;
5933 }
5934
5935 /* Subroutine of expand_expr: return nonzero iff EXP is an
5936 expression whose type is statically determinable. */
5937
5938 static int
5939 fixed_type_p (exp)
5940 tree exp;
5941 {
5942 if (TREE_CODE (exp) == PARM_DECL
5943 || TREE_CODE (exp) == VAR_DECL
5944 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5945 || TREE_CODE (exp) == COMPONENT_REF
5946 || TREE_CODE (exp) == ARRAY_REF)
5947 return 1;
5948 return 0;
5949 }
5950
5951 /* Subroutine of expand_expr: return rtx if EXP is a
5952 variable or parameter; else return 0. */
5953
5954 static rtx
5955 var_rtx (exp)
5956 tree exp;
5957 {
5958 STRIP_NOPS (exp);
5959 switch (TREE_CODE (exp))
5960 {
5961 case PARM_DECL:
5962 case VAR_DECL:
5963 return DECL_RTL (exp);
5964 default:
5965 return 0;
5966 }
5967 }
5968
5969 #ifdef MAX_INTEGER_COMPUTATION_MODE
5970
5971 void
5972 check_max_integer_computation_mode (exp)
5973 tree exp;
5974 {
5975 enum tree_code code;
5976 enum machine_mode mode;
5977
5978 /* Strip any NOPs that don't change the mode. */
5979 STRIP_NOPS (exp);
5980 code = TREE_CODE (exp);
5981
5982 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5983 if (code == NOP_EXPR
5984 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5985 return;
5986
5987 /* First check the type of the overall operation. We need only look at
5988 unary, binary and relational operations. */
5989 if (TREE_CODE_CLASS (code) == '1'
5990 || TREE_CODE_CLASS (code) == '2'
5991 || TREE_CODE_CLASS (code) == '<')
5992 {
5993 mode = TYPE_MODE (TREE_TYPE (exp));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
5996 internal_error ("unsupported wide integer operation");
5997 }
5998
5999 /* Check operand of a unary op. */
6000 if (TREE_CODE_CLASS (code) == '1')
6001 {
6002 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6003 if (GET_MODE_CLASS (mode) == MODE_INT
6004 && mode > MAX_INTEGER_COMPUTATION_MODE)
6005 internal_error ("unsupported wide integer operation");
6006 }
6007
6008 /* Check operands of a binary/comparison op. */
6009 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6010 {
6011 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6012 if (GET_MODE_CLASS (mode) == MODE_INT
6013 && mode > MAX_INTEGER_COMPUTATION_MODE)
6014 internal_error ("unsupported wide integer operation");
6015
6016 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6017 if (GET_MODE_CLASS (mode) == MODE_INT
6018 && mode > MAX_INTEGER_COMPUTATION_MODE)
6019 internal_error ("unsupported wide integer operation");
6020 }
6021 }
6022 #endif
6023 \f
6024 /* expand_expr: generate code for computing expression EXP.
6025 An rtx for the computed value is returned. The value is never null.
6026 In the case of a void EXP, const0_rtx is returned.
6027
6028 The value may be stored in TARGET if TARGET is nonzero.
6029 TARGET is just a suggestion; callers must assume that
6030 the rtx returned may not be the same as TARGET.
6031
6032 If TARGET is CONST0_RTX, it means that the value will be ignored.
6033
6034 If TMODE is not VOIDmode, it suggests generating the
6035 result in mode TMODE. But this is done only when convenient.
6036 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6037 TMODE is just a suggestion; callers must assume that
6038 the rtx returned may not have mode TMODE.
6039
6040 Note that TARGET may have neither TMODE nor MODE. In that case, it
6041 probably will not be used.
6042
6043 If MODIFIER is EXPAND_SUM then when EXP is an addition
6044 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6045 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6046 products as above, or REG or MEM, or constant.
6047 Ordinarily in such cases we would output mul or add instructions
6048 and then return a pseudo reg containing the sum.
6049
6050 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6051 it also marks a label as absolutely required (it can't be dead).
6052 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6053 This is used for outputting expressions used in initializers.
6054
6055 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6056 with a constant address even if that address is not normally legitimate.
6057 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
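/* A minimal usage sketch (illustrative only): to compute a tree expression
   into a fresh rtx in its natural mode a caller typically writes

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and must not assume that VAL ends up in any particular register, or
   even that it is a register at all.  */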
6058
6059 rtx
6060 expand_expr (exp, target, tmode, modifier)
6061 register tree exp;
6062 rtx target;
6063 enum machine_mode tmode;
6064 enum expand_modifier modifier;
6065 {
6066 register rtx op0, op1, temp;
6067 tree type = TREE_TYPE (exp);
6068 int unsignedp = TREE_UNSIGNED (type);
6069 register enum machine_mode mode;
6070 register enum tree_code code = TREE_CODE (exp);
6071 optab this_optab;
6072 rtx subtarget, original_target;
6073 int ignore;
6074 tree context;
6075 /* Used by check-memory-usage to make modifier read only. */
6076 enum expand_modifier ro_modifier;
6077
6078 /* Handle ERROR_MARK before anybody tries to access its type. */
6079 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6080 {
6081 op0 = CONST0_RTX (tmode);
6082 if (op0 != 0)
6083 return op0;
6084 return const0_rtx;
6085 }
6086
6087 mode = TYPE_MODE (type);
6088 /* Use subtarget as the target for operand 0 of a binary operation. */
6089 subtarget = get_subtarget (target);
6090 original_target = target;
6091 ignore = (target == const0_rtx
6092 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6093 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6094 || code == COND_EXPR)
6095 && TREE_CODE (type) == VOID_TYPE));
6096
6097 /* Make a read-only version of the modifier. */
6098 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6099 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6100 ro_modifier = modifier;
6101 else
6102 ro_modifier = EXPAND_NORMAL;
6103
6104 /* If we are going to ignore this result, we need only do something
6105 if there is a side-effect somewhere in the expression. If there
6106 is, short-circuit the most common cases here. Note that we must
6107 not call expand_expr with anything but const0_rtx in case this
6108 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
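  /* For example, an expression statement such as `f () + g ();' whose value
     is unused reaches here with IGNORE set: each operand is expanded with
     const0_rtx as the target, purely for its side effects, and const0_rtx
     is returned.  */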
6109
6110 if (ignore)
6111 {
6112 if (! TREE_SIDE_EFFECTS (exp))
6113 return const0_rtx;
6114
6115 /* Ensure we reference a volatile object even if value is ignored, but
6116 don't do this if all we are doing is taking its address. */
6117 if (TREE_THIS_VOLATILE (exp)
6118 && TREE_CODE (exp) != FUNCTION_DECL
6119 && mode != VOIDmode && mode != BLKmode
6120 && modifier != EXPAND_CONST_ADDRESS)
6121 {
6122 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6123 if (GET_CODE (temp) == MEM)
6124 temp = copy_to_reg (temp);
6125 return const0_rtx;
6126 }
6127
6128 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6129 || code == INDIRECT_REF || code == BUFFER_REF)
6130 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6131 VOIDmode, ro_modifier);
6132 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6133 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6134 {
6135 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6136 ro_modifier);
6137 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6138 ro_modifier);
6139 return const0_rtx;
6140 }
6141 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6142 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6143 /* If the second operand has no side effects, just evaluate
6144 the first. */
6145 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6146 VOIDmode, ro_modifier);
6147 else if (code == BIT_FIELD_REF)
6148 {
6149 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6150 ro_modifier);
6151 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6152 ro_modifier);
6153 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6154 ro_modifier);
6155 return const0_rtx;
6156 }
6157 ;
6158 target = 0;
6159 }
6160
6161 #ifdef MAX_INTEGER_COMPUTATION_MODE
6162 /* Only check stuff here if the mode we want is different from the mode
6163 of the expression; if it's the same, check_max_integer_computation_mode
6164 will handle it. Do we really need to check this stuff at all? */
6165
6166 if (target
6167 && GET_MODE (target) != mode
6168 && TREE_CODE (exp) != INTEGER_CST
6169 && TREE_CODE (exp) != PARM_DECL
6170 && TREE_CODE (exp) != ARRAY_REF
6171 && TREE_CODE (exp) != ARRAY_RANGE_REF
6172 && TREE_CODE (exp) != COMPONENT_REF
6173 && TREE_CODE (exp) != BIT_FIELD_REF
6174 && TREE_CODE (exp) != INDIRECT_REF
6175 && TREE_CODE (exp) != CALL_EXPR
6176 && TREE_CODE (exp) != VAR_DECL
6177 && TREE_CODE (exp) != RTL_EXPR)
6178 {
6179 enum machine_mode mode = GET_MODE (target);
6180
6181 if (GET_MODE_CLASS (mode) == MODE_INT
6182 && mode > MAX_INTEGER_COMPUTATION_MODE)
6183 internal_error ("unsupported wide integer operation");
6184 }
6185
6186 if (tmode != mode
6187 && TREE_CODE (exp) != INTEGER_CST
6188 && TREE_CODE (exp) != PARM_DECL
6189 && TREE_CODE (exp) != ARRAY_REF
6190 && TREE_CODE (exp) != ARRAY_RANGE_REF
6191 && TREE_CODE (exp) != COMPONENT_REF
6192 && TREE_CODE (exp) != BIT_FIELD_REF
6193 && TREE_CODE (exp) != INDIRECT_REF
6194 && TREE_CODE (exp) != VAR_DECL
6195 && TREE_CODE (exp) != CALL_EXPR
6196 && TREE_CODE (exp) != RTL_EXPR
6197 && GET_MODE_CLASS (tmode) == MODE_INT
6198 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6199 internal_error ("unsupported wide integer operation");
6200
6201 check_max_integer_computation_mode (exp);
6202 #endif
6203
6204 /* If we will do cse, generate all results into pseudo registers
6205 since 1) that allows cse to find more things
6206 and 2) otherwise cse could produce an insn the machine
6207 cannot support. */
6208
6209 if (! cse_not_expected && mode != BLKmode && target
6210 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6211 target = subtarget;
6212
6213 switch (code)
6214 {
6215 case LABEL_DECL:
6216 {
6217 tree function = decl_function_context (exp);
6218 /* Handle using a label in a containing function. */
6219 if (function != current_function_decl
6220 && function != inline_function_decl && function != 0)
6221 {
6222 struct function *p = find_function_data (function);
6223 p->expr->x_forced_labels
6224 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6225 p->expr->x_forced_labels);
6226 }
6227 else
6228 {
6229 if (modifier == EXPAND_INITIALIZER)
6230 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6231 label_rtx (exp),
6232 forced_labels);
6233 }
6234
6235 temp = gen_rtx_MEM (FUNCTION_MODE,
6236 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6237 if (function != current_function_decl
6238 && function != inline_function_decl && function != 0)
6239 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6240 return temp;
6241 }
6242
6243 case PARM_DECL:
6244 if (DECL_RTL (exp) == 0)
6245 {
6246 error_with_decl (exp, "prior parameter's size depends on `%s'");
6247 return CONST0_RTX (mode);
6248 }
6249
6250 /* ... fall through ... */
6251
6252 case VAR_DECL:
6253 /* If a static var's type was incomplete when the decl was written,
6254 but the type is complete now, lay out the decl now. */
6255 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6256 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6257 {
6258 layout_decl (exp, 0);
6259 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6260 }
6261
6262 /* Although static-storage variables start off initialized, according to
6263 ANSI C, a memcpy could overwrite them with uninitialized values. So
6264 we check them too. This also lets us check for read-only variables
6265 accessed via a non-const declaration, in case it won't be detected
6266 any other way (e.g., in an embedded system or OS kernel without
6267 memory protection).
6268
6269 Aggregates are not checked here; they're handled elsewhere. */
6270 if (cfun && current_function_check_memory_usage
6271 && code == VAR_DECL
6272 && GET_CODE (DECL_RTL (exp)) == MEM
6273 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6274 {
6275 enum memory_use_mode memory_usage;
6276 memory_usage = get_memory_usage_from_modifier (modifier);
6277
6278 in_check_memory_usage = 1;
6279 if (memory_usage != MEMORY_USE_DONT)
6280 emit_library_call (chkr_check_addr_libfunc,
6281 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6282 XEXP (DECL_RTL (exp), 0), Pmode,
6283 GEN_INT (int_size_in_bytes (type)),
6284 TYPE_MODE (sizetype),
6285 GEN_INT (memory_usage),
6286 TYPE_MODE (integer_type_node));
6287 in_check_memory_usage = 0;
6288 }
6289
6290 /* ... fall through ... */
6291
6292 case FUNCTION_DECL:
6293 case RESULT_DECL:
6294 if (DECL_RTL (exp) == 0)
6295 abort ();
6296
6297 /* Ensure the variable is marked as used even if it doesn't go through
6298 a parser.  If it hasn't been used yet, write out an external
6299 definition. */
6300 if (! TREE_USED (exp))
6301 {
6302 assemble_external (exp);
6303 TREE_USED (exp) = 1;
6304 }
6305
6306 /* Show we haven't gotten RTL for this yet. */
6307 temp = 0;
6308
6309 /* Handle variables inherited from containing functions. */
6310 context = decl_function_context (exp);
6311
6312 /* We treat inline_function_decl as an alias for the current function
6313 because that is the inline function whose vars, types, etc.
6314 are being merged into the current function.
6315 See expand_inline_function. */
6316
6317 if (context != 0 && context != current_function_decl
6318 && context != inline_function_decl
6319 /* If var is static, we don't need a static chain to access it. */
6320 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6321 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6322 {
6323 rtx addr;
6324
6325 /* Mark as non-local and addressable. */
6326 DECL_NONLOCAL (exp) = 1;
6327 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6328 abort ();
6329 mark_addressable (exp);
6330 if (GET_CODE (DECL_RTL (exp)) != MEM)
6331 abort ();
6332 addr = XEXP (DECL_RTL (exp), 0);
6333 if (GET_CODE (addr) == MEM)
6334 addr = change_address (addr, Pmode,
6335 fix_lexical_addr (XEXP (addr, 0), exp));
6336 else
6337 addr = fix_lexical_addr (addr, exp);
6338
6339 temp = change_address (DECL_RTL (exp), mode, addr);
6340 }
6341
6342 /* This is the case of an array whose size is to be determined
6343 from its initializer, while the initializer is still being parsed.
6344 See expand_decl. */
6345
6346 else if (GET_CODE (DECL_RTL (exp)) == MEM
6347 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6348 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6349 XEXP (DECL_RTL (exp), 0));
6350
6351 /* If DECL_RTL is memory, we are in the normal case and either
6352 the address is not valid or it is not a register and -fforce-addr
6353 is specified, get the address into a register. */
6354
6355 else if (GET_CODE (DECL_RTL (exp)) == MEM
6356 && modifier != EXPAND_CONST_ADDRESS
6357 && modifier != EXPAND_SUM
6358 && modifier != EXPAND_INITIALIZER
6359 && (! memory_address_p (DECL_MODE (exp),
6360 XEXP (DECL_RTL (exp), 0))
6361 || (flag_force_addr
6362 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6363 temp = change_address (DECL_RTL (exp), VOIDmode,
6364 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6365
6366 /* If we got something, return it. But first, set the alignment
6367 if the address is a register. */
6368 if (temp != 0)
6369 {
6370 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6371 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6372
6373 return temp;
6374 }
6375
6376 /* If the mode of DECL_RTL does not match that of the decl, it
6377 must be a promoted value. We return a SUBREG of the wanted mode,
6378 but mark it so that we know that it was already extended. */
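	 /* For example, on a target whose PROMOTE_MODE widens `short' to
	    SImode, DECL_RTL is an SImode pseudo while MODE is HImode; we
	    hand back an HImode SUBREG of that pseudo with
	    SUBREG_PROMOTED_VAR_P set so later users know the upper bits
	    are already correct.  */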
6379
6380 if (GET_CODE (DECL_RTL (exp)) == REG
6381 && GET_MODE (DECL_RTL (exp)) != mode)
6382 {
6383 /* Get the signedness used for this variable. Ensure we get the
6384 same mode we got when the variable was declared. */
6385 if (GET_MODE (DECL_RTL (exp))
6386 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6387 abort ();
6388
6389 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6390 SUBREG_PROMOTED_VAR_P (temp) = 1;
6391 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6392 return temp;
6393 }
6394
6395 return DECL_RTL (exp);
6396
6397 case INTEGER_CST:
6398 return immed_double_const (TREE_INT_CST_LOW (exp),
6399 TREE_INT_CST_HIGH (exp), mode);
6400
6401 case CONST_DECL:
6402 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6403 EXPAND_MEMORY_USE_BAD);
6404
6405 case REAL_CST:
6406 /* If optimized, generate immediate CONST_DOUBLE
6407 which will be turned into memory by reload if necessary.
6408
6409 We used to force a register so that loop.c could see it. But
6410 this does not allow gen_* patterns to perform optimizations with
6411 the constants. It also produces two insns in cases like "x = 1.0;".
6412 On most machines, floating-point constants are not permitted in
6413 many insns, so we'd end up copying it to a register in any case.
6414
6415 Now, we do the copying in expand_binop, if appropriate. */
6416 return immed_real_const (exp);
6417
6418 case COMPLEX_CST:
6419 case STRING_CST:
6420 if (! TREE_CST_RTL (exp))
6421 output_constant_def (exp, 1);
6422
6423 /* TREE_CST_RTL probably contains a constant address.
6424 On RISC machines where a constant address isn't valid,
6425 make some insns to get that address into a register. */
6426 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6427 && modifier != EXPAND_CONST_ADDRESS
6428 && modifier != EXPAND_INITIALIZER
6429 && modifier != EXPAND_SUM
6430 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6431 || (flag_force_addr
6432 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6433 return change_address (TREE_CST_RTL (exp), VOIDmode,
6434 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6435 return TREE_CST_RTL (exp);
6436
6437 case EXPR_WITH_FILE_LOCATION:
6438 {
6439 rtx to_return;
6440 const char *saved_input_filename = input_filename;
6441 int saved_lineno = lineno;
6442 input_filename = EXPR_WFL_FILENAME (exp);
6443 lineno = EXPR_WFL_LINENO (exp);
6444 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6445 emit_line_note (input_filename, lineno);
6446 /* Possibly avoid switching back and forth here.  */
6447 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6448 input_filename = saved_input_filename;
6449 lineno = saved_lineno;
6450 return to_return;
6451 }
6452
6453 case SAVE_EXPR:
6454 context = decl_function_context (exp);
6455
6456 /* If this SAVE_EXPR was at global context, assume we are an
6457 initialization function and move it into our context. */
6458 if (context == 0)
6459 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6460
6461 /* We treat inline_function_decl as an alias for the current function
6462 because that is the inline function whose vars, types, etc.
6463 are being merged into the current function.
6464 See expand_inline_function. */
6465 if (context == current_function_decl || context == inline_function_decl)
6466 context = 0;
6467
6468 /* If this is non-local, handle it. */
6469 if (context)
6470 {
6471 /* The following call just exists to abort if the context is
6472 not of a containing function. */
6473 find_function_data (context);
6474
6475 temp = SAVE_EXPR_RTL (exp);
6476 if (temp && GET_CODE (temp) == REG)
6477 {
6478 put_var_into_stack (exp);
6479 temp = SAVE_EXPR_RTL (exp);
6480 }
6481 if (temp == 0 || GET_CODE (temp) != MEM)
6482 abort ();
6483 return change_address (temp, mode,
6484 fix_lexical_addr (XEXP (temp, 0), exp));
6485 }
6486 if (SAVE_EXPR_RTL (exp) == 0)
6487 {
6488 if (mode == VOIDmode)
6489 temp = const0_rtx;
6490 else
6491 temp = assign_temp (build_qualified_type (type,
6492 (TYPE_QUALS (type)
6493 | TYPE_QUAL_CONST)),
6494 3, 0, 0);
6495
6496 SAVE_EXPR_RTL (exp) = temp;
6497 if (!optimize && GET_CODE (temp) == REG)
6498 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6499 save_expr_regs);
6500
6501 /* If the mode of TEMP does not match that of the expression, it
6502 must be a promoted value. We pass store_expr a SUBREG of the
6503 wanted mode but mark it so that we know that it was already
6504 extended. Note that `unsignedp' was modified above in
6505 this case. */
6506
6507 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6508 {
6509 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6510 SUBREG_PROMOTED_VAR_P (temp) = 1;
6511 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 }
6513
6514 if (temp == const0_rtx)
6515 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6516 EXPAND_MEMORY_USE_BAD);
6517 else
6518 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6519
6520 TREE_USED (exp) = 1;
6521 }
6522
6523 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6524 must be a promoted value. We return a SUBREG of the wanted mode,
6525 but mark it so that we know that it was already extended. */
6526
6527 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6528 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6529 {
6530 /* Compute the signedness and make the proper SUBREG. */
6531 promote_mode (type, mode, &unsignedp, 0);
6532 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6533 SUBREG_PROMOTED_VAR_P (temp) = 1;
6534 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6535 return temp;
6536 }
6537
6538 return SAVE_EXPR_RTL (exp);
6539
6540 case UNSAVE_EXPR:
6541 {
6542 rtx temp;
6543 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6544 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6545 return temp;
6546 }
6547
6548 case PLACEHOLDER_EXPR:
6549 {
6550 tree placeholder_expr;
6551
6552 /* If there is an object on the head of the placeholder list,
6553 see if some object in it is of type TYPE or a pointer to it.  For
6554 further information, see tree.def. */
6555 for (placeholder_expr = placeholder_list;
6556 placeholder_expr != 0;
6557 placeholder_expr = TREE_CHAIN (placeholder_expr))
6558 {
6559 tree need_type = TYPE_MAIN_VARIANT (type);
6560 tree object = 0;
6561 tree old_list = placeholder_list;
6562 tree elt;
6563
6564 /* Find the outermost reference that is of the type we want.
6565 If none, see if any object has a type that is a pointer to
6566 the type we want. */
6567 for (elt = TREE_PURPOSE (placeholder_expr);
6568 elt != 0 && object == 0;
6569 elt
6570 = ((TREE_CODE (elt) == COMPOUND_EXPR
6571 || TREE_CODE (elt) == COND_EXPR)
6572 ? TREE_OPERAND (elt, 1)
6573 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6574 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6575 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6576 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6577 ? TREE_OPERAND (elt, 0) : 0))
6578 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6579 object = elt;
6580
6581 for (elt = TREE_PURPOSE (placeholder_expr);
6582 elt != 0 && object == 0;
6583 elt
6584 = ((TREE_CODE (elt) == COMPOUND_EXPR
6585 || TREE_CODE (elt) == COND_EXPR)
6586 ? TREE_OPERAND (elt, 1)
6587 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6589 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6590 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6591 ? TREE_OPERAND (elt, 0) : 0))
6592 if (POINTER_TYPE_P (TREE_TYPE (elt))
6593 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6594 == need_type))
6595 object = build1 (INDIRECT_REF, need_type, elt);
6596
6597 if (object != 0)
6598 {
6599 /* Expand this object skipping the list entries before
6600 it was found in case it is also a PLACEHOLDER_EXPR.
6601 In that case, we want to translate it using subsequent
6602 entries. */
6603 placeholder_list = TREE_CHAIN (placeholder_expr);
6604 temp = expand_expr (object, original_target, tmode,
6605 ro_modifier);
6606 placeholder_list = old_list;
6607 return temp;
6608 }
6609 }
6610 }
6611
6612 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6613 abort ();
6614
6615 case WITH_RECORD_EXPR:
6616 /* Put the object on the placeholder list, expand our first operand,
6617 and pop the list. */
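      /* This is the push half of the PLACEHOLDER_EXPR mechanism: the
	 PLACEHOLDER_EXPR case above searches placeholder_list for an object
	 of the needed type, and WITH_RECORD_EXPR is what supplies that
	 object, e.g. for self-referential size expressions.  */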
6618 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6619 placeholder_list);
6620 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6621 tmode, ro_modifier);
6622 placeholder_list = TREE_CHAIN (placeholder_list);
6623 return target;
6624
6625 case GOTO_EXPR:
6626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6627 expand_goto (TREE_OPERAND (exp, 0));
6628 else
6629 expand_computed_goto (TREE_OPERAND (exp, 0));
6630 return const0_rtx;
6631
6632 case EXIT_EXPR:
6633 expand_exit_loop_if_false (NULL,
6634 invert_truthvalue (TREE_OPERAND (exp, 0)));
6635 return const0_rtx;
6636
6637 case LABELED_BLOCK_EXPR:
6638 if (LABELED_BLOCK_BODY (exp))
6639 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6640 /* Should perhaps use expand_label, but this is simpler and safer. */
6641 do_pending_stack_adjust ();
6642 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6643 return const0_rtx;
6644
6645 case EXIT_BLOCK_EXPR:
6646 if (EXIT_BLOCK_RETURN (exp))
6647 sorry ("returned value in block_exit_expr");
6648 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6649 return const0_rtx;
6650
6651 case LOOP_EXPR:
6652 push_temp_slots ();
6653 expand_start_loop (1);
6654 expand_expr_stmt (TREE_OPERAND (exp, 0));
6655 expand_end_loop ();
6656 pop_temp_slots ();
6657
6658 return const0_rtx;
6659
6660 case BIND_EXPR:
6661 {
6662 tree vars = TREE_OPERAND (exp, 0);
6663 int vars_need_expansion = 0;
6664
6665 /* Need to open a binding contour here because
6666 if there are any cleanups they must be contained here. */
6667 expand_start_bindings (2);
6668
6669 /* Mark the corresponding BLOCK for output in its proper place. */
6670 if (TREE_OPERAND (exp, 2) != 0
6671 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6672 insert_block (TREE_OPERAND (exp, 2));
6673
6674 /* If VARS have not yet been expanded, expand them now. */
6675 while (vars)
6676 {
6677 if (!DECL_RTL_SET_P (vars))
6678 {
6679 vars_need_expansion = 1;
6680 expand_decl (vars);
6681 }
6682 expand_decl_init (vars);
6683 vars = TREE_CHAIN (vars);
6684 }
6685
6686 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6687
6688 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6689
6690 return temp;
6691 }
6692
6693 case RTL_EXPR:
6694 if (RTL_EXPR_SEQUENCE (exp))
6695 {
6696 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6697 abort ();
6698 emit_insns (RTL_EXPR_SEQUENCE (exp));
6699 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6700 }
6701 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6702 free_temps_for_rtl_expr (exp);
6703 return RTL_EXPR_RTL (exp);
6704
6705 case CONSTRUCTOR:
6706 /* If we don't need the result, just ensure we evaluate any
6707 subexpressions. */
6708 if (ignore)
6709 {
6710 tree elt;
6711 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6712 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6713 EXPAND_MEMORY_USE_BAD);
6714 return const0_rtx;
6715 }
6716
6717 /* All elts simple constants => refer to a constant in memory. But
6718 if this is a non-BLKmode mode, let it store a field at a time
6719 since that should make a CONST_INT or CONST_DOUBLE when we
6720 fold. Likewise, if we have a target we can use, it is best to
6721 store directly into the target unless the type is large enough
6722 that memcpy will be used. If we are making an initializer and
6723 all operands are constant, put it in memory as well. */
6724 else if ((TREE_STATIC (exp)
6725 && ((mode == BLKmode
6726 && ! (target != 0 && safe_from_p (target, exp, 1)))
6727 || TREE_ADDRESSABLE (exp)
6728 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6729 && (! MOVE_BY_PIECES_P
6730 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6731 TYPE_ALIGN (type)))
6732 && ! mostly_zeros_p (exp))))
6733 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6734 {
6735 rtx constructor = output_constant_def (exp, 1);
6736
6737 if (modifier != EXPAND_CONST_ADDRESS
6738 && modifier != EXPAND_INITIALIZER
6739 && modifier != EXPAND_SUM
6740 && (! memory_address_p (GET_MODE (constructor),
6741 XEXP (constructor, 0))
6742 || (flag_force_addr
6743 && GET_CODE (XEXP (constructor, 0)) != REG)))
6744 constructor = change_address (constructor, VOIDmode,
6745 XEXP (constructor, 0));
6746 return constructor;
6747 }
6748 else
6749 {
6750 /* Handle calls that pass values in multiple non-contiguous
6751 locations. The Irix 6 ABI has examples of this. */
6752 if (target == 0 || ! safe_from_p (target, exp, 1)
6753 || GET_CODE (target) == PARALLEL)
6754 target
6755 = assign_temp (build_qualified_type (type,
6756 (TYPE_QUALS (type)
6757 | (TREE_READONLY (exp)
6758 * TYPE_QUAL_CONST))),
6759 TREE_ADDRESSABLE (exp), 1, 1);
6760
6761 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6762 int_size_in_bytes (TREE_TYPE (exp)));
6763 return target;
6764 }
6765
6766 case INDIRECT_REF:
6767 {
6768 tree exp1 = TREE_OPERAND (exp, 0);
6769 tree index;
6770 tree string = string_constant (exp1, &index);
6771
6772 /* Try to optimize reads from const strings. */
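	/* For example, *("abc" + 1) satisfies the test below and is returned
	   directly as the character constant 'b'; no memory reference is
	   emitted.  */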
6773 if (string
6774 && TREE_CODE (string) == STRING_CST
6775 && TREE_CODE (index) == INTEGER_CST
6776 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6777 && GET_MODE_CLASS (mode) == MODE_INT
6778 && GET_MODE_SIZE (mode) == 1
6779 && modifier != EXPAND_MEMORY_USE_WO)
6780 return
6781 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6782
6783 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6784 op0 = memory_address (mode, op0);
6785
6786 if (cfun && current_function_check_memory_usage
6787 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6788 {
6789 enum memory_use_mode memory_usage;
6790 memory_usage = get_memory_usage_from_modifier (modifier);
6791
6792 if (memory_usage != MEMORY_USE_DONT)
6793 {
6794 in_check_memory_usage = 1;
6795 emit_library_call (chkr_check_addr_libfunc,
6796 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6797 Pmode, GEN_INT (int_size_in_bytes (type)),
6798 TYPE_MODE (sizetype),
6799 GEN_INT (memory_usage),
6800 TYPE_MODE (integer_type_node));
6801 in_check_memory_usage = 0;
6802 }
6803 }
6804
6805 temp = gen_rtx_MEM (mode, op0);
6806 set_mem_attributes (temp, exp, 0);
6807
6808 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6809 here, because, in C and C++, the fact that a location is accessed
6810 through a pointer to const does not mean that the value there can
6811 never change. Languages where it can never change should
6812 also set TREE_STATIC. */
6813 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6814
6815 /* If we are writing to this object and its type is a record with
6816 readonly fields, we must mark it as readonly so it will
6817 conflict with readonly references to those fields. */
6818 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6819 RTX_UNCHANGING_P (temp) = 1;
6820
6821 return temp;
6822 }
6823
6824 case ARRAY_REF:
6825 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6826 abort ();
6827
6828 {
6829 tree array = TREE_OPERAND (exp, 0);
6830 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6831 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6832 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6833 HOST_WIDE_INT i;
6834
6835 /* Optimize the special-case of a zero lower bound.
6836
6837 We convert the low_bound to sizetype to avoid some problems
6838 with constant folding. (E.g. suppose the lower bound is 1,
6839 and its mode is QI. Without the conversion, (ARRAY
6840 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6841 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6842
6843 if (! integer_zerop (low_bound))
6844 index = size_diffop (index, convert (sizetype, low_bound));
6845
6846 /* Fold an expression like: "foo"[2].
6847 This is not done in fold so it won't happen inside &.
6848 Don't fold if this is for wide characters since it's too
6849 difficult to do correctly and this is a very rare case. */
6850
6851 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6852 && TREE_CODE (array) == STRING_CST
6853 && TREE_CODE (index) == INTEGER_CST
6854 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6855 && GET_MODE_CLASS (mode) == MODE_INT
6856 && GET_MODE_SIZE (mode) == 1)
6857 return
6858 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6859
6860 /* If this is a constant index into a constant array,
6861 just get the value from the array. Handle both the cases when
6862 we have an explicit constructor and when our operand is a variable
6863 that was declared const. */
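	/* For example, with `static const int t[3] = {4, 5, 6};' and
	   optimization enabled, the reference t[1] expands directly to the
	   constant 5; no load from t's storage is emitted.  */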
6864
6865 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6866 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6867 && TREE_CODE (index) == INTEGER_CST
6868 && 0 > compare_tree_int (index,
6869 list_length (CONSTRUCTOR_ELTS
6870 (TREE_OPERAND (exp, 0)))))
6871 {
6872 tree elem;
6873
6874 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6875 i = TREE_INT_CST_LOW (index);
6876 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6877 ;
6878
6879 if (elem)
6880 return expand_expr (fold (TREE_VALUE (elem)), target,
6881 tmode, ro_modifier);
6882 }
6883
6884 else if (optimize >= 1
6885 && modifier != EXPAND_CONST_ADDRESS
6886 && modifier != EXPAND_INITIALIZER
6887 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6888 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6889 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6890 {
6891 if (TREE_CODE (index) == INTEGER_CST)
6892 {
6893 tree init = DECL_INITIAL (array);
6894
6895 if (TREE_CODE (init) == CONSTRUCTOR)
6896 {
6897 tree elem;
6898
6899 for (elem = CONSTRUCTOR_ELTS (init);
6900 (elem
6901 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6902 elem = TREE_CHAIN (elem))
6903 ;
6904
6905 if (elem && !TREE_SIDE_EFFECTS (elem))
6906 return expand_expr (fold (TREE_VALUE (elem)), target,
6907 tmode, ro_modifier);
6908 }
6909 else if (TREE_CODE (init) == STRING_CST
6910 && 0 > compare_tree_int (index,
6911 TREE_STRING_LENGTH (init)))
6912 {
6913 tree type = TREE_TYPE (TREE_TYPE (init));
6914 enum machine_mode mode = TYPE_MODE (type);
6915
6916 if (GET_MODE_CLASS (mode) == MODE_INT
6917 && GET_MODE_SIZE (mode) == 1)
6918 return (GEN_INT
6919 (TREE_STRING_POINTER
6920 (init)[TREE_INT_CST_LOW (index)]));
6921 }
6922 }
6923 }
6924 }
6925 /* Fall through. */
6926
6927 case COMPONENT_REF:
6928 case BIT_FIELD_REF:
6929 case ARRAY_RANGE_REF:
6930 /* If the operand is a CONSTRUCTOR, we can just extract the
6931 appropriate field if it is present. Don't do this if we have
6932 already written the data since we want to refer to that copy
6933 and varasm.c assumes that's what we'll do. */
6934 if (code == COMPONENT_REF
6935 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6936 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6937 {
6938 tree elt;
6939
6940 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6941 elt = TREE_CHAIN (elt))
6942 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6943 /* We can normally use the value of the field in the
6944 CONSTRUCTOR. However, if this is a bitfield in
6945 an integral mode that we can fit in a HOST_WIDE_INT,
6946 we must mask only the number of bits in the bitfield,
6947 since this is done implicitly by the constructor. If
6948 the bitfield does not meet either of those conditions,
6949 we can't do this optimization. */
6950 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6951 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6952 == MODE_INT)
6953 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6954 <= HOST_BITS_PER_WIDE_INT))))
6955 {
6956 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6957 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6958 {
6959 HOST_WIDE_INT bitsize
6960 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6961
6962 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6963 {
6964 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6965 op0 = expand_and (op0, op1, target);
6966 }
6967 else
6968 {
6969 enum machine_mode imode
6970 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6971 tree count
6972 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6973 0);
6974
6975 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6976 target, 0);
6977 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6978 target, 0);
6979 }
6980 }
6981
6982 return op0;
6983 }
6984 }
6985
6986 {
6987 enum machine_mode mode1;
6988 HOST_WIDE_INT bitsize, bitpos;
6989 tree offset;
6990 int volatilep = 0;
6991 unsigned int alignment;
6992 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6993 &mode1, &unsignedp, &volatilep,
6994 &alignment);
6995
6996 /* If we got back the original object, something is wrong. Perhaps
6997 we are evaluating an expression too early. In any event, don't
6998 infinitely recurse. */
6999 if (tem == exp)
7000 abort ();
7001
7002 /* If TEM's type is a union of variable size, pass TARGET to the inner
7003 computation, since it will need a temporary and TARGET is known
7004 to suffice.  This occurs in unchecked conversion in Ada.  */
7005
7006 op0 = expand_expr (tem,
7007 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7008 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7009 != INTEGER_CST)
7010 ? target : NULL_RTX),
7011 VOIDmode,
7012 (modifier == EXPAND_INITIALIZER
7013 || modifier == EXPAND_CONST_ADDRESS)
7014 ? modifier : EXPAND_NORMAL);
7015
7016 /* If this is a constant, put it into a register if it is a
7017 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7018 if (CONSTANT_P (op0))
7019 {
7020 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7021 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7022 && offset == 0)
7023 op0 = force_reg (mode, op0);
7024 else
7025 op0 = validize_mem (force_const_mem (mode, op0));
7026 }
7027
7028 if (offset != 0)
7029 {
7030 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7031
7032 /* If this object is in a register, put it into memory.
7033 This case can't occur in C, but can in Ada if we have
7034 unchecked conversion of an expression from a scalar type to
7035 an array or record type. */
7036 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7037 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7038 {
7039 /* If the operand is a SAVE_EXPR, we can deal with this by
7040 forcing the SAVE_EXPR into memory. */
7041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7042 put_var_into_stack (TREE_OPERAND (exp, 0));
7043 else
7044 {
7045 tree nt
7046 = build_qualified_type (TREE_TYPE (tem),
7047 (TYPE_QUALS (TREE_TYPE (tem))
7048 | TYPE_QUAL_CONST));
7049 rtx memloc = assign_temp (nt, 1, 1, 1);
7050
7051 mark_temp_addr_taken (memloc);
7052 emit_move_insn (memloc, op0);
7053 op0 = memloc;
7054 }
7055 }
7056
7057 if (GET_CODE (op0) != MEM)
7058 abort ();
7059
7060 if (GET_MODE (offset_rtx) != ptr_mode)
7061 {
7062 #ifdef POINTERS_EXTEND_UNSIGNED
7063 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7064 #else
7065 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7066 #endif
7067 }
7068
7069 	    /* A constant address in OP0 can have VOIDmode; we must not try
7070 	       to call force_reg in that case, so avoid it.  */
7071 if (GET_CODE (op0) == MEM
7072 && GET_MODE (op0) == BLKmode
7073 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7074 && bitsize != 0
7075 && (bitpos % bitsize) == 0
7076 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7077 && alignment == GET_MODE_ALIGNMENT (mode1))
7078 {
7079 rtx temp = change_address (op0, mode1,
7080 plus_constant (XEXP (op0, 0),
7081 (bitpos /
7082 BITS_PER_UNIT)));
7083 if (GET_CODE (XEXP (temp, 0)) == REG)
7084 op0 = temp;
7085 else
7086 op0 = change_address (op0, mode1,
7087 force_reg (GET_MODE (XEXP (temp, 0)),
7088 XEXP (temp, 0)));
7089 bitpos = 0;
7090 }
7091
7092 op0 = change_address (op0, VOIDmode,
7093 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7094 force_reg (ptr_mode,
7095 offset_rtx)));
7096 }
7097
7098 /* Don't forget about volatility even if this is a bitfield. */
7099 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7100 {
7101 op0 = copy_rtx (op0);
7102 MEM_VOLATILE_P (op0) = 1;
7103 }
7104
7105 /* Check the access. */
7106 if (cfun != 0 && current_function_check_memory_usage
7107 && GET_CODE (op0) == MEM)
7108 {
7109 enum memory_use_mode memory_usage;
7110 memory_usage = get_memory_usage_from_modifier (modifier);
7111
7112 if (memory_usage != MEMORY_USE_DONT)
7113 {
7114 rtx to;
7115 int size;
7116
7117 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7118 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7119
7120 /* Check the access right of the pointer. */
7121 in_check_memory_usage = 1;
7122 if (size > BITS_PER_UNIT)
7123 emit_library_call (chkr_check_addr_libfunc,
7124 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7125 Pmode, GEN_INT (size / BITS_PER_UNIT),
7126 TYPE_MODE (sizetype),
7127 GEN_INT (memory_usage),
7128 TYPE_MODE (integer_type_node));
7129 in_check_memory_usage = 0;
7130 }
7131 }
7132
7133 /* In cases where an aligned union has an unaligned object
7134 as a field, we might be extracting a BLKmode value from
7135 an integer-mode (e.g., SImode) object. Handle this case
7136 by doing the extract into an object as wide as the field
7137 (which we know to be the width of a basic mode), then
7138 storing into memory, and changing the mode to BLKmode. */
7139 if (mode1 == VOIDmode
7140 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7141 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7142 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7143 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7144 /* If the field isn't aligned enough to fetch as a memref,
7145 fetch it as a bit field. */
7146 || (mode1 != BLKmode
7147 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7148 && ((TYPE_ALIGN (TREE_TYPE (tem))
7149 < GET_MODE_ALIGNMENT (mode))
7150 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7151 /* If the type and the field are a constant size and the
7152 size of the type isn't the same size as the bitfield,
7153 we must use bitfield operations. */
7154 || (bitsize >= 0
7155 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7156 == INTEGER_CST)
7157 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7158 bitsize))
7159 || (mode == BLKmode
7160 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7161 && (TYPE_ALIGN (type) > alignment
7162 || bitpos % TYPE_ALIGN (type) != 0)))
7163 {
7164 enum machine_mode ext_mode = mode;
7165
7166 if (ext_mode == BLKmode
7167 && ! (target != 0 && GET_CODE (op0) == MEM
7168 && GET_CODE (target) == MEM
7169 && bitpos % BITS_PER_UNIT == 0))
7170 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7171
7172 if (ext_mode == BLKmode)
7173 {
7174 /* In this case, BITPOS must start at a byte boundary and
7175 TARGET, if specified, must be a MEM. */
7176 if (GET_CODE (op0) != MEM
7177 || (target != 0 && GET_CODE (target) != MEM)
7178 || bitpos % BITS_PER_UNIT != 0)
7179 abort ();
7180
7181 op0 = change_address (op0, VOIDmode,
7182 plus_constant (XEXP (op0, 0),
7183 bitpos / BITS_PER_UNIT));
7184 if (target == 0)
7185 target = assign_temp (type, 0, 1, 1);
7186
7187 emit_block_move (target, op0,
7188 bitsize == -1 ? expr_size (exp)
7189 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7190 / BITS_PER_UNIT),
7191 BITS_PER_UNIT);
7192
7193 return target;
7194 }
7195
7196 op0 = validize_mem (op0);
7197
7198 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7199 mark_reg_pointer (XEXP (op0, 0), alignment);
7200
7201 op0 = extract_bit_field (op0, bitsize, bitpos,
7202 unsignedp, target, ext_mode, ext_mode,
7203 alignment,
7204 int_size_in_bytes (TREE_TYPE (tem)));
7205
7206 /* If the result is a record type and BITSIZE is narrower than
7207 the mode of OP0, an integral mode, and this is a big endian
7208 machine, we must put the field into the high-order bits. */
7209 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7210 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7211 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7212 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7213 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7214 - bitsize),
7215 op0, 1);
7216
7217 if (mode == BLKmode)
7218 {
7219 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7220 TYPE_QUAL_CONST);
7221 rtx new = assign_temp (nt, 0, 1, 1);
7222
7223 emit_move_insn (new, op0);
7224 op0 = copy_rtx (new);
7225 PUT_MODE (op0, BLKmode);
7226 }
7227
7228 return op0;
7229 }
7230
7231 /* If the result is BLKmode, use that to access the object
7232 now as well. */
7233 if (mode == BLKmode)
7234 mode1 = BLKmode;
7235
7236 /* Get a reference to just this component. */
7237 if (modifier == EXPAND_CONST_ADDRESS
7238 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7239 {
7240 rtx new = gen_rtx_MEM (mode1,
7241 plus_constant (XEXP (op0, 0),
7242 (bitpos / BITS_PER_UNIT)));
7243
7244 MEM_COPY_ATTRIBUTES (new, op0);
7245 op0 = new;
7246 }
7247 else
7248 op0 = change_address (op0, mode1,
7249 plus_constant (XEXP (op0, 0),
7250 (bitpos / BITS_PER_UNIT)));
7251
7252 set_mem_attributes (op0, exp, 0);
7253 if (GET_CODE (XEXP (op0, 0)) == REG)
7254 mark_reg_pointer (XEXP (op0, 0), alignment);
7255
7256 MEM_VOLATILE_P (op0) |= volatilep;
7257 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7258 || modifier == EXPAND_CONST_ADDRESS
7259 || modifier == EXPAND_INITIALIZER)
7260 return op0;
7261 else if (target == 0)
7262 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7263
7264 convert_move (target, op0, unsignedp);
7265 return target;
7266 }
7267
7268 /* Intended for a reference to a buffer of a file-object in Pascal.
7269 But it's not certain that a special tree code will really be
7270 necessary for these. INDIRECT_REF might work for them. */
7271 case BUFFER_REF:
7272 abort ();
7273
7274 case IN_EXPR:
7275 {
7276 /* Pascal set IN expression.
7277
7278 Algorithm:
7279 rlo = set_low - (set_low%bits_per_word);
7280 the_word = set [ (index - rlo)/bits_per_word ];
7281 bit_index = index % bits_per_word;
7282 bitmask = 1 << bit_index;
7283 return !!(the_word & bitmask); */
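	   /* For illustration only, assuming 8-bit units: with set_low == 3
	      and index == 10, rlo == 0, so this tests bit 2 of set[1],
	      i.e. !!(set[1] & 4).  */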
7284
7285 tree set = TREE_OPERAND (exp, 0);
7286 tree index = TREE_OPERAND (exp, 1);
7287 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7288 tree set_type = TREE_TYPE (set);
7289 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7290 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7291 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7292 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7293 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7294 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7295 rtx setaddr = XEXP (setval, 0);
7296 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7297 rtx rlow;
7298 rtx diff, quo, rem, addr, bit, result;
7299
7300 /* If domain is empty, answer is no. Likewise if index is constant
7301 and out of bounds. */
7302 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7303 && TREE_CODE (set_low_bound) == INTEGER_CST
7304 && tree_int_cst_lt (set_high_bound, set_low_bound))
7305 || (TREE_CODE (index) == INTEGER_CST
7306 && TREE_CODE (set_low_bound) == INTEGER_CST
7307 && tree_int_cst_lt (index, set_low_bound))
7308 || (TREE_CODE (set_high_bound) == INTEGER_CST
7309 && TREE_CODE (index) == INTEGER_CST
7310 && tree_int_cst_lt (set_high_bound, index))))
7311 return const0_rtx;
7312
7313 if (target == 0)
7314 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7315
7316 /* If we get here, we have to generate the code for both cases
7317 (in range and out of range). */
7318
7319 op0 = gen_label_rtx ();
7320 op1 = gen_label_rtx ();
7321
7322 if (! (GET_CODE (index_val) == CONST_INT
7323 && GET_CODE (lo_r) == CONST_INT))
7324 {
7325 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7326 GET_MODE (index_val), iunsignedp, 0, op1);
7327 }
7328
7329 if (! (GET_CODE (index_val) == CONST_INT
7330 && GET_CODE (hi_r) == CONST_INT))
7331 {
7332 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7333 GET_MODE (index_val), iunsignedp, 0, op1);
7334 }
7335
7336 /* Calculate the element number of bit zero in the first word
7337 of the set. */
7338 if (GET_CODE (lo_r) == CONST_INT)
7339 rlow = GEN_INT (INTVAL (lo_r)
7340 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7341 else
7342 rlow = expand_binop (index_mode, and_optab, lo_r,
7343 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7344 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7345
7346 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7348
7349 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7350 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7351 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7352 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7353
7354 addr = memory_address (byte_mode,
7355 expand_binop (index_mode, add_optab, diff,
7356 setaddr, NULL_RTX, iunsignedp,
7357 OPTAB_LIB_WIDEN));
7358
7359 /* Extract the bit we want to examine. */
7360 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7361 gen_rtx_MEM (byte_mode, addr),
7362 make_tree (TREE_TYPE (index), rem),
7363 NULL_RTX, 1);
7364 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7365 GET_MODE (target) == byte_mode ? target : 0,
7366 1, OPTAB_LIB_WIDEN);
7367
7368 if (result != target)
7369 convert_move (target, result, 1);
7370
7371 /* Output the code to handle the out-of-range case. */
7372 emit_jump (op0);
7373 emit_label (op1);
7374 emit_move_insn (target, const0_rtx);
7375 emit_label (op0);
7376 return target;
7377 }
7378
7379 case WITH_CLEANUP_EXPR:
7380 if (RTL_EXPR_RTL (exp) == 0)
7381 {
7382 RTL_EXPR_RTL (exp)
7383 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7384 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7385
7386 /* That's it for this cleanup. */
7387 TREE_OPERAND (exp, 2) = 0;
7388 }
7389 return RTL_EXPR_RTL (exp);
7390
7391 case CLEANUP_POINT_EXPR:
7392 {
7393 /* Start a new binding layer that will keep track of all cleanup
7394 actions to be performed. */
7395 expand_start_bindings (2);
7396
7397 target_temp_slot_level = temp_slot_level;
7398
7399 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7400 /* If we're going to use this value, load it up now. */
7401 if (! ignore)
7402 op0 = force_not_mem (op0);
7403 preserve_temp_slots (op0);
7404 expand_end_bindings (NULL_TREE, 0, 0);
7405 }
7406 return op0;
7407
7408 case CALL_EXPR:
7409 /* Check for a built-in function. */
7410 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7411 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7412 == FUNCTION_DECL)
7413 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7414 {
7415 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7416 == BUILT_IN_FRONTEND)
7417 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7418 else
7419 return expand_builtin (exp, target, subtarget, tmode, ignore);
7420 }
7421
7422 return expand_call (exp, target, ignore);
7423
7424 case NON_LVALUE_EXPR:
7425 case NOP_EXPR:
7426 case CONVERT_EXPR:
7427 case REFERENCE_EXPR:
7428 if (TREE_OPERAND (exp, 0) == error_mark_node)
7429 return const0_rtx;
7430
7431 if (TREE_CODE (type) == UNION_TYPE)
7432 {
7433 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7434
7435 /* If both input and output are BLKmode, this conversion
7436 isn't actually doing anything unless we need to make the
7437 alignment stricter. */
7438 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7439 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7440 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7441 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7442 modifier);
7443
7444 if (target == 0)
7445 target = assign_temp (type, 0, 1, 1);
7446
7447 if (GET_CODE (target) == MEM)
7448 /* Store data into beginning of memory target. */
7449 store_expr (TREE_OPERAND (exp, 0),
7450 change_address (target, TYPE_MODE (valtype), 0), 0);
7451
7452 else if (GET_CODE (target) == REG)
7453 /* Store this field into a union of the proper type. */
7454 store_field (target,
7455 MIN ((int_size_in_bytes (TREE_TYPE
7456 (TREE_OPERAND (exp, 0)))
7457 * BITS_PER_UNIT),
7458 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7459 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7460 VOIDmode, 0, BITS_PER_UNIT,
7461 int_size_in_bytes (type), 0);
7462 else
7463 abort ();
7464
7465 /* Return the entire union. */
7466 return target;
7467 }
7468
7469 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7470 {
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7472 ro_modifier);
7473
7474 /* If the signedness of the conversion differs and OP0 is
7475 a promoted SUBREG, clear that indication since we now
7476 have to do the proper extension. */
7477 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7478 && GET_CODE (op0) == SUBREG)
7479 SUBREG_PROMOTED_VAR_P (op0) = 0;
7480
7481 return op0;
7482 }
7483
7484 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7485 if (GET_MODE (op0) == mode)
7486 return op0;
7487
7488 /* If OP0 is a constant, just convert it into the proper mode. */
7489 if (CONSTANT_P (op0))
7490 return
7491 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7492 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7493
7494 if (modifier == EXPAND_INITIALIZER)
7495 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7496
7497 if (target == 0)
7498 return
7499 convert_to_mode (mode, op0,
7500 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7501 else
7502 convert_move (target, op0,
7503 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7504 return target;
7505
7506 case PLUS_EXPR:
7507 /* We come here from MINUS_EXPR when the second operand is a
7508 constant. */
7509 plus_expr:
7510 this_optab = ! unsignedp && flag_trapv
7511 && (GET_MODE_CLASS(mode) == MODE_INT)
7512 ? addv_optab : add_optab;
7513
7514 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7515 something else, make sure we add the register to the constant and
7516 then to the other thing. This case can occur during strength
7517 reduction and doing it this way will produce better code if the
7518 frame pointer or argument pointer is eliminated.
7519
7520 fold-const.c will ensure that the constant is always in the inner
7521 PLUS_EXPR, so the only case we need to do anything about is if
7522 sp, ap, or fp is our second argument, in which case we must swap
7523 the innermost first argument and our second argument. */
7524
7525 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7526 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7527 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7528 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7529 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7530 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7531 {
7532 tree t = TREE_OPERAND (exp, 1);
7533
7534 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7535 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7536 }
7537
7538 /* If the result is to be ptr_mode and we are adding an integer to
7539 something, we might be forming a constant. So try to use
7540 plus_constant. If it produces a sum and we can't accept it,
7541 use force_operand. This allows P = &ARR[const] to generate
7542 efficient code on machines where a SYMBOL_REF is not a valid
7543 address.
7544
7545 If this is an EXPAND_SUM call, always return the sum. */
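	/* For illustration, assuming 4-byte ints: for P = &ARR[3] the offset
	   12 is folded into the constant address by plus_constant instead of
	   emitting an explicit addition.  */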
7546 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7547 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7548 {
7549 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7550 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7551 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7552 {
7553 rtx constant_part;
7554
7555 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7556 EXPAND_SUM);
7557 /* Use immed_double_const to ensure that the constant is
7558 truncated according to the mode of OP1, then sign extended
7559 to a HOST_WIDE_INT. Using the constant directly can result
7560 in non-canonical RTL in a 64x32 cross compile. */
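	      /* For illustration: with a 64-bit HOST_WIDE_INT and a 32-bit
		 operand mode, the constant 0x80000000 must be stored as the
		 sign-extended value -0x80000000 to remain canonical.  */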
7561 constant_part
7562 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7563 (HOST_WIDE_INT) 0,
7564 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7565 op1 = plus_constant (op1, INTVAL (constant_part));
7566 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7567 op1 = force_operand (op1, target);
7568 return op1;
7569 }
7570
7571 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7572 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7573 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7574 {
7575 rtx constant_part;
7576
7577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7578 EXPAND_SUM);
7579 if (! CONSTANT_P (op0))
7580 {
7581 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7582 VOIDmode, modifier);
7583 /* Don't go to both_summands if modifier
7584 says it's not right to return a PLUS. */
7585 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7586 goto binop2;
7587 goto both_summands;
7588 }
7589 /* Use immed_double_const to ensure that the constant is
7590 		 truncated according to the mode of OP0, then sign extended
7591 to a HOST_WIDE_INT. Using the constant directly can result
7592 in non-canonical RTL in a 64x32 cross compile. */
7593 constant_part
7594 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7595 (HOST_WIDE_INT) 0,
7596 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7597 op0 = plus_constant (op0, INTVAL (constant_part));
7598 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 op0 = force_operand (op0, target);
7600 return op0;
7601 }
7602 }
7603
7604 /* No sense saving up arithmetic to be done
7605 if it's all in the wrong mode to form part of an address.
7606 And force_operand won't know whether to sign-extend or
7607 zero-extend. */
7608 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7609 || mode != ptr_mode)
7610 goto binop;
7611
7612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7613 subtarget = 0;
7614
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7616 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7617
7618 both_summands:
7619 /* Make sure any term that's a sum with a constant comes last. */
7620 if (GET_CODE (op0) == PLUS
7621 && CONSTANT_P (XEXP (op0, 1)))
7622 {
7623 temp = op0;
7624 op0 = op1;
7625 op1 = temp;
7626 }
7627 /* If adding to a sum including a constant,
7628 associate it to put the constant outside. */
7629 if (GET_CODE (op1) == PLUS
7630 && CONSTANT_P (XEXP (op1, 1)))
7631 {
7632 rtx constant_term = const0_rtx;
7633
7634 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7635 if (temp != 0)
7636 op0 = temp;
7637 /* Ensure that MULT comes first if there is one. */
7638 else if (GET_CODE (op0) == MULT)
7639 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7640 else
7641 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7642
7643 /* Let's also eliminate constants from op0 if possible. */
7644 op0 = eliminate_constant_term (op0, &constant_term);
7645
7646 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7647 their sum should be a constant. Form it into OP1, since the
7648 result we want will then be OP0 + OP1. */
7649
7650 temp = simplify_binary_operation (PLUS, mode, constant_term,
7651 XEXP (op1, 1));
7652 if (temp != 0)
7653 op1 = temp;
7654 else
7655 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7656 }
7657
7658 /* Put a constant term last and put a multiplication first. */
7659 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7660 temp = op1, op1 = op0, op0 = temp;
7661
7662 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7663 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7664
7665 case MINUS_EXPR:
7666 /* For initializers, we are allowed to return a MINUS of two
7667 symbolic constants. Here we handle all cases when both operands
7668 are constant. */
7669 /* Handle difference of two symbolic constants,
7670 for the sake of an initializer. */
7671 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7672 && really_constant_p (TREE_OPERAND (exp, 0))
7673 && really_constant_p (TREE_OPERAND (exp, 1)))
7674 {
7675 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7676 VOIDmode, ro_modifier);
7677 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7678 VOIDmode, ro_modifier);
7679
7680 /* If the last operand is a CONST_INT, use plus_constant of
7681 the negated constant. Else make the MINUS. */
7682 if (GET_CODE (op1) == CONST_INT)
7683 return plus_constant (op0, - INTVAL (op1));
7684 else
7685 return gen_rtx_MINUS (mode, op0, op1);
7686 }
7687 /* Convert A - const to A + (-const). */
7688 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7689 {
7690 tree negated = fold (build1 (NEGATE_EXPR, type,
7691 TREE_OPERAND (exp, 1)));
7692
7693 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7694 /* If we can't negate the constant in TYPE, leave it alone and
7695 expand_binop will negate it for us. We used to try to do it
7696 here in the signed version of TYPE, but that doesn't work
7697 on POINTER_TYPEs. */;
7698 else
7699 {
7700 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7701 goto plus_expr;
7702 }
7703 }
7704 this_optab = ! unsignedp && flag_trapv
7705 && (GET_MODE_CLASS(mode) == MODE_INT)
7706 ? subv_optab : sub_optab;
7707 goto binop;
7708
7709 case MULT_EXPR:
7710 /* If first operand is constant, swap them.
7711 Thus the following special case checks need only
7712 check the second operand. */
7713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7714 {
7715 register tree t1 = TREE_OPERAND (exp, 0);
7716 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7717 TREE_OPERAND (exp, 1) = t1;
7718 }
7719
7720 /* Attempt to return something suitable for generating an
7721 indexed address, for machines that support that. */
7722
7723 if (modifier == EXPAND_SUM && mode == ptr_mode
7724 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7725 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7726 {
7727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7728 EXPAND_SUM);
7729
7730 /* Apply distributive law if OP0 is x+c. */
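	  /* For illustration: (x + 4) * 3 becomes x*3 + 12, keeping the
	     constant term outermost where it can fold into an address.  */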
7731 if (GET_CODE (op0) == PLUS
7732 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7733 return
7734 gen_rtx_PLUS
7735 (mode,
7736 gen_rtx_MULT
7737 (mode, XEXP (op0, 0),
7738 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7739 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7740 * INTVAL (XEXP (op0, 1))));
7741
7742 if (GET_CODE (op0) != REG)
7743 op0 = force_operand (op0, NULL_RTX);
7744 if (GET_CODE (op0) != REG)
7745 op0 = copy_to_mode_reg (mode, op0);
7746
7747 return
7748 gen_rtx_MULT (mode, op0,
7749 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7750 }
7751
7752 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7753 subtarget = 0;
7754
7755 /* Check for multiplying things that have been extended
7756 from a narrower type. If this machine supports multiplying
7757 in that narrower type with a result in the desired type,
7758 do it that way, and avoid the explicit type-conversion. */
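      /* For illustration: on a target with a 32x32->64 widening multiply,
	 (long long) (int) a * (long long) (int) b can be computed directly
	 from the narrow operands instead of widening each one first.  */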
7759 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7760 && TREE_CODE (type) == INTEGER_TYPE
7761 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7762 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7763 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7764 && int_fits_type_p (TREE_OPERAND (exp, 1),
7765 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7766 /* Don't use a widening multiply if a shift will do. */
7767 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7768 > HOST_BITS_PER_WIDE_INT)
7769 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7770 ||
7771 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7772 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7773 ==
7774 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7775 /* If both operands are extended, they must either both
7776 be zero-extended or both be sign-extended. */
7777 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7778 ==
7779 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7780 {
7781 enum machine_mode innermode
7782 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7783 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7784 ? smul_widen_optab : umul_widen_optab);
7785 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7786 ? umul_widen_optab : smul_widen_optab);
7787 if (mode == GET_MODE_WIDER_MODE (innermode))
7788 {
7789 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7790 {
7791 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7792 NULL_RTX, VOIDmode, 0);
7793 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7794 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7795 VOIDmode, 0);
7796 else
7797 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7798 NULL_RTX, VOIDmode, 0);
7799 goto binop2;
7800 }
7801 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7802 && innermode == word_mode)
7803 {
7804 rtx htem;
7805 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7808 op1 = convert_modes (innermode, mode,
7809 expand_expr (TREE_OPERAND (exp, 1),
7810 NULL_RTX, VOIDmode, 0),
7811 unsignedp);
7812 else
7813 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7814 NULL_RTX, VOIDmode, 0);
7815 temp = expand_binop (mode, other_optab, op0, op1, target,
7816 unsignedp, OPTAB_LIB_WIDEN);
7817 htem = expand_mult_highpart_adjust (innermode,
7818 gen_highpart (innermode, temp),
7819 op0, op1,
7820 gen_highpart (innermode, temp),
7821 unsignedp);
7822 emit_move_insn (gen_highpart (innermode, temp), htem);
7823 return temp;
7824 }
7825 }
7826 }
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7828 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7829 return expand_mult (mode, op0, op1, target, unsignedp);
7830
7831 case TRUNC_DIV_EXPR:
7832 case FLOOR_DIV_EXPR:
7833 case CEIL_DIV_EXPR:
7834 case ROUND_DIV_EXPR:
7835 case EXACT_DIV_EXPR:
7836 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7837 subtarget = 0;
7838 /* Possible optimization: compute the dividend with EXPAND_SUM
7839 	 then, if the divisor is constant, we can optimize the case
7840 	 where some terms of the dividend have coefficients divisible by it.  */
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7842 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7843 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7844
7845 case RDIV_EXPR:
7846 this_optab = flodiv_optab;
7847 goto binop;
7848
7849 case TRUNC_MOD_EXPR:
7850 case FLOOR_MOD_EXPR:
7851 case CEIL_MOD_EXPR:
7852 case ROUND_MOD_EXPR:
7853 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7854 subtarget = 0;
7855 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7856 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7857 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7858
7859 case FIX_ROUND_EXPR:
7860 case FIX_FLOOR_EXPR:
7861 case FIX_CEIL_EXPR:
7862 abort (); /* Not used for C. */
7863
7864 case FIX_TRUNC_EXPR:
7865 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7866 if (target == 0)
7867 target = gen_reg_rtx (mode);
7868 expand_fix (target, op0, unsignedp);
7869 return target;
7870
7871 case FLOAT_EXPR:
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7873 if (target == 0)
7874 target = gen_reg_rtx (mode);
7875 /* expand_float can't figure out what to do if FROM has VOIDmode.
7876 So give it the correct mode. With -O, cse will optimize this. */
7877 if (GET_MODE (op0) == VOIDmode)
7878 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7879 op0);
7880 expand_float (target, op0,
7881 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7882 return target;
7883
7884 case NEGATE_EXPR:
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7886 temp = expand_unop (mode,
7887 ! unsignedp && flag_trapv
7888 && (GET_MODE_CLASS(mode) == MODE_INT)
7889 ? negv_optab : neg_optab, op0, target, 0);
7890 if (temp == 0)
7891 abort ();
7892 return temp;
7893
7894 case ABS_EXPR:
7895 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7896
7897 /* Handle complex values specially. */
7898 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7899 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7900 return expand_complex_abs (mode, op0, target, unsignedp);
7901
7902 /* Unsigned abs is simply the operand. Testing here means we don't
7903 risk generating incorrect code below. */
7904 if (TREE_UNSIGNED (type))
7905 return op0;
7906
7907 return expand_abs (mode, op0, target, unsignedp,
7908 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7909
7910 case MAX_EXPR:
7911 case MIN_EXPR:
7912 target = original_target;
7913 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7914 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7915 || GET_MODE (target) != mode
7916 || (GET_CODE (target) == REG
7917 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7918 target = gen_reg_rtx (mode);
7919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7921
7922 /* First try to do it with a special MIN or MAX instruction.
7923 If that does not win, use a conditional jump to select the proper
7924 value. */
7925 this_optab = (TREE_UNSIGNED (type)
7926 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7927 : (code == MIN_EXPR ? smin_optab : smax_optab));
7928
7929 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7930 OPTAB_WIDEN);
7931 if (temp != 0)
7932 return temp;
7933
7934 /* At this point, a MEM target is no longer useful; we will get better
7935 code without it. */
7936
7937 if (GET_CODE (target) == MEM)
7938 target = gen_reg_rtx (mode);
7939
7940 if (target != op0)
7941 emit_move_insn (target, op0);
7942
7943 op0 = gen_label_rtx ();
7944
7945 /* If this mode is an integer too wide to compare properly,
7946 compare word by word. Rely on cse to optimize constant cases. */
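      /* For illustration: a DImode MIN/MAX on a 32-bit target that cannot
	 compare DImode values directly is handled by comparing the high and
	 low words separately via do_jump_by_parts_greater_rtx.  */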
7947 if (GET_MODE_CLASS (mode) == MODE_INT
7948 && ! can_compare_p (GE, mode, ccp_jump))
7949 {
7950 if (code == MAX_EXPR)
7951 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7952 target, op1, NULL_RTX, op0);
7953 else
7954 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7955 op1, target, NULL_RTX, op0);
7956 }
7957 else
7958 {
7959 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7960 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7961 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7962 op0);
7963 }
7964 emit_move_insn (target, op1);
7965 emit_label (op0);
7966 return target;
7967
7968 case BIT_NOT_EXPR:
7969 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7970 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7971 if (temp == 0)
7972 abort ();
7973 return temp;
7974
7975 case FFS_EXPR:
7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7977 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7978 if (temp == 0)
7979 abort ();
7980 return temp;
7981
7982 /* ??? Can optimize bitwise operations with one arg constant.
7983 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7984 and (a bitwise1 b) bitwise2 b (etc)
7985 	 but that is probably not worthwhile.  */
7986
7987 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7988 boolean values when we want in all cases to compute both of them. In
7989 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7990 as actual zero-or-1 values and then bitwise anding. In cases where
7991 there cannot be any side effects, better code would be made by
7992 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7993 how to recognize those cases. */
7994
7995 case TRUTH_AND_EXPR:
7996 case BIT_AND_EXPR:
7997 this_optab = and_optab;
7998 goto binop;
7999
8000 case TRUTH_OR_EXPR:
8001 case BIT_IOR_EXPR:
8002 this_optab = ior_optab;
8003 goto binop;
8004
8005 case TRUTH_XOR_EXPR:
8006 case BIT_XOR_EXPR:
8007 this_optab = xor_optab;
8008 goto binop;
8009
8010 case LSHIFT_EXPR:
8011 case RSHIFT_EXPR:
8012 case LROTATE_EXPR:
8013 case RROTATE_EXPR:
8014 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8015 subtarget = 0;
8016 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8017 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8018 unsignedp);
8019
8020 /* Could determine the answer when only additive constants differ. Also,
8021 the addition of one can be handled by changing the condition. */
8022 case LT_EXPR:
8023 case LE_EXPR:
8024 case GT_EXPR:
8025 case GE_EXPR:
8026 case EQ_EXPR:
8027 case NE_EXPR:
8028 case UNORDERED_EXPR:
8029 case ORDERED_EXPR:
8030 case UNLT_EXPR:
8031 case UNLE_EXPR:
8032 case UNGT_EXPR:
8033 case UNGE_EXPR:
8034 case UNEQ_EXPR:
8035 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8036 if (temp != 0)
8037 return temp;
8038
8039 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8040 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8041 && original_target
8042 && GET_CODE (original_target) == REG
8043 && (GET_MODE (original_target)
8044 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8045 {
8046 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8047 VOIDmode, 0);
8048
8049 if (temp != original_target)
8050 temp = copy_to_reg (temp);
8051
8052 op1 = gen_label_rtx ();
8053 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8054 GET_MODE (temp), unsignedp, 0, op1);
8055 emit_move_insn (temp, const1_rtx);
8056 emit_label (op1);
8057 return temp;
8058 }
8059
8060 /* If no set-flag instruction, must generate a conditional
8061 store into a temporary variable. Drop through
8062 and handle this like && and ||. */
8063
8064 case TRUTH_ANDIF_EXPR:
8065 case TRUTH_ORIF_EXPR:
8066 if (! ignore
8067 && (target == 0 || ! safe_from_p (target, exp, 1)
8068 	  /* Make sure we don't have a hard reg (such as the function's return
8069 value) live across basic blocks, if not optimizing. */
8070 || (!optimize && GET_CODE (target) == REG
8071 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8072 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8073
8074 if (target)
8075 emit_clr_insn (target);
8076
8077 op1 = gen_label_rtx ();
8078 jumpifnot (exp, op1);
8079
8080 if (target)
8081 emit_0_to_1_insn (target);
8082
8083 emit_label (op1);
8084 return ignore ? const0_rtx : target;
8085
8086 case TRUTH_NOT_EXPR:
8087 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8088 /* The parser is careful to generate TRUTH_NOT_EXPR
8089 only with operands that are always zero or one. */
8090 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8091 target, 1, OPTAB_LIB_WIDEN);
8092 if (temp == 0)
8093 abort ();
8094 return temp;
8095
8096 case COMPOUND_EXPR:
8097 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8098 emit_queue ();
8099 return expand_expr (TREE_OPERAND (exp, 1),
8100 (ignore ? const0_rtx : target),
8101 VOIDmode, 0);
8102
8103 case COND_EXPR:
8104 /* If we would have a "singleton" (see below) were it not for a
8105 conversion in each arm, bring that conversion back out. */
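	/* For illustration: p ? (int) (x + 1) : (int) x is rewritten as
	   (int) (p ? x + 1 : x), so the singleton code below can see X.  */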
8106 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8107 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8108 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8109 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8110 {
8111 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8112 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8113
8114 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8115 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8116 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8117 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8118 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8119 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8120 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8121 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8122 return expand_expr (build1 (NOP_EXPR, type,
8123 build (COND_EXPR, TREE_TYPE (iftrue),
8124 TREE_OPERAND (exp, 0),
8125 iftrue, iffalse)),
8126 target, tmode, modifier);
8127 }
8128
8129 {
8130 /* Note that COND_EXPRs whose type is a structure or union
8131 are required to be constructed to contain assignments of
8132 a temporary variable, so that we can evaluate them here
8133 for side effect only. If type is void, we must do likewise. */
8134
8135 /* If an arm of the branch requires a cleanup,
8136 only that cleanup is performed. */
8137
8138 tree singleton = 0;
8139 tree binary_op = 0, unary_op = 0;
8140
8141 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8142 convert it to our mode, if necessary. */
8143 if (integer_onep (TREE_OPERAND (exp, 1))
8144 && integer_zerop (TREE_OPERAND (exp, 2))
8145 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8146 {
8147 if (ignore)
8148 {
8149 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8150 ro_modifier);
8151 return const0_rtx;
8152 }
8153
8154 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8155 if (GET_MODE (op0) == mode)
8156 return op0;
8157
8158 if (target == 0)
8159 target = gen_reg_rtx (mode);
8160 convert_move (target, op0, unsignedp);
8161 return target;
8162 }
8163
8164 /* Check for X ? A + B : A. If we have this, we can copy A to the
8165 output and conditionally add B. Similarly for unary operations.
8166 Don't do this if X has side-effects because those side effects
8167 might affect A or B and the "?" operation is a sequence point in
8168 ANSI. (operand_equal_p tests for side effects.) */
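	/* For illustration: in p ? i + 1 : i, I is the "singleton"; we can
	   store I into the output and then conditionally add 1.  */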
8169
8170 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8171 && operand_equal_p (TREE_OPERAND (exp, 2),
8172 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8173 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8174 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8175 && operand_equal_p (TREE_OPERAND (exp, 1),
8176 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8177 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8178 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8179 && operand_equal_p (TREE_OPERAND (exp, 2),
8180 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8181 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8182 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8183 && operand_equal_p (TREE_OPERAND (exp, 1),
8184 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8185 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8186
8187 /* If we are not to produce a result, we have no target. Otherwise,
8188 if a target was specified use it; it will not be used as an
8189 intermediate target unless it is safe. If no target, use a
8190 temporary. */
8191
8192 if (ignore)
8193 temp = 0;
8194 else if (original_target
8195 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8196 || (singleton && GET_CODE (original_target) == REG
8197 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8198 && original_target == var_rtx (singleton)))
8199 && GET_MODE (original_target) == mode
8200 #ifdef HAVE_conditional_move
8201 && (! can_conditionally_move_p (mode)
8202 || GET_CODE (original_target) == REG
8203 || TREE_ADDRESSABLE (type))
8204 #endif
8205 && ! (GET_CODE (original_target) == MEM
8206 && MEM_VOLATILE_P (original_target)))
8207 temp = original_target;
8208 else if (TREE_ADDRESSABLE (type))
8209 abort ();
8210 else
8211 temp = assign_temp (type, 0, 0, 1);
8212
8213 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8214 do the test of X as a store-flag operation, do this as
8215 A + ((X != 0) << log C). Similarly for other simple binary
8216 operators. Only do for C == 1 if BRANCH_COST is low. */
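	    /* For illustration: x ? a + 4 : a can become a + ((x != 0) << 2).  */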
8217 if (temp && singleton && binary_op
8218 && (TREE_CODE (binary_op) == PLUS_EXPR
8219 || TREE_CODE (binary_op) == MINUS_EXPR
8220 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8221 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8222 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8223 : integer_onep (TREE_OPERAND (binary_op, 1)))
8224 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8225 {
8226 rtx result;
8227 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8228 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8229 ? addv_optab : add_optab)
8230 : TREE_CODE (binary_op) == MINUS_EXPR
8231 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8232 ? subv_optab : sub_optab)
8233 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8234 : xor_optab);
8235
8236 /* If we had X ? A : A + 1, do this as A + (X == 0).
8237
8238 We have to invert the truth value here and then put it
8239 back later if do_store_flag fails. We cannot simply copy
8240 TREE_OPERAND (exp, 0) to another variable and modify that
8241 because invert_truthvalue can modify the tree pointed to
8242 by its argument. */
8243 if (singleton == TREE_OPERAND (exp, 1))
8244 TREE_OPERAND (exp, 0)
8245 = invert_truthvalue (TREE_OPERAND (exp, 0));
8246
8247 result = do_store_flag (TREE_OPERAND (exp, 0),
8248 (safe_from_p (temp, singleton, 1)
8249 ? temp : NULL_RTX),
8250 mode, BRANCH_COST <= 1);
8251
8252 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8253 result = expand_shift (LSHIFT_EXPR, mode, result,
8254 build_int_2 (tree_log2
8255 (TREE_OPERAND
8256 (binary_op, 1)),
8257 0),
8258 (safe_from_p (temp, singleton, 1)
8259 ? temp : NULL_RTX), 0);
8260
8261 if (result)
8262 {
8263 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8264 return expand_binop (mode, boptab, op1, result, temp,
8265 unsignedp, OPTAB_LIB_WIDEN);
8266 }
8267 else if (singleton == TREE_OPERAND (exp, 1))
8268 TREE_OPERAND (exp, 0)
8269 = invert_truthvalue (TREE_OPERAND (exp, 0));
8270 }
8271
8272 do_pending_stack_adjust ();
8273 NO_DEFER_POP;
8274 op0 = gen_label_rtx ();
8275
8276 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8277 {
8278 if (temp != 0)
8279 {
8280 /* If the target conflicts with the other operand of the
8281 binary op, we can't use it. Also, we can't use the target
8282 if it is a hard register, because evaluating the condition
8283 might clobber it. */
8284 if ((binary_op
8285 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8286 || (GET_CODE (temp) == REG
8287 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8288 temp = gen_reg_rtx (mode);
8289 store_expr (singleton, temp, 0);
8290 }
8291 else
8292 expand_expr (singleton,
8293 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8294 if (singleton == TREE_OPERAND (exp, 1))
8295 jumpif (TREE_OPERAND (exp, 0), op0);
8296 else
8297 jumpifnot (TREE_OPERAND (exp, 0), op0);
8298
8299 start_cleanup_deferral ();
8300 if (binary_op && temp == 0)
8301 /* Just touch the other operand. */
8302 expand_expr (TREE_OPERAND (binary_op, 1),
8303 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8304 else if (binary_op)
8305 store_expr (build (TREE_CODE (binary_op), type,
8306 make_tree (type, temp),
8307 TREE_OPERAND (binary_op, 1)),
8308 temp, 0);
8309 else
8310 store_expr (build1 (TREE_CODE (unary_op), type,
8311 make_tree (type, temp)),
8312 temp, 0);
8313 op1 = op0;
8314 }
8315 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8316 comparison operator. If we have one of these cases, set the
8317 output to A, branch on A (cse will merge these two references),
8318 then set the output to FOO. */
8319 else if (temp
8320 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8321 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8322 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8323 TREE_OPERAND (exp, 1), 0)
8324 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8325 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8326 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8327 {
8328 if (GET_CODE (temp) == REG
8329 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8330 temp = gen_reg_rtx (mode);
8331 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8332 jumpif (TREE_OPERAND (exp, 0), op0);
8333
8334 start_cleanup_deferral ();
8335 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8336 op1 = op0;
8337 }
8338 else if (temp
8339 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8340 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8342 TREE_OPERAND (exp, 2), 0)
8343 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8344 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8345 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8346 {
8347 if (GET_CODE (temp) == REG
8348 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8349 temp = gen_reg_rtx (mode);
8350 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8351 jumpifnot (TREE_OPERAND (exp, 0), op0);
8352
8353 start_cleanup_deferral ();
8354 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8355 op1 = op0;
8356 }
8357 else
8358 {
8359 op1 = gen_label_rtx ();
8360 jumpifnot (TREE_OPERAND (exp, 0), op0);
8361
8362 start_cleanup_deferral ();
8363
8364 /* One branch of the cond can be void, if it never returns. For
8365 example A ? throw : E */
8366 if (temp != 0
8367 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8368 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8369 else
8370 expand_expr (TREE_OPERAND (exp, 1),
8371 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8372 end_cleanup_deferral ();
8373 emit_queue ();
8374 emit_jump_insn (gen_jump (op1));
8375 emit_barrier ();
8376 emit_label (op0);
8377 start_cleanup_deferral ();
8378 if (temp != 0
8379 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8380 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8381 else
8382 expand_expr (TREE_OPERAND (exp, 2),
8383 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8384 }
8385
8386 end_cleanup_deferral ();
8387
8388 emit_queue ();
8389 emit_label (op1);
8390 OK_DEFER_POP;
8391
8392 return temp;
8393 }
8394
8395 case TARGET_EXPR:
8396 {
8397 /* Something needs to be initialized, but we didn't know
8398 where that thing was when building the tree. For example,
8399 it could be the return value of a function, or a parameter
8400 	   to a function which is laid out on the stack, or a temporary
8401 variable which must be passed by reference.
8402
8403 We guarantee that the expression will either be constructed
8404 or copied into our original target. */
8405
8406 tree slot = TREE_OPERAND (exp, 0);
8407 tree cleanups = NULL_TREE;
8408 tree exp1;
8409
8410 if (TREE_CODE (slot) != VAR_DECL)
8411 abort ();
8412
8413 if (! ignore)
8414 target = original_target;
8415
8416 /* Set this here so that if we get a target that refers to a
8417 register variable that's already been used, put_reg_into_stack
8418 knows that it should fix up those uses. */
8419 TREE_USED (slot) = 1;
8420
8421 if (target == 0)
8422 {
8423 if (DECL_RTL_SET_P (slot))
8424 {
8425 target = DECL_RTL (slot);
8426 		/* If we have already expanded the slot, don't do
8427 it again. (mrs) */
8428 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8429 return target;
8430 }
8431 else
8432 {
8433 target = assign_temp (type, 2, 0, 1);
8434 /* All temp slots at this level must not conflict. */
8435 preserve_temp_slots (target);
8436 SET_DECL_RTL (slot, target);
8437 if (TREE_ADDRESSABLE (slot))
8438 put_var_into_stack (slot);
8439
8440 /* Since SLOT is not known to the called function
8441 to belong to its stack frame, we must build an explicit
8442 cleanup. This case occurs when we must build up a reference
8443 to pass the reference as an argument. In this case,
8444 it is very likely that such a reference need not be
8445 built here. */
8446
8447 if (TREE_OPERAND (exp, 2) == 0)
8448 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8449 cleanups = TREE_OPERAND (exp, 2);
8450 }
8451 }
8452 else
8453 {
8454 	    /* This case does occur when expanding a parameter which
8455 needs to be constructed on the stack. The target
8456 is the actual stack address that we want to initialize.
8457 The function we call will perform the cleanup in this case. */
8458
8459 /* If we have already assigned it space, use that space,
8460 	       not the target that we were passed in, as our target
8461 parameter is only a hint. */
8462 if (DECL_RTL_SET_P (slot))
8463 {
8464 target = DECL_RTL (slot);
8465 		/* If we have already expanded the slot, don't do
8466 it again. (mrs) */
8467 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8468 return target;
8469 }
8470 else
8471 {
8472 SET_DECL_RTL (slot, target);
8473 /* If we must have an addressable slot, then make sure that
8474 the RTL that we just stored in slot is OK. */
8475 if (TREE_ADDRESSABLE (slot))
8476 put_var_into_stack (slot);
8477 }
8478 }
8479
8480 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8481 /* Mark it as expanded. */
8482 TREE_OPERAND (exp, 1) = NULL_TREE;
8483
8484 store_expr (exp1, target, 0);
8485
8486 expand_decl_cleanup (NULL_TREE, cleanups);
8487
8488 return target;
8489 }
8490
8491 case INIT_EXPR:
8492 {
8493 tree lhs = TREE_OPERAND (exp, 0);
8494 tree rhs = TREE_OPERAND (exp, 1);
8495 tree noncopied_parts = 0;
8496 tree lhs_type = TREE_TYPE (lhs);
8497
8498 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8499 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8500 noncopied_parts
8501 = init_noncopied_parts (stabilize_reference (lhs),
8502 TYPE_NONCOPIED_PARTS (lhs_type));
8503
8504 while (noncopied_parts != 0)
8505 {
8506 expand_assignment (TREE_VALUE (noncopied_parts),
8507 TREE_PURPOSE (noncopied_parts), 0, 0);
8508 noncopied_parts = TREE_CHAIN (noncopied_parts);
8509 }
8510 return temp;
8511 }
8512
8513 case MODIFY_EXPR:
8514 {
8515 /* If lhs is complex, expand calls in rhs before computing it.
8516 That's so we don't compute a pointer and save it over a call.
8517 If lhs is simple, compute it first so we can give it as a
8518 target if the rhs is just a call. This avoids an extra temp and copy
8519 	   and so prevents a partial subsumption, which makes bad code.
8520 Actually we could treat component_ref's of vars like vars. */
8521
8522 tree lhs = TREE_OPERAND (exp, 0);
8523 tree rhs = TREE_OPERAND (exp, 1);
8524 tree noncopied_parts = 0;
8525 tree lhs_type = TREE_TYPE (lhs);
8526
8527 temp = 0;
8528
8529 /* Check for |= or &= of a bitfield of size one into another bitfield
8530 of size 1. In this case, (unless we need the result of the
8531 assignment) we can do this more efficiently with a
8532 test followed by an assignment, if necessary.
8533
8534 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8535 things change so we do, this code should be enhanced to
8536 support it. */
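	/* For illustration: for s.a |= t.b, with A and B one-bit fields, we
	   jump around the store when T.b is zero and otherwise store 1 into
	   S.a, instead of doing a full read-modify-write of S.a.  */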
8537 if (ignore
8538 && TREE_CODE (lhs) == COMPONENT_REF
8539 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8540 || TREE_CODE (rhs) == BIT_AND_EXPR)
8541 && TREE_OPERAND (rhs, 0) == lhs
8542 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8543 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8544 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8545 {
8546 rtx label = gen_label_rtx ();
8547
8548 do_jump (TREE_OPERAND (rhs, 1),
8549 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8550 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8551 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8552 (TREE_CODE (rhs) == BIT_IOR_EXPR
8553 ? integer_one_node
8554 : integer_zero_node)),
8555 0, 0);
8556 do_pending_stack_adjust ();
8557 emit_label (label);
8558 return const0_rtx;
8559 }
8560
8561 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8562 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8563 noncopied_parts
8564 = save_noncopied_parts (stabilize_reference (lhs),
8565 TYPE_NONCOPIED_PARTS (lhs_type));
8566
8567 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8568 while (noncopied_parts != 0)
8569 {
8570 expand_assignment (TREE_PURPOSE (noncopied_parts),
8571 TREE_VALUE (noncopied_parts), 0, 0);
8572 noncopied_parts = TREE_CHAIN (noncopied_parts);
8573 }
8574 return temp;
8575 }
8576
8577 case RETURN_EXPR:
8578 if (!TREE_OPERAND (exp, 0))
8579 expand_null_return ();
8580 else
8581 expand_return (TREE_OPERAND (exp, 0));
8582 return const0_rtx;
8583
8584 case PREINCREMENT_EXPR:
8585 case PREDECREMENT_EXPR:
8586 return expand_increment (exp, 0, ignore);
8587
8588 case POSTINCREMENT_EXPR:
8589 case POSTDECREMENT_EXPR:
8590 /* Faster to treat as pre-increment if result is not used. */
8591 return expand_increment (exp, ! ignore, ignore);
8592
8593 case ADDR_EXPR:
8594 /* If nonzero, TEMP will be set to the address of something that might
8595 be a MEM corresponding to a stack slot. */
8596 temp = 0;
8597
8598 /* Are we taking the address of a nested function? */
8599 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8600 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8601 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8602 && ! TREE_STATIC (exp))
8603 {
8604 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8605 op0 = force_operand (op0, target);
8606 }
8607 /* If we are taking the address of something erroneous, just
8608 return a zero. */
8609 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8610 return const0_rtx;
8611 else
8612 {
8613 /* We make sure to pass const0_rtx down if we came in with
8614 ignore set, to avoid doing the cleanups twice for something. */
8615 op0 = expand_expr (TREE_OPERAND (exp, 0),
8616 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8617 (modifier == EXPAND_INITIALIZER
8618 ? modifier : EXPAND_CONST_ADDRESS));
8619
8620 /* If we are going to ignore the result, OP0 will have been set
8621 to const0_rtx, so just return it. Don't get confused and
8622 think we are taking the address of the constant. */
8623 if (ignore)
8624 return op0;
8625
8626 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8627 	     clever and return a REG when given a MEM.  */
8628 op0 = protect_from_queue (op0, 1);
8629
8630 /* We would like the object in memory. If it is a constant, we can
8631 have it be statically allocated into memory. For a non-constant,
8632 we need to allocate some memory and store the value into it. */
8633
8634 if (CONSTANT_P (op0))
8635 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8636 op0);
8637 else if (GET_CODE (op0) == MEM)
8638 {
8639 mark_temp_addr_taken (op0);
8640 temp = XEXP (op0, 0);
8641 }
8642
8643 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8644 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8645 || GET_CODE (op0) == PARALLEL)
8646 {
8647 /* If this object is in a register, it must not
8648 be BLKmode. */
8649 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8650 tree nt = build_qualified_type (inner_type,
8651 (TYPE_QUALS (inner_type)
8652 | TYPE_QUAL_CONST));
8653 rtx memloc = assign_temp (nt, 1, 1, 1);
8654
8655 mark_temp_addr_taken (memloc);
8656 if (GET_CODE (op0) == PARALLEL)
8657 /* Handle calls that pass values in multiple non-contiguous
8658 locations. The Irix 6 ABI has examples of this. */
8659 emit_group_store (memloc, op0,
8660 int_size_in_bytes (inner_type),
8661 TYPE_ALIGN (inner_type));
8662 else
8663 emit_move_insn (memloc, op0);
8664 op0 = memloc;
8665 }
8666
8667 if (GET_CODE (op0) != MEM)
8668 abort ();
8669
8670 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8671 {
8672 temp = XEXP (op0, 0);
8673 #ifdef POINTERS_EXTEND_UNSIGNED
8674 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8675 && mode == ptr_mode)
8676 temp = convert_memory_address (ptr_mode, temp);
8677 #endif
8678 return temp;
8679 }
8680
8681 op0 = force_operand (XEXP (op0, 0), target);
8682 }
8683
8684 if (flag_force_addr && GET_CODE (op0) != REG)
8685 op0 = force_reg (Pmode, op0);
8686
8687 if (GET_CODE (op0) == REG
8688 && ! REG_USERVAR_P (op0))
8689 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8690
8691 /* If we might have had a temp slot, add an equivalent address
8692 for it. */
8693 if (temp != 0)
8694 update_temp_slot_address (temp, op0);
8695
8696 #ifdef POINTERS_EXTEND_UNSIGNED
8697 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8698 && mode == ptr_mode)
8699 op0 = convert_memory_address (ptr_mode, op0);
8700 #endif
8701
8702 return op0;
8703
8704 case ENTRY_VALUE_EXPR:
8705 abort ();
8706
8707 /* COMPLEX type for Extended Pascal & Fortran */
8708 case COMPLEX_EXPR:
8709 {
8710 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8711 rtx insns;
8712
8713 /* Get the rtx code of the operands. */
8714 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8715 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8716
8717 if (! target)
8718 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8719
8720 start_sequence ();
8721
8722 /* Move the real (op0) and imaginary (op1) parts to their location. */
8723 emit_move_insn (gen_realpart (mode, target), op0);
8724 emit_move_insn (gen_imagpart (mode, target), op1);
8725
8726 insns = get_insns ();
8727 end_sequence ();
8728
8729 /* Complex construction should appear as a single unit. */
8730 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8731 each with a separate pseudo as destination.
8732 It's not correct for flow to treat them as a unit. */
8733 if (GET_CODE (target) != CONCAT)
8734 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8735 else
8736 emit_insns (insns);
8737
8738 return target;
8739 }
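      /* Illustrative note, not from the original source: a front end builds
	 a COMPLEX_EXPR when it constructs a complex value from separate real
	 and imaginary operands, e.g. for Fortran's CMPLX (x, y) or GNU C
	 __complex__ arithmetic; the two moves above write the real and
	 imaginary halves of the result independently.  */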
8740
8741 case REALPART_EXPR:
8742 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8743 return gen_realpart (mode, op0);
8744
8745 case IMAGPART_EXPR:
8746 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8747 return gen_imagpart (mode, op0);
8748
8749 case CONJ_EXPR:
8750 {
8751 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8752 rtx imag_t;
8753 rtx insns;
8754
8755 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8756
8757 if (! target)
8758 target = gen_reg_rtx (mode);
8759
8760 start_sequence ();
8761
8762 /* Store the realpart and the negated imagpart to target. */
8763 emit_move_insn (gen_realpart (partmode, target),
8764 gen_realpart (partmode, op0));
8765
8766 imag_t = gen_imagpart (partmode, target);
8767 temp = expand_unop (partmode,
8768 ! unsignedp && flag_trapv
8769 && (GET_MODE_CLASS(partmode) == MODE_INT)
8770 ? negv_optab : neg_optab,
8771 gen_imagpart (partmode, op0), imag_t, 0);
8772 if (temp != imag_t)
8773 emit_move_insn (imag_t, temp);
8774
8775 insns = get_insns ();
8776 end_sequence ();
8777
8778 /* Conjugate should appear as a single unit.
8779 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8780 each with a separate pseudo as destination.
8781 It's not correct for flow to treat them as a unit. */
8782 if (GET_CODE (target) != CONCAT)
8783 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8784 else
8785 emit_insns (insns);
8786
8787 return target;
8788 }
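      /* Illustrative note, not from the original source: CONJ_EXPR is the
	 complex conjugate, e.g. GNU C's ~z on a __complex__ operand or
	 Fortran's CONJG (z); the code above copies the real part and stores
	 the negated imaginary part.  */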
8789
8790 case TRY_CATCH_EXPR:
8791 {
8792 tree handler = TREE_OPERAND (exp, 1);
8793
8794 expand_eh_region_start ();
8795
8796 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8797
8798 expand_eh_region_end_cleanup (handler);
8799
8800 return op0;
8801 }
8802
8803 case TRY_FINALLY_EXPR:
8804 {
8805 tree try_block = TREE_OPERAND (exp, 0);
8806 tree finally_block = TREE_OPERAND (exp, 1);
8807 rtx finally_label = gen_label_rtx ();
8808 rtx done_label = gen_label_rtx ();
8809 rtx return_link = gen_reg_rtx (Pmode);
8810 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8811 (tree) finally_label, (tree) return_link);
8812 TREE_SIDE_EFFECTS (cleanup) = 1;
8813
8814 /* Start a new binding layer that will keep track of all cleanup
8815 actions to be performed. */
8816 expand_start_bindings (2);
8817
8818 target_temp_slot_level = temp_slot_level;
8819
8820 expand_decl_cleanup (NULL_TREE, cleanup);
8821 op0 = expand_expr (try_block, target, tmode, modifier);
8822
8823 preserve_temp_slots (op0);
8824 expand_end_bindings (NULL_TREE, 0, 0);
8825 emit_jump (done_label);
8826 emit_label (finally_label);
8827 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8828 emit_indirect_jump (return_link);
8829 emit_label (done_label);
8830 return op0;
8831 }
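      /* Illustrative sketch, not from the original source: the code emitted
	 for TRY_FINALLY_EXPR above has this shape:

	     <try-block code>
	     goto done;
	   finally:
	     <finally-block code>
	     goto *return_link;
	   done:

	 The cleanup registered via expand_decl_cleanup makes any exit from
	 the binding contour run FINALLY as a subroutine, with its resume
	 point held in RETURN_LINK.  */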
8832
8833 case GOTO_SUBROUTINE_EXPR:
8834 {
8835 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8836 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8837 rtx return_address = gen_label_rtx ();
8838 emit_move_insn (return_link,
8839 gen_rtx_LABEL_REF (Pmode, return_address));
8840 emit_jump (subr);
8841 emit_label (return_address);
8842 return const0_rtx;
8843 }
8844
8845 case VA_ARG_EXPR:
8846 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8847
8848 case EXC_PTR_EXPR:
8849 return get_exception_pointer (cfun);
8850
8851 default:
8852 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8853 }
8854
8855 /* Here to do an ordinary binary operator, generating an instruction
8856 from the optab already placed in `this_optab'. */
8857 binop:
8858 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8859 subtarget = 0;
8860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8861 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8862 binop2:
8863 temp = expand_binop (mode, this_optab, op0, op1, target,
8864 unsignedp, OPTAB_LIB_WIDEN);
8865 if (temp == 0)
8866 abort ();
8867 return temp;
8868 }
8869 \f
8870 /* Similar to expand_expr, except that we don't specify a target, target
8871 mode, or modifier and we return the alignment of the inner type. This is
8872 used in cases where it is not necessary to align the result to the
8873 alignment of its type as long as we know the alignment of the result, for
8874 example for comparisons of BLKmode values. */
8875
8876 static rtx
8877 expand_expr_unaligned (exp, palign)
8878 register tree exp;
8879 unsigned int *palign;
8880 {
8881 register rtx op0;
8882 tree type = TREE_TYPE (exp);
8883 register enum machine_mode mode = TYPE_MODE (type);
8884
8885 /* Default the alignment we return to that of the type. */
8886 *palign = TYPE_ALIGN (type);
8887
8888 /* The only case in which we do anything special is when the resulting mode
8889 is BLKmode. */
8890 if (mode != BLKmode)
8891 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8892
8893 switch (TREE_CODE (exp))
8894 {
8895 case CONVERT_EXPR:
8896 case NOP_EXPR:
8897 case NON_LVALUE_EXPR:
8898 /* Conversions between BLKmode values don't change the underlying
8899 alignment or value. */
8900 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8901 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8902 break;
8903
8904 case ARRAY_REF:
8905 /* Much of the code for this case is copied directly from expand_expr.
8906 We need to duplicate it here because we will do something different
8907 in the fall-through case, so we need to handle the same exceptions
8908 it does. */
8909 {
8910 tree array = TREE_OPERAND (exp, 0);
8911 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8912 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8913 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8914 HOST_WIDE_INT i;
8915
8916 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8917 abort ();
8918
8919 /* Optimize the special-case of a zero lower bound.
8920
8921 We convert the low_bound to sizetype to avoid some problems
8922 with constant folding. (E.g. suppose the lower bound is 1,
8923 and its mode is QI. Without the conversion, (ARRAY
8924 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8925 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8926
8927 if (! integer_zerop (low_bound))
8928 index = size_diffop (index, convert (sizetype, low_bound));
8929
8930 /* If this is a constant index into a constant array,
8931 just get the value from the array. Handle both the cases when
8932 we have an explicit constructor and when our operand is a variable
8933 that was declared const. */
8934
8935 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8936 && host_integerp (index, 0)
8937 && 0 > compare_tree_int (index,
8938 list_length (CONSTRUCTOR_ELTS
8939 (TREE_OPERAND (exp, 0)))))
8940 {
8941 tree elem;
8942
8943 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8944 i = tree_low_cst (index, 0);
8945 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8946 ;
8947
8948 if (elem)
8949 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8950 }
8951
8952 else if (optimize >= 1
8953 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8954 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8955 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8956 {
8957 if (TREE_CODE (index) == INTEGER_CST)
8958 {
8959 tree init = DECL_INITIAL (array);
8960
8961 if (TREE_CODE (init) == CONSTRUCTOR)
8962 {
8963 tree elem;
8964
8965 for (elem = CONSTRUCTOR_ELTS (init);
8966 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8967 elem = TREE_CHAIN (elem))
8968 ;
8969
8970 if (elem)
8971 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8972 palign);
8973 }
8974 }
8975 }
8976 }
8977 /* Fall through. */
8978
8979 case COMPONENT_REF:
8980 case BIT_FIELD_REF:
8981 case ARRAY_RANGE_REF:
8982 /* If the operand is a CONSTRUCTOR, we can just extract the
8983 appropriate field if it is present. Don't do this if we have
8984 already written the data since we want to refer to that copy
8985 and varasm.c assumes that's what we'll do. */
8986 if (TREE_CODE (exp) == COMPONENT_REF
8987 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8988 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8989 {
8990 tree elt;
8991
8992 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8993 elt = TREE_CHAIN (elt))
8994 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8995 /* Note that unlike the case in expand_expr, we know this is
8996 BLKmode and hence not an integer. */
8997 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8998 }
8999
9000 {
9001 enum machine_mode mode1;
9002 HOST_WIDE_INT bitsize, bitpos;
9003 tree offset;
9004 int volatilep = 0;
9005 unsigned int alignment;
9006 int unsignedp;
9007 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9008 &mode1, &unsignedp, &volatilep,
9009 &alignment);
9010
9011 /* If we got back the original object, something is wrong. Perhaps
9012 we are evaluating an expression too early. In any event, don't
9013 infinitely recurse. */
9014 if (tem == exp)
9015 abort ();
9016
9017 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9018
9019 /* If this is a constant, put it into a register if it is a
9020 legitimate constant and OFFSET is 0, and into memory if it isn't. */
9021 if (CONSTANT_P (op0))
9022 {
9023 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9024
9025 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9026 && offset == 0)
9027 op0 = force_reg (inner_mode, op0);
9028 else
9029 op0 = validize_mem (force_const_mem (inner_mode, op0));
9030 }
9031
9032 if (offset != 0)
9033 {
9034 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9035
9036 /* If this object is in a register, put it into memory.
9037 This case can't occur in C, but can in Ada if we have
9038 unchecked conversion of an expression from a scalar type to
9039 an array or record type. */
9040 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9041 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9042 {
9043 tree nt = build_qualified_type (TREE_TYPE (tem),
9044 (TYPE_QUALS (TREE_TYPE (tem))
9045 | TYPE_QUAL_CONST));
9046 rtx memloc = assign_temp (nt, 1, 1, 1);
9047
9048 mark_temp_addr_taken (memloc);
9049 emit_move_insn (memloc, op0);
9050 op0 = memloc;
9051 }
9052
9053 if (GET_CODE (op0) != MEM)
9054 abort ();
9055
9056 if (GET_MODE (offset_rtx) != ptr_mode)
9057 {
9058 #ifdef POINTERS_EXTEND_UNSIGNED
9059 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9060 #else
9061 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9062 #endif
9063 }
9064
9065 op0 = change_address (op0, VOIDmode,
9066 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9067 force_reg (ptr_mode,
9068 offset_rtx)));
9069 }
9070
9071 /* Don't forget about volatility even if this is a bitfield. */
9072 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9073 {
9074 op0 = copy_rtx (op0);
9075 MEM_VOLATILE_P (op0) = 1;
9076 }
9077
9078 /* Check the access. */
9079 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9080 {
9081 rtx to;
9082 int size;
9083
9084 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9085 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9086
9087 /* Check the access right of the pointer. */
9088 in_check_memory_usage = 1;
9089 if (size > BITS_PER_UNIT)
9090 emit_library_call (chkr_check_addr_libfunc,
9091 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9092 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9093 TYPE_MODE (sizetype),
9094 GEN_INT (MEMORY_USE_RO),
9095 TYPE_MODE (integer_type_node));
9096 in_check_memory_usage = 0;
9097 }
9098
9099 /* In cases where an aligned union has an unaligned object
9100 as a field, we might be extracting a BLKmode value from
9101 an integer-mode (e.g., SImode) object. Handle this case
9102 by doing the extract into an object as wide as the field
9103 (which we know to be the width of a basic mode), then
9104 storing into memory, and changing the mode to BLKmode.
9105 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9106 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9107 if (mode1 == VOIDmode
9108 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9109 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9110 && (TYPE_ALIGN (type) > alignment
9111 || bitpos % TYPE_ALIGN (type) != 0)))
9112 {
9113 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9114
9115 if (ext_mode == BLKmode)
9116 {
9117 /* In this case, BITPOS must start at a byte boundary. */
9118 if (GET_CODE (op0) != MEM
9119 || bitpos % BITS_PER_UNIT != 0)
9120 abort ();
9121
9122 op0 = change_address (op0, VOIDmode,
9123 plus_constant (XEXP (op0, 0),
9124 bitpos / BITS_PER_UNIT));
9125 }
9126 else
9127 {
9128 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9129 TYPE_QUAL_CONST);
9130 rtx new = assign_temp (nt, 0, 1, 1);
9131
9132 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9133 unsignedp, NULL_RTX, ext_mode,
9134 ext_mode, alignment,
9135 int_size_in_bytes (TREE_TYPE (tem)));
9136
9137 /* If the result is a record type and BITSIZE is narrower than
9138 the mode of OP0, an integral mode, and this is a big endian
9139 machine, we must put the field into the high-order bits. */
9140 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9141 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9142 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9143 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9144 size_int (GET_MODE_BITSIZE
9145 (GET_MODE (op0))
9146 - bitsize),
9147 op0, 1);
9148
9149 emit_move_insn (new, op0);
9150 op0 = copy_rtx (new);
9151 PUT_MODE (op0, BLKmode);
9152 }
9153 }
9154 else
9155 /* Get a reference to just this component. */
9156 op0 = change_address (op0, mode1,
9157 plus_constant (XEXP (op0, 0),
9158 (bitpos / BITS_PER_UNIT)));
9159
9160 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9161
9162 /* Adjust the alignment in case the bit position is not
9163 a multiple of the alignment of the inner object. */
9164 while (bitpos % alignment != 0)
9165 alignment >>= 1;
9166
9167 if (GET_CODE (XEXP (op0, 0)) == REG)
9168 mark_reg_pointer (XEXP (op0, 0), alignment);
9169
9170 MEM_IN_STRUCT_P (op0) = 1;
9171 MEM_VOLATILE_P (op0) |= volatilep;
9172
9173 *palign = alignment;
9174 return op0;
9175 }
9176
9177 default:
9178 break;
9179
9180 }
9181
9182 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9183 }
9184 \f
9185 /* Return the tree node if ARG corresponds to a string constant, or zero
9186 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9187 in bytes within the string that ARG is accessing. The type of the
9188 offset will be `sizetype'. */
9189
9190 tree
9191 string_constant (arg, ptr_offset)
9192 tree arg;
9193 tree *ptr_offset;
9194 {
9195 STRIP_NOPS (arg);
9196
9197 if (TREE_CODE (arg) == ADDR_EXPR
9198 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9199 {
9200 *ptr_offset = size_zero_node;
9201 return TREE_OPERAND (arg, 0);
9202 }
9203 else if (TREE_CODE (arg) == PLUS_EXPR)
9204 {
9205 tree arg0 = TREE_OPERAND (arg, 0);
9206 tree arg1 = TREE_OPERAND (arg, 1);
9207
9208 STRIP_NOPS (arg0);
9209 STRIP_NOPS (arg1);
9210
9211 if (TREE_CODE (arg0) == ADDR_EXPR
9212 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9213 {
9214 *ptr_offset = convert (sizetype, arg1);
9215 return TREE_OPERAND (arg0, 0);
9216 }
9217 else if (TREE_CODE (arg1) == ADDR_EXPR
9218 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9219 {
9220 *ptr_offset = convert (sizetype, arg0);
9221 return TREE_OPERAND (arg1, 0);
9222 }
9223 }
9224
9225 return 0;
9226 }
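/* Illustrative example, not from the original source: for a call such as
   strlen ("hello" + 3), the argument tree is a PLUS_EXPR of an ADDR_EXPR of
   the STRING_CST and the constant 3, so string_constant returns the
   STRING_CST and sets *PTR_OFFSET to 3; for plain "hello" it returns the
   STRING_CST with a zero offset; anything else returns zero.  */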
9227 \f
9228 /* Expand code for a post- or pre-increment or decrement
9229 and return the RTX for the result.
9230 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9231
9232 static rtx
9233 expand_increment (exp, post, ignore)
9234 register tree exp;
9235 int post, ignore;
9236 {
9237 register rtx op0, op1;
9238 register rtx temp, value;
9239 register tree incremented = TREE_OPERAND (exp, 0);
9240 optab this_optab = add_optab;
9241 int icode;
9242 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9243 int op0_is_copy = 0;
9244 int single_insn = 0;
9245 /* 1 means we can't store into OP0 directly,
9246 because it is a subreg narrower than a word,
9247 and we don't dare clobber the rest of the word. */
9248 int bad_subreg = 0;
9249
9250 /* Stabilize any component ref that might need to be
9251 evaluated more than once below. */
9252 if (!post
9253 || TREE_CODE (incremented) == BIT_FIELD_REF
9254 || (TREE_CODE (incremented) == COMPONENT_REF
9255 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9256 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9257 incremented = stabilize_reference (incremented);
9258 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9259 ones into save exprs so that they don't accidentally get evaluated
9260 more than once by the code below. */
9261 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9262 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9263 incremented = save_expr (incremented);
9264
9265 /* Compute the operands as RTX.
9266 Note whether OP0 is the actual lvalue or a copy of it:
9267 I believe it is a copy iff it is a register or subreg
9268 and insns were generated in computing it. */
9269
9270 temp = get_last_insn ();
9271 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9272
9273 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9274 in place but instead must do sign- or zero-extension during assignment,
9275 so we copy it into a new register and let the code below use it as
9276 a copy.
9277
9278 Note that we can safely modify this SUBREG since it is known not to be
9279 shared (it was made by the expand_expr call above). */
9280
9281 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9282 {
9283 if (post)
9284 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9285 else
9286 bad_subreg = 1;
9287 }
9288 else if (GET_CODE (op0) == SUBREG
9289 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9290 {
9291 /* We cannot increment this SUBREG in place. If we are
9292 post-incrementing, get a copy of the old value. Otherwise,
9293 just mark that we cannot increment in place. */
9294 if (post)
9295 op0 = copy_to_reg (op0);
9296 else
9297 bad_subreg = 1;
9298 }
9299
9300 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9301 && temp != get_last_insn ());
9302 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9303 EXPAND_MEMORY_USE_BAD);
9304
9305 /* Decide whether incrementing or decrementing. */
9306 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9307 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9308 this_optab = sub_optab;
9309
9310 /* Convert decrement by a constant into a negative increment. */
9311 if (this_optab == sub_optab
9312 && GET_CODE (op1) == CONST_INT)
9313 {
9314 op1 = GEN_INT (-INTVAL (op1));
9315 this_optab = add_optab;
9316 }
9317
9318 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9319 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9320
9321 /* For a preincrement, see if we can do this with a single instruction. */
9322 if (!post)
9323 {
9324 icode = (int) this_optab->handlers[(int) mode].insn_code;
9325 if (icode != (int) CODE_FOR_nothing
9326 /* Make sure that OP0 is valid for operands 0 and 1
9327 of the insn we want to queue. */
9328 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9329 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9330 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9331 single_insn = 1;
9332 }
9333
9334 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9335 then we cannot just increment OP0. We must therefore contrive to
9336 increment the original value. Then, for postincrement, we can return
9337 OP0 since it is a copy of the old value. For preincrement, expand here
9338 unless we can do it with a single insn.
9339
9340 Likewise if storing directly into OP0 would clobber high bits
9341 we need to preserve (bad_subreg). */
9342 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9343 {
9344 /* This is the easiest way to increment the value wherever it is.
9345 Problems with multiple evaluation of INCREMENTED are prevented
9346 because either (1) it is a component_ref or preincrement,
9347 in which case it was stabilized above, or (2) it is an array_ref
9348 with constant index in an array in a register, which is
9349 safe to reevaluate. */
9350 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9351 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9352 ? MINUS_EXPR : PLUS_EXPR),
9353 TREE_TYPE (exp),
9354 incremented,
9355 TREE_OPERAND (exp, 1));
9356
9357 while (TREE_CODE (incremented) == NOP_EXPR
9358 || TREE_CODE (incremented) == CONVERT_EXPR)
9359 {
9360 newexp = convert (TREE_TYPE (incremented), newexp);
9361 incremented = TREE_OPERAND (incremented, 0);
9362 }
9363
9364 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9365 return post ? op0 : temp;
9366 }
9367
9368 if (post)
9369 {
9370 /* We have a true reference to the value in OP0.
9371 If there is an insn to add or subtract in this mode, queue it.
9372 Queueing the increment insn avoids the register shuffling
9373 that often results if we must increment now and first save
9374 the old value for subsequent use. */
9375
9376 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9377 op0 = stabilize (op0);
9378 #endif
9379
9380 icode = (int) this_optab->handlers[(int) mode].insn_code;
9381 if (icode != (int) CODE_FOR_nothing
9382 /* Make sure that OP0 is valid for operands 0 and 1
9383 of the insn we want to queue. */
9384 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9385 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9386 {
9387 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9388 op1 = force_reg (mode, op1);
9389
9390 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9391 }
9392 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9393 {
9394 rtx addr = (general_operand (XEXP (op0, 0), mode)
9395 ? force_reg (Pmode, XEXP (op0, 0))
9396 : copy_to_reg (XEXP (op0, 0)));
9397 rtx temp, result;
9398
9399 op0 = change_address (op0, VOIDmode, addr);
9400 temp = force_reg (GET_MODE (op0), op0);
9401 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9402 op1 = force_reg (mode, op1);
9403
9404 /* The increment queue is LIFO, thus we have to `queue'
9405 the instructions in reverse order. */
9406 enqueue_insn (op0, gen_move_insn (op0, temp));
9407 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9408 return result;
9409 }
9410 }
9411
9412 /* Preincrement, or we can't increment with one simple insn. */
9413 if (post)
9414 /* Save a copy of the value before inc or dec, to return it later. */
9415 temp = value = copy_to_reg (op0);
9416 else
9417 /* Arrange to return the incremented value. */
9418 /* Copy the rtx because expand_binop will protect from the queue,
9419 and the results of that would be invalid for us to return
9420 if our caller does emit_queue before using our result. */
9421 temp = copy_rtx (value = op0);
9422
9423 /* Increment however we can. */
9424 op1 = expand_binop (mode, this_optab, value, op1,
9425 current_function_check_memory_usage ? NULL_RTX : op0,
9426 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9427 /* Make sure the value is stored into OP0. */
9428 if (op1 != op0)
9429 emit_move_insn (op0, op1);
9430
9431 return temp;
9432 }
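/* Illustrative sketch, not from the original source: for a post-increment
   whose value is used, e.g.

       y = x++;

   the add insn is queued above, so the old value of X can be returned and
   used for Y before the queued increment is finally emitted by emit_queue;
   a pre-increment such as ++x is instead performed immediately when a
   single suitable insn exists.  */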
9433 \f
9434 /* At the start of a function, record that we have no previously-pushed
9435 arguments waiting to be popped. */
9436
9437 void
9438 init_pending_stack_adjust ()
9439 {
9440 pending_stack_adjust = 0;
9441 }
9442
9443 /* When exiting from a function, if safe, clear out any pending stack adjust
9444 so the adjustment won't get done.
9445
9446 Note, if the current function calls alloca, then it must have a
9447 frame pointer regardless of the value of flag_omit_frame_pointer. */
9448
9449 void
9450 clear_pending_stack_adjust ()
9451 {
9452 #ifdef EXIT_IGNORE_STACK
9453 if (optimize > 0
9454 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9455 && EXIT_IGNORE_STACK
9456 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9457 && ! flag_inline_functions)
9458 {
9459 stack_pointer_delta -= pending_stack_adjust;
9460 pending_stack_adjust = 0;
9461 }
9462 #endif
9463 }
9464
9465 /* Pop any previously-pushed arguments that have not been popped yet. */
9466
9467 void
9468 do_pending_stack_adjust ()
9469 {
9470 if (inhibit_defer_pop == 0)
9471 {
9472 if (pending_stack_adjust != 0)
9473 adjust_stack (GEN_INT (pending_stack_adjust));
9474 pending_stack_adjust = 0;
9475 }
9476 }
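/* Illustrative note, not from the original source: PENDING_STACK_ADJUST
   accumulates argument-popping that was deferred after calls, so a sequence
   like f (a); g (b); h (c); can be cleaned up with a single adjust_stack
   here instead of one pop per call, provided deferral was not inhibited in
   between.  */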
9477 \f
9478 /* Expand conditional expressions. */
9479
9480 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9481 LABEL is an rtx of code CODE_LABEL, in this function and all the
9482 functions here. */
9483
9484 void
9485 jumpifnot (exp, label)
9486 tree exp;
9487 rtx label;
9488 {
9489 do_jump (exp, label, NULL_RTX);
9490 }
9491
9492 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9493
9494 void
9495 jumpif (exp, label)
9496 tree exp;
9497 rtx label;
9498 {
9499 do_jump (exp, NULL_RTX, label);
9500 }
9501
9502 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9503 the result is zero, or IF_TRUE_LABEL if the result is one.
9504 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9505 meaning fall through in that case.
9506
9507 do_jump always does any pending stack adjust except when it does not
9508 actually perform a jump. An example where there is no jump
9509 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9510
9511 This function is responsible for optimizing cases such as
9512 &&, || and comparison operators in EXP. */
9513
9514 void
9515 do_jump (exp, if_false_label, if_true_label)
9516 tree exp;
9517 rtx if_false_label, if_true_label;
9518 {
9519 register enum tree_code code = TREE_CODE (exp);
9520 /* Some cases need to create a label to jump to
9521 in order to properly fall through.
9522 These cases set DROP_THROUGH_LABEL nonzero. */
9523 rtx drop_through_label = 0;
9524 rtx temp;
9525 int i;
9526 tree type;
9527 enum machine_mode mode;
9528
9529 #ifdef MAX_INTEGER_COMPUTATION_MODE
9530 check_max_integer_computation_mode (exp);
9531 #endif
9532
9533 emit_queue ();
9534
9535 switch (code)
9536 {
9537 case ERROR_MARK:
9538 break;
9539
9540 case INTEGER_CST:
9541 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9542 if (temp)
9543 emit_jump (temp);
9544 break;
9545
9546 #if 0
9547 /* This is not true with #pragma weak */
9548 case ADDR_EXPR:
9549 /* The address of something can never be zero. */
9550 if (if_true_label)
9551 emit_jump (if_true_label);
9552 break;
9553 #endif
9554
9555 case NOP_EXPR:
9556 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9557 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9558 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9559 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9560 goto normal;
9561 case CONVERT_EXPR:
9562 /* If we are narrowing the operand, we have to do the compare in the
9563 narrower mode. */
9564 if ((TYPE_PRECISION (TREE_TYPE (exp))
9565 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9566 goto normal;
9567 case NON_LVALUE_EXPR:
9568 case REFERENCE_EXPR:
9569 case ABS_EXPR:
9570 case NEGATE_EXPR:
9571 case LROTATE_EXPR:
9572 case RROTATE_EXPR:
9573 /* These cannot change zero->non-zero or vice versa. */
9574 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9575 break;
9576
9577 case WITH_RECORD_EXPR:
9578 /* Put the object on the placeholder list, recurse through our first
9579 operand, and pop the list. */
9580 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9581 placeholder_list);
9582 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9583 placeholder_list = TREE_CHAIN (placeholder_list);
9584 break;
9585
9586 #if 0
9587 /* This is never less insns than evaluating the PLUS_EXPR followed by
9588 a test and can be longer if the test is eliminated. */
9589 case PLUS_EXPR:
9590 /* Reduce to minus. */
9591 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9592 TREE_OPERAND (exp, 0),
9593 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9594 TREE_OPERAND (exp, 1))));
9595 /* Process as MINUS. */
9596 #endif
9597
9598 case MINUS_EXPR:
9599 /* Non-zero iff operands of minus differ. */
9600 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9601 TREE_OPERAND (exp, 0),
9602 TREE_OPERAND (exp, 1)),
9603 NE, NE, if_false_label, if_true_label);
9604 break;
9605
9606 case BIT_AND_EXPR:
9607 /* If we are AND'ing with a small constant, do this comparison in the
9608 smallest type that fits. If the machine doesn't have comparisons
9609 that small, it will be converted back to the wider comparison.
9610 This helps if we are testing the sign bit of a narrower object.
9611 combine can't do this for us because it can't know whether a
9612 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9613
9614 if (! SLOW_BYTE_ACCESS
9615 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9616 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9617 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9618 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9619 && (type = type_for_mode (mode, 1)) != 0
9620 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9621 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9622 != CODE_FOR_nothing))
9623 {
9624 do_jump (convert (type, exp), if_false_label, if_true_label);
9625 break;
9626 }
9627 goto normal;
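      /* Illustrative example, not from the original source: for a test like

	     if (x & 0x80) ...

	 on a 32-bit X, the constant fits in 8 bits, so the jump is retried
	 above as a QImode test of (unsigned char) (x & 0x80) when the target
	 has a QImode compare, which catches sign-bit tests of narrower
	 objects cheaply.  */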
9628
9629 case TRUTH_NOT_EXPR:
9630 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9631 break;
9632
9633 case TRUTH_ANDIF_EXPR:
9634 if (if_false_label == 0)
9635 if_false_label = drop_through_label = gen_label_rtx ();
9636 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9637 start_cleanup_deferral ();
9638 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9639 end_cleanup_deferral ();
9640 break;
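      /* Illustrative example, not from the original source: for
	 if (a && b) ..., the jump for A falls through to the jump for B only
	 when A is nonzero; both jumps share IF_FALSE_LABEL, so no 0/1 value
	 for the && is ever materialized.  */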
9641
9642 case TRUTH_ORIF_EXPR:
9643 if (if_true_label == 0)
9644 if_true_label = drop_through_label = gen_label_rtx ();
9645 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9646 start_cleanup_deferral ();
9647 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9648 end_cleanup_deferral ();
9649 break;
9650
9651 case COMPOUND_EXPR:
9652 push_temp_slots ();
9653 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9654 preserve_temp_slots (NULL_RTX);
9655 free_temp_slots ();
9656 pop_temp_slots ();
9657 emit_queue ();
9658 do_pending_stack_adjust ();
9659 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9660 break;
9661
9662 case COMPONENT_REF:
9663 case BIT_FIELD_REF:
9664 case ARRAY_REF:
9665 case ARRAY_RANGE_REF:
9666 {
9667 HOST_WIDE_INT bitsize, bitpos;
9668 int unsignedp;
9669 enum machine_mode mode;
9670 tree type;
9671 tree offset;
9672 int volatilep = 0;
9673 unsigned int alignment;
9674
9675 /* Get description of this reference. We don't actually care
9676 about the underlying object here. */
9677 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9678 &unsignedp, &volatilep, &alignment);
9679
9680 type = type_for_size (bitsize, unsignedp);
9681 if (! SLOW_BYTE_ACCESS
9682 && type != 0 && bitsize >= 0
9683 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9684 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9685 != CODE_FOR_nothing))
9686 {
9687 do_jump (convert (type, exp), if_false_label, if_true_label);
9688 break;
9689 }
9690 goto normal;
9691 }
9692
9693 case COND_EXPR:
9694 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9695 if (integer_onep (TREE_OPERAND (exp, 1))
9696 && integer_zerop (TREE_OPERAND (exp, 2)))
9697 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9698
9699 else if (integer_zerop (TREE_OPERAND (exp, 1))
9700 && integer_onep (TREE_OPERAND (exp, 2)))
9701 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9702
9703 else
9704 {
9705 register rtx label1 = gen_label_rtx ();
9706 drop_through_label = gen_label_rtx ();
9707
9708 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9709
9710 start_cleanup_deferral ();
9711 /* Now the THEN-expression. */
9712 do_jump (TREE_OPERAND (exp, 1),
9713 if_false_label ? if_false_label : drop_through_label,
9714 if_true_label ? if_true_label : drop_through_label);
9715 /* In case the do_jump just above never jumps. */
9716 do_pending_stack_adjust ();
9717 emit_label (label1);
9718
9719 /* Now the ELSE-expression. */
9720 do_jump (TREE_OPERAND (exp, 2),
9721 if_false_label ? if_false_label : drop_through_label,
9722 if_true_label ? if_true_label : drop_through_label);
9723 end_cleanup_deferral ();
9724 }
9725 break;
9726
9727 case EQ_EXPR:
9728 {
9729 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9730
9731 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9732 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9733 {
9734 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9735 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9736 do_jump
9737 (fold
9738 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9739 fold (build (EQ_EXPR, TREE_TYPE (exp),
9740 fold (build1 (REALPART_EXPR,
9741 TREE_TYPE (inner_type),
9742 exp0)),
9743 fold (build1 (REALPART_EXPR,
9744 TREE_TYPE (inner_type),
9745 exp1)))),
9746 fold (build (EQ_EXPR, TREE_TYPE (exp),
9747 fold (build1 (IMAGPART_EXPR,
9748 TREE_TYPE (inner_type),
9749 exp0)),
9750 fold (build1 (IMAGPART_EXPR,
9751 TREE_TYPE (inner_type),
9752 exp1)))))),
9753 if_false_label, if_true_label);
9754 }
9755
9756 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9757 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9758
9759 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9760 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9761 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9762 else
9763 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9764 break;
9765 }
9766
9767 case NE_EXPR:
9768 {
9769 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9770
9771 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9772 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9773 {
9774 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9775 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9776 do_jump
9777 (fold
9778 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9779 fold (build (NE_EXPR, TREE_TYPE (exp),
9780 fold (build1 (REALPART_EXPR,
9781 TREE_TYPE (inner_type),
9782 exp0)),
9783 fold (build1 (REALPART_EXPR,
9784 TREE_TYPE (inner_type),
9785 exp1)))),
9786 fold (build (NE_EXPR, TREE_TYPE (exp),
9787 fold (build1 (IMAGPART_EXPR,
9788 TREE_TYPE (inner_type),
9789 exp0)),
9790 fold (build1 (IMAGPART_EXPR,
9791 TREE_TYPE (inner_type),
9792 exp1)))))),
9793 if_false_label, if_true_label);
9794 }
9795
9796 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9797 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9798
9799 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9800 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9801 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9802 else
9803 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9804 break;
9805 }
9806
9807 case LT_EXPR:
9808 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9809 if (GET_MODE_CLASS (mode) == MODE_INT
9810 && ! can_compare_p (LT, mode, ccp_jump))
9811 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9812 else
9813 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9814 break;
9815
9816 case LE_EXPR:
9817 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9818 if (GET_MODE_CLASS (mode) == MODE_INT
9819 && ! can_compare_p (LE, mode, ccp_jump))
9820 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9821 else
9822 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9823 break;
9824
9825 case GT_EXPR:
9826 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9827 if (GET_MODE_CLASS (mode) == MODE_INT
9828 && ! can_compare_p (GT, mode, ccp_jump))
9829 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9830 else
9831 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9832 break;
9833
9834 case GE_EXPR:
9835 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9836 if (GET_MODE_CLASS (mode) == MODE_INT
9837 && ! can_compare_p (GE, mode, ccp_jump))
9838 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9839 else
9840 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9841 break;
9842
9843 case UNORDERED_EXPR:
9844 case ORDERED_EXPR:
9845 {
9846 enum rtx_code cmp, rcmp;
9847 int do_rev;
9848
9849 if (code == UNORDERED_EXPR)
9850 cmp = UNORDERED, rcmp = ORDERED;
9851 else
9852 cmp = ORDERED, rcmp = UNORDERED;
9853 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9854
9855 do_rev = 0;
9856 if (! can_compare_p (cmp, mode, ccp_jump)
9857 && (can_compare_p (rcmp, mode, ccp_jump)
9858 /* If the target doesn't provide either UNORDERED or ORDERED
9859 comparisons, canonicalize on UNORDERED for the library. */
9860 || rcmp == UNORDERED))
9861 do_rev = 1;
9862
9863 if (! do_rev)
9864 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9865 else
9866 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9867 }
9868 break;
9869
9870 {
9871 enum rtx_code rcode1;
9872 enum tree_code tcode2;
9873
9874 case UNLT_EXPR:
9875 rcode1 = UNLT;
9876 tcode2 = LT_EXPR;
9877 goto unordered_bcc;
9878 case UNLE_EXPR:
9879 rcode1 = UNLE;
9880 tcode2 = LE_EXPR;
9881 goto unordered_bcc;
9882 case UNGT_EXPR:
9883 rcode1 = UNGT;
9884 tcode2 = GT_EXPR;
9885 goto unordered_bcc;
9886 case UNGE_EXPR:
9887 rcode1 = UNGE;
9888 tcode2 = GE_EXPR;
9889 goto unordered_bcc;
9890 case UNEQ_EXPR:
9891 rcode1 = UNEQ;
9892 tcode2 = EQ_EXPR;
9893 goto unordered_bcc;
9894
9895 unordered_bcc:
9896 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9897 if (can_compare_p (rcode1, mode, ccp_jump))
9898 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9899 if_true_label);
9900 else
9901 {
9902 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9903 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9904 tree cmp0, cmp1;
9905
9906 /* If the target doesn't support combined unordered
9907 compares, decompose into UNORDERED + comparison. */
9908 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9909 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9910 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9911 do_jump (exp, if_false_label, if_true_label);
9912 }
9913 }
9914 break;
9915
9916 /* Special case:
9917 __builtin_expect (<test>, 0) and
9918 __builtin_expect (<test>, 1)
9919
9920 We need to do this here, so that <test> is not converted to a SCC
9921 operation on machines that use condition code registers and COMPARE
9922 like the PowerPC, and then the jump is done based on whether the SCC
9923 operation produced a 1 or 0. */
9924 case CALL_EXPR:
9925 /* Check for a built-in function. */
9926 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9927 {
9928 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9929 tree arglist = TREE_OPERAND (exp, 1);
9930
9931 if (TREE_CODE (fndecl) == FUNCTION_DECL
9932 && DECL_BUILT_IN (fndecl)
9933 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9934 && arglist != NULL_TREE
9935 && TREE_CHAIN (arglist) != NULL_TREE)
9936 {
9937 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9938 if_true_label);
9939
9940 if (seq != NULL_RTX)
9941 {
9942 emit_insn (seq);
9943 return;
9944 }
9945 }
9946 }
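      /* Illustrative example, not from the original source: the special case
	 above lets source such as

	     if (__builtin_expect (x == 0, 0))
	       rare_path ();

	 emit the conditional jump on X == 0 directly, with the prediction
	 attached, instead of first materializing the comparison result as a
	 0/1 value in a register.  RARE_PATH is only a placeholder name.  */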
9947 /* fall through and generate the normal code. */
9948
9949 default:
9950 normal:
9951 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9952 #if 0
9953 /* This is not needed any more and causes poor code since it causes
9954 comparisons and tests from non-SI objects to have different code
9955 sequences. */
9956 /* Copy to register to avoid generating bad insns by cse
9957 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9958 if (!cse_not_expected && GET_CODE (temp) == MEM)
9959 temp = copy_to_reg (temp);
9960 #endif
9961 do_pending_stack_adjust ();
9962 /* Do any postincrements in the expression that was tested. */
9963 emit_queue ();
9964
9965 if (GET_CODE (temp) == CONST_INT
9966 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9967 || GET_CODE (temp) == LABEL_REF)
9968 {
9969 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9970 if (target)
9971 emit_jump (target);
9972 }
9973 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9974 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9975 /* Note swapping the labels gives us not-equal. */
9976 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9977 else if (GET_MODE (temp) != VOIDmode)
9978 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9979 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9980 GET_MODE (temp), NULL_RTX, 0,
9981 if_false_label, if_true_label);
9982 else
9983 abort ();
9984 }
9985
9986 if (drop_through_label)
9987 {
9988 /* If do_jump produces code that might be jumped around,
9989 do any stack adjusts from that code, before the place
9990 where control merges in. */
9991 do_pending_stack_adjust ();
9992 emit_label (drop_through_label);
9993 }
9994 }
9995 \f
9996 /* Given a comparison expression EXP for values too wide to be compared
9997 with one insn, test the comparison and jump to the appropriate label.
9998 The code of EXP is ignored; we always test GT if SWAP is 0,
9999 and LT if SWAP is 1. */
10000
10001 static void
10002 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10003 tree exp;
10004 int swap;
10005 rtx if_false_label, if_true_label;
10006 {
10007 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10008 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10009 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10010 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10011
10012 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10013 }
10014
10015 /* Compare OP0 with OP1, word at a time, in mode MODE.
10016 UNSIGNEDP says to do unsigned comparison.
10017 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10018
10019 void
10020 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10021 enum machine_mode mode;
10022 int unsignedp;
10023 rtx op0, op1;
10024 rtx if_false_label, if_true_label;
10025 {
10026 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10027 rtx drop_through_label = 0;
10028 int i;
10029
10030 if (! if_true_label || ! if_false_label)
10031 drop_through_label = gen_label_rtx ();
10032 if (! if_true_label)
10033 if_true_label = drop_through_label;
10034 if (! if_false_label)
10035 if_false_label = drop_through_label;
10036
10037 /* Compare a word at a time, high order first. */
10038 for (i = 0; i < nwords; i++)
10039 {
10040 rtx op0_word, op1_word;
10041
10042 if (WORDS_BIG_ENDIAN)
10043 {
10044 op0_word = operand_subword_force (op0, i, mode);
10045 op1_word = operand_subword_force (op1, i, mode);
10046 }
10047 else
10048 {
10049 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10050 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10051 }
10052
10053 /* All but the high-order word must be compared as unsigned. */
10054 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10055 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10056 NULL_RTX, if_true_label);
10057
10058 /* Consider lower words only if these are equal. */
10059 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10060 NULL_RTX, 0, NULL_RTX, if_false_label);
10061 }
10062
10063 if (if_false_label)
10064 emit_jump (if_false_label);
10065 if (drop_through_label)
10066 emit_label (drop_through_label);
10067 }
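/* Illustrative example, not from the original source: comparing two DImode
   values on a 32-bit target with the loop above means comparing the high
   words with GT (signed or unsigned as requested) and jumping to
   IF_TRUE_LABEL if greater, then jumping to IF_FALSE_LABEL if they differ;
   only when the high words are equal are the low words compared, and those
   are always compared unsigned.  */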
10068
10069 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10070 with one insn, test the comparison and jump to the appropriate label. */
10071
10072 static void
10073 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10074 tree exp;
10075 rtx if_false_label, if_true_label;
10076 {
10077 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10078 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10079 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10080 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10081 int i;
10082 rtx drop_through_label = 0;
10083
10084 if (! if_false_label)
10085 drop_through_label = if_false_label = gen_label_rtx ();
10086
10087 for (i = 0; i < nwords; i++)
10088 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10089 operand_subword_force (op1, i, mode),
10090 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10091 word_mode, NULL_RTX, 0, if_false_label,
10092 NULL_RTX);
10093
10094 if (if_true_label)
10095 emit_jump (if_true_label);
10096 if (drop_through_label)
10097 emit_label (drop_through_label);
10098 }
10099 \f
10100 /* Jump according to whether OP0 is 0.
10101 We assume that OP0 has an integer mode that is too wide
10102 for the available compare insns. */
10103
10104 void
10105 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10106 rtx op0;
10107 rtx if_false_label, if_true_label;
10108 {
10109 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10110 rtx part;
10111 int i;
10112 rtx drop_through_label = 0;
10113
10114 /* The fastest way of doing this comparison on almost any machine is to
10115 "or" all the words and compare the result. If all have to be loaded
10116 from memory and this is a very wide item, it's possible this may
10117 be slower, but that's highly unlikely. */
10118
10119 part = gen_reg_rtx (word_mode);
10120 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10121 for (i = 1; i < nwords && part != 0; i++)
10122 part = expand_binop (word_mode, ior_optab, part,
10123 operand_subword_force (op0, i, GET_MODE (op0)),
10124 part, 1, OPTAB_WIDEN);
10125
10126 if (part != 0)
10127 {
10128 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10129 NULL_RTX, 0, if_false_label, if_true_label);
10130
10131 return;
10132 }
10133
10134 /* If we couldn't do the "or" simply, do this with a series of compares. */
10135 if (! if_false_label)
10136 drop_through_label = if_false_label = gen_label_rtx ();
10137
10138 for (i = 0; i < nwords; i++)
10139 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10140 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10141 if_false_label, NULL_RTX);
10142
10143 if (if_true_label)
10144 emit_jump (if_true_label);
10145
10146 if (drop_through_label)
10147 emit_label (drop_through_label);
10148 }
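/* Illustrative example, not from the original source: testing a double-word
   integer for zero with the code above ORs its word_mode pieces into one
   register and compares that single result against zero, rather than
   emitting a separate compare-and-branch for each word.  */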
10149 \f
10150 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10151 (including code to compute the values to be compared)
10152 and set (CC0) according to the result.
10153 The decision as to signed or unsigned comparison must be made by the caller.
10154
10155 We force a stack adjustment unless there are currently
10156 things pushed on the stack that aren't yet used.
10157
10158 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10159 compared.
10160
10161 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10162 size of MODE should be used. */
10163
10164 rtx
10165 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10166 register rtx op0, op1;
10167 enum rtx_code code;
10168 int unsignedp;
10169 enum machine_mode mode;
10170 rtx size;
10171 unsigned int align;
10172 {
10173 rtx tem;
10174
10175 /* If one operand is constant, make it the second one. Only do this
10176 if the other operand is not constant as well. */
10177
10178 if (swap_commutative_operands_p (op0, op1))
10179 {
10180 tem = op0;
10181 op0 = op1;
10182 op1 = tem;
10183 code = swap_condition (code);
10184 }
10185
10186 if (flag_force_mem)
10187 {
10188 op0 = force_not_mem (op0);
10189 op1 = force_not_mem (op1);
10190 }
10191
10192 do_pending_stack_adjust ();
10193
10194 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10195 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10196 return tem;
10197
10198 #if 0
10199 /* There's no need to do this now that combine.c can eliminate lots of
10200 sign extensions. This can be less efficient in certain cases on other
10201 machines. */
10202
10203 /* If this is a signed equality comparison, we can do it as an
10204 unsigned comparison since zero-extension is cheaper than sign
10205 extension and comparisons with zero are done as unsigned. This is
10206 the case even on machines that can do fast sign extension, since
10207 zero-extension is easier to combine with other operations than
10208 sign-extension is. If we are comparing against a constant, we must
10209 convert it to what it would look like unsigned. */
10210 if ((code == EQ || code == NE) && ! unsignedp
10211 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10212 {
10213 if (GET_CODE (op1) == CONST_INT
10214 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10215 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10216 unsignedp = 1;
10217 }
10218 #endif
10219
10220 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10221
10222 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10223 }
10224
10225 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10226 The decision as to signed or unsigned comparison must be made by the caller.
10227
10228 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10229 compared.
10230
10231 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10232 size of MODE should be used. */
10233
10234 void
10235 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10236 if_false_label, if_true_label)
10237 register rtx op0, op1;
10238 enum rtx_code code;
10239 int unsignedp;
10240 enum machine_mode mode;
10241 rtx size;
10242 unsigned int align;
10243 rtx if_false_label, if_true_label;
10244 {
10245 rtx tem;
10246 int dummy_true_label = 0;
10247
10248 /* Reverse the comparison if that is safe and we want to jump if it is
10249 false. */
10250 if (! if_true_label && ! FLOAT_MODE_P (mode))
10251 {
10252 if_true_label = if_false_label;
10253 if_false_label = 0;
10254 code = reverse_condition (code);
10255 }
10256
10257 /* If one operand is constant, make it the second one. Only do this
10258 if the other operand is not constant as well. */
10259
10260 if (swap_commutative_operands_p (op0, op1))
10261 {
10262 tem = op0;
10263 op0 = op1;
10264 op1 = tem;
10265 code = swap_condition (code);
10266 }
10267
10268 if (flag_force_mem)
10269 {
10270 op0 = force_not_mem (op0);
10271 op1 = force_not_mem (op1);
10272 }
10273
10274 do_pending_stack_adjust ();
10275
10276 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10277 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10278 {
10279 if (tem == const_true_rtx)
10280 {
10281 if (if_true_label)
10282 emit_jump (if_true_label);
10283 }
10284 else
10285 {
10286 if (if_false_label)
10287 emit_jump (if_false_label);
10288 }
10289 return;
10290 }
10291
10292 #if 0
10293 /* There's no need to do this now that combine.c can eliminate lots of
10294 sign extensions. This can be less efficient in certain cases on other
10295 machines. */
10296
10297 /* If this is a signed equality comparison, we can do it as an
10298 unsigned comparison since zero-extension is cheaper than sign
10299 extension and comparisons with zero are done as unsigned. This is
10300 the case even on machines that can do fast sign extension, since
10301 zero-extension is easier to combine with other operations than
10302 sign-extension is. If we are comparing against a constant, we must
10303 convert it to what it would look like unsigned. */
10304 if ((code == EQ || code == NE) && ! unsignedp
10305 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10306 {
10307 if (GET_CODE (op1) == CONST_INT
10308 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10309 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10310 unsignedp = 1;
10311 }
10312 #endif
10313
10314 if (! if_true_label)
10315 {
10316 dummy_true_label = 1;
10317 if_true_label = gen_label_rtx ();
10318 }
10319
10320 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10321 if_true_label);
10322
10323 if (if_false_label)
10324 emit_jump (if_false_label);
10325 if (dummy_true_label)
10326 emit_label (if_true_label);
10327 }
10328
10329 /* Generate code for a comparison expression EXP (including code to compute
10330 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10331 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10332 generated code will drop through.
10333 SIGNED_CODE should be the rtx operation for this comparison for
10334 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10335
10336 We force a stack adjustment unless there are currently
10337 things pushed on the stack that aren't yet used. */
10338
10339 static void
10340 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10341 if_true_label)
10342 register tree exp;
10343 enum rtx_code signed_code, unsigned_code;
10344 rtx if_false_label, if_true_label;
10345 {
10346 unsigned int align0, align1;
10347 register rtx op0, op1;
10348 register tree type;
10349 register enum machine_mode mode;
10350 int unsignedp;
10351 enum rtx_code code;
10352
10353 /* Don't crash if the comparison was erroneous. */
10354 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10355 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10356 return;
10357
10358 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10359 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10360 return;
10361
10362 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10363 mode = TYPE_MODE (type);
10364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10365 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10366 || (GET_MODE_BITSIZE (mode)
10367 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10368 1)))))))
10369 {
10370 /* op0 might have been replaced by a promoted constant, in which
10371 case the type of the second argument should be used.  */
10372 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10373 mode = TYPE_MODE (type);
10374 }
10375 unsignedp = TREE_UNSIGNED (type);
10376 code = unsignedp ? unsigned_code : signed_code;
10377
10378 #ifdef HAVE_canonicalize_funcptr_for_compare
10379 /* If function pointers need to be "canonicalized" before they can
10380 be reliably compared, then canonicalize them. */
10381 if (HAVE_canonicalize_funcptr_for_compare
10382 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10383 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10384 == FUNCTION_TYPE))
10385 {
10386 rtx new_op0 = gen_reg_rtx (mode);
10387
10388 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10389 op0 = new_op0;
10390 }
10391
10392 if (HAVE_canonicalize_funcptr_for_compare
10393 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10394 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10395 == FUNCTION_TYPE))
10396 {
10397 rtx new_op1 = gen_reg_rtx (mode);
10398
10399 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10400 op1 = new_op1;
10401 }
10402 #endif
10403
10404 /* Do any postincrements in the expression that was tested. */
10405 emit_queue ();
10406
10407 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10408 ((mode == BLKmode)
10409 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10410 MIN (align0, align1),
10411 if_false_label, if_true_label);
10412 }
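
/* A hypothetical caller sketch (the names are invented for illustration):
   a tree-level comparison is expanded by supplying both the signed and
   the unsigned RTL code, and TREE_UNSIGNED of the operand type picks
   which one is used.  With IF_TRUE_LABEL left null, the generated code
   falls through when COND_EXP is true and jumps to FAIL_LABEL otherwise.  */
#if 0
static void
example_jump_unless_less (cond_exp, fail_label)
     tree cond_exp;
     rtx fail_label;
{
  do_compare_and_jump (cond_exp, LT, LTU,
		       /* if_false_label */ fail_label,
		       /* if_true_label */ NULL_RTX);
}
#endif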
10413 \f
10414 /* Generate code to calculate EXP using a store-flag instruction
10415 and return an rtx for the result. EXP is either a comparison
10416 or a TRUTH_NOT_EXPR whose operand is a comparison.
10417
10418 If TARGET is nonzero, store the result there if convenient.
10419
10420 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10421 cheap.
10422
10423 Return zero if there is no suitable set-flag instruction
10424 available on this machine.
10425
10426 Once expand_expr has been called on the arguments of the comparison,
10427 we are committed to doing the store flag, since it is not safe to
10428 re-evaluate the expression. We emit the store-flag insn by calling
10429 emit_store_flag, but only expand the arguments if we have a reason
10430 to believe that emit_store_flag will be successful. If we think that
10431 it will, but it isn't, we have to simulate the store-flag with a
10432 set/jump/set sequence. */
10433
10434 static rtx
10435 do_store_flag (exp, target, mode, only_cheap)
10436 tree exp;
10437 rtx target;
10438 enum machine_mode mode;
10439 int only_cheap;
10440 {
10441 enum rtx_code code;
10442 tree arg0, arg1, type;
10443 tree tem;
10444 enum machine_mode operand_mode;
10445 int invert = 0;
10446 int unsignedp;
10447 rtx op0, op1;
10448 enum insn_code icode;
10449 rtx subtarget = target;
10450 rtx result, label;
10451
10452 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10453 result at the end. We can't simply invert the test since it would
10454 have already been inverted if it were valid. This case occurs for
10455 some floating-point comparisons. */
10456
10457 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10458 invert = 1, exp = TREE_OPERAND (exp, 0);
10459
10460 arg0 = TREE_OPERAND (exp, 0);
10461 arg1 = TREE_OPERAND (exp, 1);
10462
10463 /* Don't crash if the comparison was erroneous. */
10464 if (arg0 == error_mark_node || arg1 == error_mark_node)
10465 return const0_rtx;
10466
10467 type = TREE_TYPE (arg0);
10468 operand_mode = TYPE_MODE (type);
10469 unsignedp = TREE_UNSIGNED (type);
10470
10471 /* We won't bother with BLKmode store-flag operations because it would mean
10472 passing a lot of information to emit_store_flag. */
10473 if (operand_mode == BLKmode)
10474 return 0;
10475
10476 /* We won't bother with store-flag operations involving function pointers
10477 when function pointers must be canonicalized before comparisons. */
10478 #ifdef HAVE_canonicalize_funcptr_for_compare
10479 if (HAVE_canonicalize_funcptr_for_compare
10480 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10481 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10482 == FUNCTION_TYPE))
10483 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10484 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10485 == FUNCTION_TYPE))))
10486 return 0;
10487 #endif
10488
10489 STRIP_NOPS (arg0);
10490 STRIP_NOPS (arg1);
10491
10492 /* Get the rtx comparison code to use. We know that EXP is a comparison
10493 operation of some type. Some comparisons against 1 and -1 can be
10494 converted to comparisons with zero. Do so here so that the tests
10495 below will be aware that we have a comparison with zero. These
10496 tests will not catch constants in the first operand, but constants
10497 are rarely passed as the first operand. */
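  /* For example (illustrative only), "x < 1" is rewritten below as
     "x <= 0" and "x >= 1" as "x > 0", so the later tests see a
     comparison against zero.  */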
10498
10499 switch (TREE_CODE (exp))
10500 {
10501 case EQ_EXPR:
10502 code = EQ;
10503 break;
10504 case NE_EXPR:
10505 code = NE;
10506 break;
10507 case LT_EXPR:
10508 if (integer_onep (arg1))
10509 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10510 else
10511 code = unsignedp ? LTU : LT;
10512 break;
10513 case LE_EXPR:
10514 if (! unsignedp && integer_all_onesp (arg1))
10515 arg1 = integer_zero_node, code = LT;
10516 else
10517 code = unsignedp ? LEU : LE;
10518 break;
10519 case GT_EXPR:
10520 if (! unsignedp && integer_all_onesp (arg1))
10521 arg1 = integer_zero_node, code = GE;
10522 else
10523 code = unsignedp ? GTU : GT;
10524 break;
10525 case GE_EXPR:
10526 if (integer_onep (arg1))
10527 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10528 else
10529 code = unsignedp ? GEU : GE;
10530 break;
10531
10532 case UNORDERED_EXPR:
10533 code = UNORDERED;
10534 break;
10535 case ORDERED_EXPR:
10536 code = ORDERED;
10537 break;
10538 case UNLT_EXPR:
10539 code = UNLT;
10540 break;
10541 case UNLE_EXPR:
10542 code = UNLE;
10543 break;
10544 case UNGT_EXPR:
10545 code = UNGT;
10546 break;
10547 case UNGE_EXPR:
10548 code = UNGE;
10549 break;
10550 case UNEQ_EXPR:
10551 code = UNEQ;
10552 break;
10553
10554 default:
10555 abort ();
10556 }
10557
10558 /* Put a constant second. */
10559 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10560 {
10561 tem = arg0; arg0 = arg1; arg1 = tem;
10562 code = swap_condition (code);
10563 }
10564
10565 /* If this is an equality or inequality test of a single bit, we can
10566 do this by shifting the bit being tested to the low-order bit and
10567 masking the result with the constant 1. If the condition was EQ,
10568 we xor it with 1. This does not require an scc insn and is faster
10569 than an scc insn even if we have it. */
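  /* For instance (illustrative only): for a 32-bit "(x & 0x10) != 0",
     ARG0 is the BIT_AND_EXPR, ARG1 is zero and BITNUM is 4, so the
     result is computed as "(x >> 4) & 1"; for the EQ form the result is
     additionally XORed with 1.  */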
10570
10571 if ((code == NE || code == EQ)
10572 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10573 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10574 {
10575 tree inner = TREE_OPERAND (arg0, 0);
10576 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10577 int ops_unsignedp;
10578
10579 /* If INNER is a right shift by a constant count and that count plus
10580 BITNUM does not overflow, adjust BITNUM and INNER. */
10581
10582 if (TREE_CODE (inner) == RSHIFT_EXPR
10583 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10584 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10585 && bitnum < TYPE_PRECISION (type)
10586 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10587 bitnum - TYPE_PRECISION (type)))
10588 {
10589 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10590 inner = TREE_OPERAND (inner, 0);
10591 }
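      /* E.g. testing bit 1 of "(x >> 3)" becomes testing bit 4 of x.  */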
10592
10593 /* If we are going to be able to omit the AND below, we must do our
10594 operations as unsigned. If we must use the AND, we have a choice.
10595 Normally unsigned is faster, but for some machines signed is. */
10596 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10597 #ifdef LOAD_EXTEND_OP
10598 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10599 #else
10600 : 1
10601 #endif
10602 );
10603
10604 if (! get_subtarget (subtarget)
10605 || GET_MODE (subtarget) != operand_mode
10606 || ! safe_from_p (subtarget, inner, 1))
10607 subtarget = 0;
10608
10609 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10610
10611 if (bitnum != 0)
10612 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10613 size_int (bitnum), subtarget, ops_unsignedp);
10614
10615 if (GET_MODE (op0) != mode)
10616 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10617
10618 if ((code == EQ && ! invert) || (code == NE && invert))
10619 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10620 ops_unsignedp, OPTAB_LIB_WIDEN);
10621
10622 /* Put the AND last so it can combine with more things. */
10623 if (bitnum != TYPE_PRECISION (type) - 1)
10624 op0 = expand_and (op0, const1_rtx, subtarget);
10625
10626 return op0;
10627 }
10628
10629 /* Now see if we are likely to be able to do this. Return if not. */
10630 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10631 return 0;
10632
10633 icode = setcc_gen_code[(int) code];
10634 if (icode == CODE_FOR_nothing
10635 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10636 {
10637 /* We can only do this if it is one of the special cases that
10638 can be handled without an scc insn. */
10639 if ((code == LT && integer_zerop (arg1))
10640 || (! only_cheap && code == GE && integer_zerop (arg1)))
10641 ;
10642 else if (BRANCH_COST >= 0
10643 && ! only_cheap && (code == NE || code == EQ)
10644 && TREE_CODE (type) != REAL_TYPE
10645 && ((abs_optab->handlers[(int) operand_mode].insn_code
10646 != CODE_FOR_nothing)
10647 || (ffs_optab->handlers[(int) operand_mode].insn_code
10648 != CODE_FOR_nothing)))
10649 ;
10650 else
10651 return 0;
10652 }
10653
10654 if (! get_subtarget (target)
10655 || GET_MODE (subtarget) != operand_mode
10656 || ! safe_from_p (subtarget, arg1, 1))
10657 subtarget = 0;
10658
10659 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10660 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10661
10662 if (target == 0)
10663 target = gen_reg_rtx (mode);
10664
10665 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10666 because, if emit_store_flag does anything, it will succeed and
10667 OP0 and OP1 will not be used subsequently. */
10668
10669 result = emit_store_flag (target, code,
10670 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10671 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10672 operand_mode, unsignedp, 1);
10673
10674 if (result)
10675 {
10676 if (invert)
10677 result = expand_binop (mode, xor_optab, result, const1_rtx,
10678 result, 0, OPTAB_LIB_WIDEN);
10679 return result;
10680 }
10681
10682 /* If this failed, we have to do this with set/compare/jump/set code. */
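  /* Illustration (not emitted literally): for "t = (a < b)" without
     inversion, the sequence below amounts to
	t = 1;  if (a < b) goto L;  t = 0;  L:  */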
10683 if (GET_CODE (target) != REG
10684 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10685 target = gen_reg_rtx (GET_MODE (target));
10686
10687 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10688 result = compare_from_rtx (op0, op1, code, unsignedp,
10689 operand_mode, NULL_RTX, 0);
10690 if (GET_CODE (result) == CONST_INT)
10691 return (((result == const0_rtx && ! invert)
10692 || (result != const0_rtx && invert))
10693 ? const0_rtx : const1_rtx);
10694
10695 label = gen_label_rtx ();
10696 if (bcc_gen_fctn[(int) code] == 0)
10697 abort ();
10698
10699 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10700 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10701 emit_label (label);
10702
10703 return target;
10704 }
10705 \f
10706 /* Generate a tablejump instruction (used for switch statements). */
10707
10708 #ifdef HAVE_tablejump
10709
10710 /* INDEX is the value being switched on, with the lowest value
10711 in the table already subtracted.
10712 MODE is its expected mode (needed if INDEX is constant).
10713 RANGE is the length of the jump table.
10714 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10715
10716 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10717 index value is out of range. */
10718
10719 void
10720 do_tablejump (index, mode, range, table_label, default_label)
10721 rtx index, range, table_label, default_label;
10722 enum machine_mode mode;
10723 {
10724 register rtx temp, vector;
10725
10726 /* Do an unsigned comparison (in the proper mode) between the index
10727 expression and the value which represents the length of the range.
10728 Since we just finished subtracting the lower bound of the range
10729 from the index expression, this comparison allows us to simultaneously
10730 check that the original index expression value is both greater than
10731 or equal to the minimum value of the range and less than or equal to
10732 the maximum value of the range. */
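  /* For instance (illustrative only): if the original case values run
     from 3 to 7, the lower bound 3 has already been subtracted, so one
     unsigned "INDEX > RANGE" test rejects both original values below 3
     (they wrap around to large unsigned numbers) and values above 7.  */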
10733
10734 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10735 0, default_label);
10736
10737 /* If index is in range, it must fit in Pmode.
10738 Convert to Pmode so we can index with it. */
10739 if (mode != Pmode)
10740 index = convert_to_mode (Pmode, index, 1);
10741
10742 /* Don't let a MEM slip through, because then the INDEX that comes
10743 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10744 and break_out_memory_refs will go to work on it and mess it up. */
10745 #ifdef PIC_CASE_VECTOR_ADDRESS
10746 if (flag_pic && GET_CODE (index) != REG)
10747 index = copy_to_mode_reg (Pmode, index);
10748 #endif
10749
10750 /* If flag_force_addr were to affect this address
10751 it could interfere with the tricky assumptions made
10752 about addresses that contain label-refs,
10753 which may be valid only very near the tablejump itself. */
10754 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10755 GET_MODE_SIZE, because this indicates how large insns are. The other
10756 uses should all be Pmode, because they are addresses. This code
10757 could fail if addresses and insns are not the same size. */
10758 index = gen_rtx_PLUS (Pmode,
10759 gen_rtx_MULT (Pmode, index,
10760 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10761 gen_rtx_LABEL_REF (Pmode, table_label));
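  /* E.g. with 4-byte case-vector entries this forms the address
     (plus (mult index 4) (label_ref table_label)).  */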
10762 #ifdef PIC_CASE_VECTOR_ADDRESS
10763 if (flag_pic)
10764 index = PIC_CASE_VECTOR_ADDRESS (index);
10765 else
10766 #endif
10767 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10768 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10769 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10770 RTX_UNCHANGING_P (vector) = 1;
10771 convert_move (temp, vector, 0);
10772
10773 emit_jump_insn (gen_tablejump (temp, table_label));
10774
10775 /* If we are generating PIC code or if the table is PC-relative, the
10776 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10777 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10778 emit_barrier ();
10779 }
10780
10781 #endif /* HAVE_tablejump */