[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should be processed from last to first if the stack and args grow
53 in opposite directions, but only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
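/* For example, on a machine where the stack grows downward but the
   argument block grows upward, PUSH_ARGS_REVERSED is defined and the
   arguments are pushed last to first, so that the first argument ends
   up at the lowest address.  */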
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Don't check memory usage, since code is being emitted to check memory
85 usage. Used when current_function_check_memory_usage is true, to avoid
86 infinite recursion. */
87 static int in_check_memory_usage;
88
89 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
90 static tree placeholder_list = 0;
91
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
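/* In the structure below, AUTINC_TO and AUTINC_FROM are nonzero when the
   corresponding address is an auto-increment expression (or has been
   turned into one), and EXPLICIT_INC_TO / EXPLICIT_INC_FROM are +1 or -1
   when move_by_pieces instead emits an explicit add after or before each
   move.  REVERSE is nonzero when the copy runs from the highest-addressed
   piece down to the lowest.  */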
94 struct move_by_pieces
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
108
109 /* This structure is used by store_by_pieces to describe the clear to
110 be performed. */
111
112 struct store_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
121 PTR constfundata;
122 int reverse;
123 };
124
125 extern struct obstack permanent_obstack;
126
127 static rtx get_push_address PARAMS ((int));
128
129 static rtx enqueue_insn PARAMS ((rtx, rtx));
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
131 PARAMS ((unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
134 struct move_by_pieces *));
135 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
136 enum machine_mode));
137 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
138 unsigned int));
139 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
140 unsigned int));
141 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct store_by_pieces *));
144 static rtx get_subtarget PARAMS ((rtx));
145 static int is_zeros_p PARAMS ((tree));
146 static int mostly_zeros_p PARAMS ((tree));
147 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
148 HOST_WIDE_INT, enum machine_mode,
149 tree, tree, int, int));
150 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
151 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
152 HOST_WIDE_INT, enum machine_mode,
153 tree, enum machine_mode, int, tree,
154 int));
155 static enum memory_use_mode
156 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
157 static rtx var_rtx PARAMS ((tree));
158 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
159 static rtx expand_increment PARAMS ((tree, int, int));
160 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
161 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
162 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
163 rtx, rtx));
164 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
165 #ifdef PUSH_ROUNDING
166 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
167 #endif
168 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
169
170 /* Record for each mode whether we can move a register directly to or
171 from an object of that mode in memory. If we can't, we won't try
172 to use that mode directly when accessing a field of that mode. */
173
174 static char direct_load[NUM_MACHINE_MODES];
175 static char direct_store[NUM_MACHINE_MODES];
176
177 /* If a memory-to-memory move would take MOVE_RATIO or more simple
178 move-instruction sequences, we will do a movstr or libcall instead. */
179
180 #ifndef MOVE_RATIO
181 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
182 #define MOVE_RATIO 2
183 #else
184 /* If we are optimizing for space (-Os), cut down the default move ratio. */
185 #define MOVE_RATIO (optimize_size ? 3 : 15)
186 #endif
187 #endif
188
189 /* This macro is used to determine whether move_by_pieces should be called
190 to perform a structure copy. */
191 #ifndef MOVE_BY_PIECES_P
192 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
194 #endif
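/* For instance, when no movstrM pattern exists the default MOVE_RATIO is
   15 (3 when optimizing for size), so a constant-size copy is expanded
   inline only while move_by_pieces_ninsns says it needs fewer than 15
   move insns; larger copies fall through to the movstr or library-call
   paths in emit_block_move.  */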
195
196 /* This array records the insn_code of insns to perform block moves. */
197 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198
199 /* This array records the insn_code of insns to perform block clears. */
200 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201
202 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203
204 #ifndef SLOW_UNALIGNED_ACCESS
205 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
206 #endif
207 \f
208 /* This is run once per compilation to set up which modes can be used
209 directly in memory and to initialize the block move optab. */
210
211 void
212 init_expr_once ()
213 {
214 rtx insn, pat;
215 enum machine_mode mode;
216 int num_clobbers;
217 rtx mem, mem1;
218
219 start_sequence ();
220
221 /* Try indexing by frame ptr and try by stack ptr.
222 It is known that on the Convex the stack ptr isn't a valid index.
223 With luck, one or the other is valid on any machine. */
224 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
225 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226
227 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
228 pat = PATTERN (insn);
229
230 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
231 mode = (enum machine_mode) ((int) mode + 1))
232 {
233 int regno;
234 rtx reg;
235
236 direct_load[(int) mode] = direct_store[(int) mode] = 0;
237 PUT_MODE (mem, mode);
238 PUT_MODE (mem1, mode);
239
240 /* See if there is some register that can be used in this mode and
241 directly loaded or stored from memory. */
242
243 if (mode != VOIDmode && mode != BLKmode)
244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
245 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
246 regno++)
247 {
248 if (! HARD_REGNO_MODE_OK (regno, mode))
249 continue;
250
251 reg = gen_rtx_REG (mode, regno);
252
253 SET_SRC (pat) = mem;
254 SET_DEST (pat) = reg;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_load[(int) mode] = 1;
257
258 SET_SRC (pat) = mem1;
259 SET_DEST (pat) = reg;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_load[(int) mode] = 1;
262
263 SET_SRC (pat) = reg;
264 SET_DEST (pat) = mem;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_store[(int) mode] = 1;
267
268 SET_SRC (pat) = reg;
269 SET_DEST (pat) = mem1;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_store[(int) mode] = 1;
272 }
273 }
274
275 end_sequence ();
276 }
277
278 /* This is run at the start of compiling a function. */
279
280 void
281 init_expr ()
282 {
283 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
284
285 pending_chain = 0;
286 pending_stack_adjust = 0;
287 stack_pointer_delta = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
292 }
293
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
297 {
298 if (p == NULL)
299 return;
300
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
304 }
305
306 void
307 free_expr_status (f)
308 struct function *f;
309 {
310 free (f->expr);
311 f->expr = NULL;
312 }
313
314 /* Small sanity check that the queue is empty at the end of a function. */
315
316 void
317 finish_expr_for_function ()
318 {
319 if (pending_chain)
320 abort ();
321 }
322 \f
323 /* Manage the queue of increment instructions to be output
324 for POSTINCREMENT_EXPR expressions, etc. */
325
326 /* Queue up to increment (or change) VAR later. BODY says how:
327 BODY should be the same thing you would pass to emit_insn
328 to increment right away. It will go to emit_insn later on.
329
330 The value is a QUEUED expression to be used in place of VAR
331 where you want to guarantee the pre-incrementation value of VAR. */
332
333 static rtx
334 enqueue_insn (var, body)
335 rtx var, body;
336 {
337 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
338 body, pending_chain);
339 return pending_chain;
340 }
341
342 /* Use protect_from_queue to convert a QUEUED expression
343 into something that you can put immediately into an instruction.
344 If the queued incrementation has not happened yet,
345 protect_from_queue returns the variable itself.
346 If the incrementation has happened, protect_from_queue returns a temp
347 that contains a copy of the old value of the variable.
348
349 Any time an rtx which might possibly be a QUEUED is to be put
350 into an instruction, it must be passed through protect_from_queue first.
351 QUEUED expressions are not meaningful in instructions.
352
353 Do not pass a value through protect_from_queue and then hold
354 on to it for a while before putting it in an instruction!
355 If the queue is flushed in between, incorrect code will result. */
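/* A typical caller therefore looks roughly like this (a sketch, not a
   quotation of any particular caller):

	op0 = protect_from_queue (op0, 0);
	op1 = protect_from_queue (op1, 0);
	emit_insn (gen_add2_insn (op0, op1));

   with emit_queue called once no further QUEUED rtxs can be created for
   the statement being expanded.  */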
356
357 rtx
358 protect_from_queue (x, modify)
359 rtx x;
360 int modify;
361 {
362 RTX_CODE code = GET_CODE (x);
363
364 #if 0 /* A QUEUED can hang around after the queue is forced out. */
365 /* Shortcut for most common case. */
366 if (pending_chain == 0)
367 return x;
368 #endif
369
370 if (code != QUEUED)
371 {
372 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
373 use of autoincrement. Make a copy of the contents of the memory
374 location rather than a copy of the address, but not if the value is
375 of mode BLKmode. Don't modify X in place since it might be
376 shared. */
377 if (code == MEM && GET_MODE (x) != BLKmode
378 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
379 {
380 rtx y = XEXP (x, 0);
381 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
382
383 if (QUEUED_INSN (y))
384 {
385 rtx temp = gen_reg_rtx (GET_MODE (x));
386
387 emit_insn_before (gen_move_insn (temp, new),
388 QUEUED_INSN (y));
389 return temp;
390 }
391
392 /* Copy the address into a pseudo, so that the returned value
393 remains correct across calls to emit_queue. */
394 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
395 }
396
397 /* Otherwise, recursively protect the subexpressions of all
398 the kinds of rtx's that can contain a QUEUED. */
399 if (code == MEM)
400 {
401 rtx tem = protect_from_queue (XEXP (x, 0), 0);
402 if (tem != XEXP (x, 0))
403 {
404 x = copy_rtx (x);
405 XEXP (x, 0) = tem;
406 }
407 }
408 else if (code == PLUS || code == MULT)
409 {
410 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
411 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
412 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = new0;
416 XEXP (x, 1) = new1;
417 }
418 }
419 return x;
420 }
421 /* If the increment has not happened, use the variable itself. Copy it
422 into a new pseudo so that the value remains correct across calls to
423 emit_queue. */
424 if (QUEUED_INSN (x) == 0)
425 return copy_to_reg (QUEUED_VAR (x));
426 /* If the increment has happened and a pre-increment copy exists,
427 use that copy. */
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
434 QUEUED_INSN (x));
435 return QUEUED_COPY (x);
436 }
437
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
442
443 int
444 queued_subexp_p (x)
445 rtx x;
446 {
447 enum rtx_code code = GET_CODE (x);
448 switch (code)
449 {
450 case QUEUED:
451 return 1;
452 case MEM:
453 return queued_subexp_p (XEXP (x, 0));
454 case MULT:
455 case PLUS:
456 case MINUS:
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
459 default:
460 return 0;
461 }
462 }
463
464 /* Perform all the pending incrementations. */
465
466 void
467 emit_queue ()
468 {
469 rtx p;
470 while ((p = pending_chain))
471 {
472 rtx body = QUEUED_BODY (p);
473
474 if (GET_CODE (body) == SEQUENCE)
475 {
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
478 }
479 else
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
482 }
483 }
484 \f
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
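/* For example, converting a QImode register holding 0xff to an SImode
   register yields 0x000000ff when UNSIGNEDP is nonzero, and 0xffffffff
   (that is, -1) when UNSIGNEDP is zero.  */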
489
490 void
491 convert_move (to, from, unsignedp)
492 rtx to, from;
493 int unsignedp;
494 {
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
499 enum insn_code code;
500 rtx libcall;
501
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
507
508 if (to_real != from_real)
509 abort ();
510
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
513 TO here. */
514
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
522 abort ();
523
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 {
527 emit_move_insn (to, from);
528 return;
529 }
530
531 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
532 {
533 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
534 abort ();
535
536 if (VECTOR_MODE_P (to_mode))
537 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
538 else
539 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
540
541 emit_move_insn (to, from);
542 return;
543 }
544
545 if (to_real != from_real)
546 abort ();
547
548 if (to_real)
549 {
550 rtx value, insns;
551
552 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
553 {
554 /* Try converting directly if the insn is supported. */
555 if ((code = can_extend_p (to_mode, from_mode, 0))
556 != CODE_FOR_nothing)
557 {
558 emit_unop_insn (code, to, from, UNKNOWN);
559 return;
560 }
561 }
562
563 #ifdef HAVE_trunchfqf2
564 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_trunctqfqf2
571 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_truncsfqf2
578 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncdfqf2
585 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncxfqf2
592 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_trunctfqf2
599 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605
606 #ifdef HAVE_trunctqfhf2
607 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_truncsfhf2
614 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncdfhf2
621 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncxfhf2
628 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctfhf2
635 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641
642 #ifdef HAVE_truncsftqf2
643 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649 #ifdef HAVE_truncdftqf2
650 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncxftqf2
657 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_trunctftqf2
664 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670
671 #ifdef HAVE_truncdfsf2
672 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_truncxfsf2
679 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_trunctfsf2
686 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_truncxfdf2
693 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
694 {
695 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_trunctfdf2
700 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706
707 libcall = (rtx) 0;
708 switch (from_mode)
709 {
710 case SFmode:
711 switch (to_mode)
712 {
713 case DFmode:
714 libcall = extendsfdf2_libfunc;
715 break;
716
717 case XFmode:
718 libcall = extendsfxf2_libfunc;
719 break;
720
721 case TFmode:
722 libcall = extendsftf2_libfunc;
723 break;
724
725 default:
726 break;
727 }
728 break;
729
730 case DFmode:
731 switch (to_mode)
732 {
733 case SFmode:
734 libcall = truncdfsf2_libfunc;
735 break;
736
737 case XFmode:
738 libcall = extenddfxf2_libfunc;
739 break;
740
741 case TFmode:
742 libcall = extenddftf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case XFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = truncxfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = truncxfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case TFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = trunctfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = trunctfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 default:
783 break;
784 }
785
786 if (libcall == (rtx) 0)
787 /* This conversion is not implemented yet. */
788 abort ();
789
790 start_sequence ();
791 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
792 1, from, from_mode);
793 insns = get_insns ();
794 end_sequence ();
795 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
796 from));
797 return;
798 }
799
800 /* Now both modes are integers. */
801
802 /* Handle expanding beyond a word. */
803 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
804 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
805 {
806 rtx insns;
807 rtx lowpart;
808 rtx fill_value;
809 rtx lowfrom;
810 int i;
811 enum machine_mode lowpart_mode;
812 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
813
814 /* Try converting directly if the insn is supported. */
815 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
816 != CODE_FOR_nothing)
817 {
818 /* If FROM is a SUBREG, put it into a register. Do this
819 so that we always generate the same set of insns for
820 better cse'ing; if an intermediate assignment occurred,
821 we won't be doing the operation directly on the SUBREG. */
822 if (optimize > 0 && GET_CODE (from) == SUBREG)
823 from = force_reg (from_mode, from);
824 emit_unop_insn (code, to, from, equiv_code);
825 return;
826 }
827 /* Next, try converting via full word. */
828 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
829 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
830 != CODE_FOR_nothing))
831 {
832 if (GET_CODE (to) == REG)
833 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
834 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
835 emit_unop_insn (code, to,
836 gen_lowpart (word_mode, to), equiv_code);
837 return;
838 }
839
840 /* No special multiword conversion insn; do it by hand. */
841 start_sequence ();
842
843 /* Since we will turn this into a no conflict block, we must ensure
844 that the source does not overlap the target. */
845
846 if (reg_overlap_mentioned_p (to, from))
847 from = force_reg (from_mode, from);
848
849 /* Get a copy of FROM widened to a word, if necessary. */
850 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
851 lowpart_mode = word_mode;
852 else
853 lowpart_mode = from_mode;
854
855 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
856
857 lowpart = gen_lowpart (lowpart_mode, to);
858 emit_move_insn (lowpart, lowfrom);
859
860 /* Compute the value to put in each remaining word. */
861 if (unsignedp)
862 fill_value = const0_rtx;
863 else
864 {
865 #ifdef HAVE_slt
866 if (HAVE_slt
867 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
868 && STORE_FLAG_VALUE == -1)
869 {
870 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
871 lowpart_mode, 0);
872 fill_value = gen_reg_rtx (word_mode);
873 emit_insn (gen_slt (fill_value));
874 }
875 else
876 #endif
877 {
878 fill_value
879 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
880 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
881 NULL_RTX, 0);
882 fill_value = convert_to_mode (word_mode, fill_value, 1);
883 }
884 }
885
886 /* Fill the remaining words. */
887 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
888 {
889 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
890 rtx subword = operand_subword (to, index, 1, to_mode);
891
892 if (subword == 0)
893 abort ();
894
895 if (fill_value != subword)
896 emit_move_insn (subword, fill_value);
897 }
898
899 insns = get_insns ();
900 end_sequence ();
901
902 emit_no_conflict_block (insns, to, from, NULL_RTX,
903 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
904 return;
905 }
906
907 /* Truncating multi-word to a word or less. */
908 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
909 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
910 {
911 if (!((GET_CODE (from) == MEM
912 && ! MEM_VOLATILE_P (from)
913 && direct_load[(int) to_mode]
914 && ! mode_dependent_address_p (XEXP (from, 0)))
915 || GET_CODE (from) == REG
916 || GET_CODE (from) == SUBREG))
917 from = force_reg (from_mode, from);
918 convert_move (to, gen_lowpart (word_mode, from), 0);
919 return;
920 }
921
922 /* Handle pointer conversion. */ /* SPEE 900220. */
923 if (to_mode == PQImode)
924 {
925 if (from_mode != QImode)
926 from = convert_to_mode (QImode, from, unsignedp);
927
928 #ifdef HAVE_truncqipqi2
929 if (HAVE_truncqipqi2)
930 {
931 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
932 return;
933 }
934 #endif /* HAVE_truncqipqi2 */
935 abort ();
936 }
937
938 if (from_mode == PQImode)
939 {
940 if (to_mode != QImode)
941 {
942 from = convert_to_mode (QImode, from, unsignedp);
943 from_mode = QImode;
944 }
945 else
946 {
947 #ifdef HAVE_extendpqiqi2
948 if (HAVE_extendpqiqi2)
949 {
950 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
951 return;
952 }
953 #endif /* HAVE_extendpqiqi2 */
954 abort ();
955 }
956 }
957
958 if (to_mode == PSImode)
959 {
960 if (from_mode != SImode)
961 from = convert_to_mode (SImode, from, unsignedp);
962
963 #ifdef HAVE_truncsipsi2
964 if (HAVE_truncsipsi2)
965 {
966 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
967 return;
968 }
969 #endif /* HAVE_truncsipsi2 */
970 abort ();
971 }
972
973 if (from_mode == PSImode)
974 {
975 if (to_mode != SImode)
976 {
977 from = convert_to_mode (SImode, from, unsignedp);
978 from_mode = SImode;
979 }
980 else
981 {
982 #ifdef HAVE_extendpsisi2
983 if (! unsignedp && HAVE_extendpsisi2)
984 {
985 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
986 return;
987 }
988 #endif /* HAVE_extendpsisi2 */
989 #ifdef HAVE_zero_extendpsisi2
990 if (unsignedp && HAVE_zero_extendpsisi2)
991 {
992 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
993 return;
994 }
995 #endif /* HAVE_zero_extendpsisi2 */
996 abort ();
997 }
998 }
999
1000 if (to_mode == PDImode)
1001 {
1002 if (from_mode != DImode)
1003 from = convert_to_mode (DImode, from, unsignedp);
1004
1005 #ifdef HAVE_truncdipdi2
1006 if (HAVE_truncdipdi2)
1007 {
1008 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1009 return;
1010 }
1011 #endif /* HAVE_truncdipdi2 */
1012 abort ();
1013 }
1014
1015 if (from_mode == PDImode)
1016 {
1017 if (to_mode != DImode)
1018 {
1019 from = convert_to_mode (DImode, from, unsignedp);
1020 from_mode = DImode;
1021 }
1022 else
1023 {
1024 #ifdef HAVE_extendpdidi2
1025 if (HAVE_extendpdidi2)
1026 {
1027 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1028 return;
1029 }
1030 #endif /* HAVE_extendpdidi2 */
1031 abort ();
1032 }
1033 }
1034
1035 /* Now follow all the conversions between integers
1036 no more than a word long. */
1037
1038 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1039 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1040 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1041 GET_MODE_BITSIZE (from_mode)))
1042 {
1043 if (!((GET_CODE (from) == MEM
1044 && ! MEM_VOLATILE_P (from)
1045 && direct_load[(int) to_mode]
1046 && ! mode_dependent_address_p (XEXP (from, 0)))
1047 || GET_CODE (from) == REG
1048 || GET_CODE (from) == SUBREG))
1049 from = force_reg (from_mode, from);
1050 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1051 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1052 from = copy_to_reg (from);
1053 emit_move_insn (to, gen_lowpart (to_mode, from));
1054 return;
1055 }
1056
1057 /* Handle extension. */
1058 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1059 {
1060 /* Convert directly if that works. */
1061 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1062 != CODE_FOR_nothing)
1063 {
1064 emit_unop_insn (code, to, from, equiv_code);
1065 return;
1066 }
1067 else
1068 {
1069 enum machine_mode intermediate;
1070 rtx tmp;
1071 tree shift_amount;
1072
1073 /* Search for a mode to convert via. */
1074 for (intermediate = from_mode; intermediate != VOIDmode;
1075 intermediate = GET_MODE_WIDER_MODE (intermediate))
1076 if (((can_extend_p (to_mode, intermediate, unsignedp)
1077 != CODE_FOR_nothing)
1078 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1079 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1080 GET_MODE_BITSIZE (intermediate))))
1081 && (can_extend_p (intermediate, from_mode, unsignedp)
1082 != CODE_FOR_nothing))
1083 {
1084 convert_move (to, convert_to_mode (intermediate, from,
1085 unsignedp), unsignedp);
1086 return;
1087 }
1088
1089 /* No suitable intermediate mode.
1090 Generate what we need with shifts. */
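/* E.g., extending a QImode value to SImode this way shifts it left by
   24 bits and then back right by 24, arithmetically if signed or
   logically if unsigned.  */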
1091 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1092 - GET_MODE_BITSIZE (from_mode), 0);
1093 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1094 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1095 to, unsignedp);
1096 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1097 to, unsignedp);
1098 if (tmp != to)
1099 emit_move_insn (to, tmp);
1100 return;
1101 }
1102 }
1103
1104 /* Support special truncate insns for certain modes. */
1105
1106 if (from_mode == DImode && to_mode == SImode)
1107 {
1108 #ifdef HAVE_truncdisi2
1109 if (HAVE_truncdisi2)
1110 {
1111 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1112 return;
1113 }
1114 #endif
1115 convert_move (to, force_reg (from_mode, from), unsignedp);
1116 return;
1117 }
1118
1119 if (from_mode == DImode && to_mode == HImode)
1120 {
1121 #ifdef HAVE_truncdihi2
1122 if (HAVE_truncdihi2)
1123 {
1124 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1125 return;
1126 }
1127 #endif
1128 convert_move (to, force_reg (from_mode, from), unsignedp);
1129 return;
1130 }
1131
1132 if (from_mode == DImode && to_mode == QImode)
1133 {
1134 #ifdef HAVE_truncdiqi2
1135 if (HAVE_truncdiqi2)
1136 {
1137 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1138 return;
1139 }
1140 #endif
1141 convert_move (to, force_reg (from_mode, from), unsignedp);
1142 return;
1143 }
1144
1145 if (from_mode == SImode && to_mode == HImode)
1146 {
1147 #ifdef HAVE_truncsihi2
1148 if (HAVE_truncsihi2)
1149 {
1150 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1151 return;
1152 }
1153 #endif
1154 convert_move (to, force_reg (from_mode, from), unsignedp);
1155 return;
1156 }
1157
1158 if (from_mode == SImode && to_mode == QImode)
1159 {
1160 #ifdef HAVE_truncsiqi2
1161 if (HAVE_truncsiqi2)
1162 {
1163 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1164 return;
1165 }
1166 #endif
1167 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 return;
1169 }
1170
1171 if (from_mode == HImode && to_mode == QImode)
1172 {
1173 #ifdef HAVE_trunchiqi2
1174 if (HAVE_trunchiqi2)
1175 {
1176 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1177 return;
1178 }
1179 #endif
1180 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 return;
1182 }
1183
1184 if (from_mode == TImode && to_mode == DImode)
1185 {
1186 #ifdef HAVE_trunctidi2
1187 if (HAVE_trunctidi2)
1188 {
1189 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1190 return;
1191 }
1192 #endif
1193 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 return;
1195 }
1196
1197 if (from_mode == TImode && to_mode == SImode)
1198 {
1199 #ifdef HAVE_trunctisi2
1200 if (HAVE_trunctisi2)
1201 {
1202 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1203 return;
1204 }
1205 #endif
1206 convert_move (to, force_reg (from_mode, from), unsignedp);
1207 return;
1208 }
1209
1210 if (from_mode == TImode && to_mode == HImode)
1211 {
1212 #ifdef HAVE_trunctihi2
1213 if (HAVE_trunctihi2)
1214 {
1215 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1216 return;
1217 }
1218 #endif
1219 convert_move (to, force_reg (from_mode, from), unsignedp);
1220 return;
1221 }
1222
1223 if (from_mode == TImode && to_mode == QImode)
1224 {
1225 #ifdef HAVE_trunctiqi2
1226 if (HAVE_trunctiqi2)
1227 {
1228 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1229 return;
1230 }
1231 #endif
1232 convert_move (to, force_reg (from_mode, from), unsignedp);
1233 return;
1234 }
1235
1236 /* Handle truncation of volatile memrefs, and so on;
1237 the things that couldn't be truncated directly,
1238 and for which there was no special instruction. */
1239 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1240 {
1241 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1242 emit_move_insn (to, temp);
1243 return;
1244 }
1245
1246 /* Mode combination is not recognized. */
1247 abort ();
1248 }
1249
1250 /* Return an rtx for a value that would result
1251 from converting X to mode MODE.
1252 Both X and MODE may be floating, or both integer.
1253 UNSIGNEDP is nonzero if X is an unsigned value.
1254 This can be done by referring to a part of X in place
1255 or by copying to a new temporary with conversion.
1256
1257 This function *must not* call protect_from_queue
1258 except when putting X into an insn (in which case convert_move does it). */
1259
1260 rtx
1261 convert_to_mode (mode, x, unsignedp)
1262 enum machine_mode mode;
1263 rtx x;
1264 int unsignedp;
1265 {
1266 return convert_modes (mode, VOIDmode, x, unsignedp);
1267 }
1268
1269 /* Return an rtx for a value that would result
1270 from converting X from mode OLDMODE to mode MODE.
1271 Both modes may be floating, or both integer.
1272 UNSIGNEDP is nonzero if X is an unsigned value.
1273
1274 This can be done by referring to a part of X in place
1275 or by copying to a new temporary with conversion.
1276
1277 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1278
1279 This function *must not* call protect_from_queue
1280 except when putting X into an insn (in which case convert_move does it). */
1281
1282 rtx
1283 convert_modes (mode, oldmode, x, unsignedp)
1284 enum machine_mode mode, oldmode;
1285 rtx x;
1286 int unsignedp;
1287 {
1288 rtx temp;
1289
1290 /* If FROM is a SUBREG that indicates that we have already done at least
1291 the required extension, strip it. */
1292
1293 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1294 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1295 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1296 x = gen_lowpart (mode, x);
1297
1298 if (GET_MODE (x) != VOIDmode)
1299 oldmode = GET_MODE (x);
1300
1301 if (mode == oldmode)
1302 return x;
1303
1304 /* There is one case that we must handle specially: If we are converting
1305 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1306 we are to interpret the constant as unsigned, gen_lowpart will do
1307 the wrong thing if the constant appears negative. What we want to do is
1308 make the high-order word of the constant zero, not all ones. */
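/* For example, on a host with a 32-bit HOST_WIDE_INT, converting
   (const_int -1) to an unsigned 64-bit mode must yield the constant
   0x00000000ffffffff, not all ones.  */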
1309
1310 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1311 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1312 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1313 {
1314 HOST_WIDE_INT val = INTVAL (x);
1315
1316 if (oldmode != VOIDmode
1317 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1318 {
1319 int width = GET_MODE_BITSIZE (oldmode);
1320
1321 /* We need to zero extend VAL. */
1322 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1323 }
1324
1325 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1326 }
1327
1328 /* We can do this with a gen_lowpart if both desired and current modes
1329 are integer, and this is either a constant integer, a register, or a
1330 non-volatile MEM. Except for the constant case where MODE is no
1331 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1332
1333 if ((GET_CODE (x) == CONST_INT
1334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1335 || (GET_MODE_CLASS (mode) == MODE_INT
1336 && GET_MODE_CLASS (oldmode) == MODE_INT
1337 && (GET_CODE (x) == CONST_DOUBLE
1338 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1339 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1340 && direct_load[(int) mode])
1341 || (GET_CODE (x) == REG
1342 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1343 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1344 {
1345 /* ??? If we don't know OLDMODE, we have to assume here that
1346 X does not need sign- or zero-extension. This may not be
1347 the case, but it's the best we can do. */
1348 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1349 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1350 {
1351 HOST_WIDE_INT val = INTVAL (x);
1352 int width = GET_MODE_BITSIZE (oldmode);
1353
1354 /* We must sign or zero-extend in this case. Start by
1355 zero-extending, then sign extend if we need to. */
1356 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1357 if (! unsignedp
1358 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1359 val |= (HOST_WIDE_INT) (-1) << width;
1360
1361 return GEN_INT (trunc_int_for_mode (val, mode));
1362 }
1363
1364 return gen_lowpart (mode, x);
1365 }
1366
1367 temp = gen_reg_rtx (mode);
1368 convert_move (temp, x, unsignedp);
1369 return temp;
1370 }
1371 \f
1372 /* This macro determines the largest unit size that move_by_pieces
1373 can use. */
1374
1375 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1376 move efficiently, as opposed to MOVE_MAX which is the maximum
1377 number of bytes we can move with a single instruction. */
1378
1379 #ifndef MOVE_MAX_PIECES
1380 #define MOVE_MAX_PIECES MOVE_MAX
1381 #endif
1382
1383 /* Generate several move instructions to copy LEN bytes from block FROM to
1384 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1385 and TO through protect_from_queue before calling.
1386
1387 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1388 used to push FROM to the stack.
1389
1390 ALIGN is maximum alignment we can assume. */
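/* For example, with MOVE_MAX_PIECES of 4 and the usual integer move
   patterns available, an aligned 11-byte copy is emitted as two SImode
   moves, one HImode move and one QImode move, largest pieces first.  */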
1391
1392 void
1393 move_by_pieces (to, from, len, align)
1394 rtx to, from;
1395 unsigned HOST_WIDE_INT len;
1396 unsigned int align;
1397 {
1398 struct move_by_pieces data;
1399 rtx to_addr, from_addr = XEXP (from, 0);
1400 unsigned int max_size = MOVE_MAX_PIECES + 1;
1401 enum machine_mode mode = VOIDmode, tmode;
1402 enum insn_code icode;
1403
1404 data.offset = 0;
1405 data.from_addr = from_addr;
1406 if (to)
1407 {
1408 to_addr = XEXP (to, 0);
1409 data.to = to;
1410 data.autinc_to
1411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1413 data.reverse
1414 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1415 }
1416 else
1417 {
1418 to_addr = NULL_RTX;
1419 data.to = NULL_RTX;
1420 data.autinc_to = 1;
1421 #ifdef STACK_GROWS_DOWNWARD
1422 data.reverse = 1;
1423 #else
1424 data.reverse = 0;
1425 #endif
1426 }
1427 data.to_addr = to_addr;
1428 data.from = from;
1429 data.autinc_from
1430 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1431 || GET_CODE (from_addr) == POST_INC
1432 || GET_CODE (from_addr) == POST_DEC);
1433
1434 data.explicit_inc_from = 0;
1435 data.explicit_inc_to = 0;
1436 if (data.reverse) data.offset = len;
1437 data.len = len;
1438
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1444 {
1445 /* Find the mode of the largest move... */
1446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1448 if (GET_MODE_SIZE (tmode) < max_size)
1449 mode = tmode;
1450
1451 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1452 {
1453 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = -1;
1456 }
1457 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1458 {
1459 data.from_addr = copy_addr_to_reg (from_addr);
1460 data.autinc_from = 1;
1461 data.explicit_inc_from = 1;
1462 }
1463 if (!data.autinc_from && CONSTANT_P (from_addr))
1464 data.from_addr = copy_addr_to_reg (from_addr);
1465 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1466 {
1467 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = -1;
1470 }
1471 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1472 {
1473 data.to_addr = copy_addr_to_reg (to_addr);
1474 data.autinc_to = 1;
1475 data.explicit_inc_to = 1;
1476 }
1477 if (!data.autinc_to && CONSTANT_P (to_addr))
1478 data.to_addr = copy_addr_to_reg (to_addr);
1479 }
1480
1481 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1482 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1483 align = MOVE_MAX * BITS_PER_UNIT;
1484
1485 /* First move what we can in the largest integer mode, then go to
1486 successively smaller modes. */
1487
1488 while (max_size > 1)
1489 {
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1500 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501
1502 max_size = GET_MODE_SIZE (mode);
1503 }
1504
1505 /* The code above should have handled everything. */
1506 if (data.len > 0)
1507 abort ();
1508 }
1509
1510 /* Return number of insns required to move L bytes by pieces.
1511 ALIGN (in bits) is maximum alignment we can assume. */
1512
1513 static unsigned HOST_WIDE_INT
1514 move_by_pieces_ninsns (l, align)
1515 unsigned HOST_WIDE_INT l;
1516 unsigned int align;
1517 {
1518 unsigned HOST_WIDE_INT n_insns = 0;
1519 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1520
1521 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1522 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1523 align = MOVE_MAX * BITS_PER_UNIT;
1524
1525 while (max_size > 1)
1526 {
1527 enum machine_mode mode = VOIDmode, tmode;
1528 enum insn_code icode;
1529
1530 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1531 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1532 if (GET_MODE_SIZE (tmode) < max_size)
1533 mode = tmode;
1534
1535 if (mode == VOIDmode)
1536 break;
1537
1538 icode = mov_optab->handlers[(int) mode].insn_code;
1539 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1540 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1541
1542 max_size = GET_MODE_SIZE (mode);
1543 }
1544
1545 if (l)
1546 abort ();
1547 return n_insns;
1548 }
1549
1550 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1551 with move instructions for mode MODE. GENFUN is the gen_... function
1552 to make a move insn for that mode. DATA has all the other info. */
1553
1554 static void
1555 move_by_pieces_1 (genfun, mode, data)
1556 rtx (*genfun) PARAMS ((rtx, ...));
1557 enum machine_mode mode;
1558 struct move_by_pieces *data;
1559 {
1560 unsigned int size = GET_MODE_SIZE (mode);
1561 rtx to1 = NULL_RTX, from1;
1562
1563 while (data->len >= size)
1564 {
1565 if (data->reverse)
1566 data->offset -= size;
1567
1568 if (data->to)
1569 {
1570 if (data->autinc_to)
1571 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1572 data->offset);
1573 else
1574 to1 = adjust_address (data->to, mode, data->offset);
1575 }
1576
1577 if (data->autinc_from)
1578 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1579 data->offset);
1580 else
1581 from1 = adjust_address (data->from, mode, data->offset);
1582
1583 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1584 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1586 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1587
1588 if (data->to)
1589 emit_insn ((*genfun) (to1, from1));
1590 else
1591 {
1592 #ifdef PUSH_ROUNDING
1593 emit_single_push_insn (mode, from1, NULL);
1594 #else
1595 abort ();
1596 #endif
1597 }
1598
1599 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1600 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1601 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1602 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1603
1604 if (! data->reverse)
1605 data->offset += size;
1606
1607 data->len -= size;
1608 }
1609 }
1610 \f
1611 /* Emit code to move a block Y to a block X.
1612 This may be done with string-move instructions,
1613 with multiple scalar move instructions, or with a library call.
1614
1615 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1616 with mode BLKmode.
1617 SIZE is an rtx that says how long they are.
1618 ALIGN is the maximum alignment we can assume they have.
1619
1620 Return the address of the new block, if memcpy is called and returns it,
1621 0 otherwise. */
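/* The strategy below: use move_by_pieces for small constant sizes, then
   try each movstrM expander from the narrowest mode upward, and finally
   fall back to an ordinary call to memcpy (or a bcopy libcall when
   TARGET_MEM_FUNCTIONS is not defined).  */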
1622
1623 rtx
1624 emit_block_move (x, y, size)
1625 rtx x, y;
1626 rtx size;
1627 {
1628 rtx retval = 0;
1629 #ifdef TARGET_MEM_FUNCTIONS
1630 static tree fn;
1631 tree call_expr, arg_list;
1632 #endif
1633 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1634
1635 if (GET_MODE (x) != BLKmode)
1636 abort ();
1637
1638 if (GET_MODE (y) != BLKmode)
1639 abort ();
1640
1641 x = protect_from_queue (x, 1);
1642 y = protect_from_queue (y, 0);
1643 size = protect_from_queue (size, 0);
1644
1645 if (GET_CODE (x) != MEM)
1646 abort ();
1647 if (GET_CODE (y) != MEM)
1648 abort ();
1649 if (size == 0)
1650 abort ();
1651
1652 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1653 move_by_pieces (x, y, INTVAL (size), align);
1654 else
1655 {
1656 /* Try the most limited insn first, because there's no point
1657 including more than one in the machine description unless
1658 the more limited one has some advantage. */
1659
1660 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1661 enum machine_mode mode;
1662
1663 /* Since this is a move insn, we don't care about volatility. */
1664 volatile_ok = 1;
1665
1666 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1667 mode = GET_MODE_WIDER_MODE (mode))
1668 {
1669 enum insn_code code = movstr_optab[(int) mode];
1670 insn_operand_predicate_fn pred;
1671
1672 if (code != CODE_FOR_nothing
1673 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1674 here because if SIZE is less than the mode mask, as it is
1675 returned by the macro, it will definitely be less than the
1676 actual mode mask. */
1677 && ((GET_CODE (size) == CONST_INT
1678 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1679 <= (GET_MODE_MASK (mode) >> 1)))
1680 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1681 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1682 || (*pred) (x, BLKmode))
1683 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1684 || (*pred) (y, BLKmode))
1685 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1686 || (*pred) (opalign, VOIDmode)))
1687 {
1688 rtx op2;
1689 rtx last = get_last_insn ();
1690 rtx pat;
1691
1692 op2 = convert_to_mode (mode, size, 1);
1693 pred = insn_data[(int) code].operand[2].predicate;
1694 if (pred != 0 && ! (*pred) (op2, mode))
1695 op2 = copy_to_mode_reg (mode, op2);
1696
1697 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1698 if (pat)
1699 {
1700 emit_insn (pat);
1701 volatile_ok = 0;
1702 return 0;
1703 }
1704 else
1705 delete_insns_since (last);
1706 }
1707 }
1708
1709 volatile_ok = 0;
1710
1711 /* X, Y, or SIZE may have been passed through protect_from_queue.
1712
1713 It is unsafe to save the value generated by protect_from_queue
1714 and reuse it later. Consider what happens if emit_queue is
1715 called before the return value from protect_from_queue is used.
1716
1717 Expansion of the CALL_EXPR below will call emit_queue before
1718 we are finished emitting RTL for argument setup. So if we are
1719 not careful we could get the wrong value for an argument.
1720
1721 To avoid this problem we go ahead and emit code to copy X, Y &
1722 SIZE into new pseudos. We can then place those new pseudos
1723 into an RTL_EXPR and use them later, even after a call to
1724 emit_queue.
1725
1726 Note this is not strictly needed for library calls since they
1727 do not call emit_queue before loading their arguments. However,
1728 we may need to have library calls call emit_queue in the future
1729 since failing to do so could cause problems for targets which
1730 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1731 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1732 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1733
1734 #ifdef TARGET_MEM_FUNCTIONS
1735 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1736 #else
1737 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1738 TREE_UNSIGNED (integer_type_node));
1739 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1740 #endif
1741
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 /* It is incorrect to use the libcall calling conventions to call
1744 memcpy in this context.
1745
1746 This could be a user call to memcpy and the user may wish to
1747 examine the return value from memcpy.
1748
1749 For targets where libcalls and normal calls have different conventions
1750 for returning pointers, we could end up generating incorrect code.
1751
1752 So instead of using a libcall sequence we build up a suitable
1753 CALL_EXPR and expand the call in the normal fashion. */
1754 if (fn == NULL_TREE)
1755 {
1756 tree fntype;
1757
1758 /* This was copied from except.c, I don't know if all this is
1759 necessary in this context or not. */
1760 fn = get_identifier ("memcpy");
1761 fntype = build_pointer_type (void_type_node);
1762 fntype = build_function_type (fntype, NULL_TREE);
1763 fn = build_decl (FUNCTION_DECL, fn, fntype);
1764 ggc_add_tree_root (&fn, 1);
1765 DECL_EXTERNAL (fn) = 1;
1766 TREE_PUBLIC (fn) = 1;
1767 DECL_ARTIFICIAL (fn) = 1;
1768 TREE_NOTHROW (fn) = 1;
1769 make_decl_rtl (fn, NULL);
1770 assemble_external (fn);
1771 }
1772
1773 /* We need to make an argument list for the function call.
1774
1775 memcpy has three arguments, the first two are void * addresses and
1776 the last is a size_t byte count for the copy. */
1777 arg_list
1778 = build_tree_list (NULL_TREE,
1779 make_tree (build_pointer_type (void_type_node), x));
1780 TREE_CHAIN (arg_list)
1781 = build_tree_list (NULL_TREE,
1782 make_tree (build_pointer_type (void_type_node), y));
1783 TREE_CHAIN (TREE_CHAIN (arg_list))
1784 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1785 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1786
1787 /* Now we have to build up the CALL_EXPR itself. */
1788 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1789 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1790 call_expr, arg_list, NULL_TREE);
1791 TREE_SIDE_EFFECTS (call_expr) = 1;
1792
1793 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1794 #else
1795 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1796 VOIDmode, 3, y, Pmode, x, Pmode,
1797 convert_to_mode (TYPE_MODE (integer_type_node), size,
1798 TREE_UNSIGNED (integer_type_node)),
1799 TYPE_MODE (integer_type_node));
1800 #endif
1801
1802 /* If we are initializing a readonly value, show the above call
1803 clobbered it. Otherwise, a load from it may erroneously be hoisted
1804 from a loop. */
1805 if (RTX_UNCHANGING_P (x))
1806 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1807 }
1808
1809 return retval;
1810 }
1811 \f
1812 /* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1814
1815 void
1816 move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1821 {
1822 int i;
1823 #ifdef HAVE_load_multiple
1824 rtx pat;
1825 rtx last;
1826 #endif
1827
1828 if (nregs == 0)
1829 return;
1830
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1833
1834 /* See if the machine can do this with a load multiple insn. */
1835 #ifdef HAVE_load_multiple
1836 if (HAVE_load_multiple)
1837 {
1838 last = get_last_insn ();
1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840 GEN_INT (nregs));
1841 if (pat)
1842 {
1843 emit_insn (pat);
1844 return;
1845 }
1846 else
1847 delete_insns_since (last);
1848 }
1849 #endif
1850
1851 for (i = 0; i < nregs; i++)
1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853 operand_subword_force (x, i, mode));
1854 }
1855
1856 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1859
1860 void
1861 move_block_from_reg (regno, x, nregs, size)
1862 int regno;
1863 rtx x;
1864 int nregs;
1865 int size;
1866 {
1867 int i;
1868 #ifdef HAVE_store_multiple
1869 rtx pat;
1870 rtx last;
1871 #endif
1872 enum machine_mode mode;
1873
1874 if (nregs == 0)
1875 return;
1876
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1881 {
1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 return;
1884 }
1885
1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1890 {
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1893
1894 if (tem == 0)
1895 abort ();
1896
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
1898 gen_rtx_REG (word_mode, regno),
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1903 }
1904
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple)
1908 {
1909 last = get_last_insn ();
1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1911 GEN_INT (nregs));
1912 if (pat)
1913 {
1914 emit_insn (pat);
1915 return;
1916 }
1917 else
1918 delete_insns_since (last);
1919 }
1920 #endif
1921
1922 for (i = 0; i < nregs; i++)
1923 {
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1925
1926 if (tem == 0)
1927 abort ();
1928
1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 }
1931 }
1932
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. */
1936 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1937 the balance will be in what would be the low-order memory addresses, i.e.
1938 left justified for big endian, right justified for little endian. This
1939 happens to be true for the targets currently using this support. If this
1940 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1941 would be needed. */
1942
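/* Illustrative example (not from the original sources): a DST of the form

     (parallel [(expr_list (reg:DI 40) (const_int 0))
                (expr_list (reg:DI 41) (const_int 8))])

   asks for bytes 0..7 of ORIG_SRC to be loaded into register 40 and bytes
   8..15 into register 41.  A null register in element 0 marks a parameter
   that also lives partly on the stack, and processing starts at element 1.  */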
1943 void
1944 emit_group_load (dst, orig_src, ssize)
1945 rtx dst, orig_src;
1946 int ssize;
1947 {
1948 rtx *tmps, src;
1949 int start, i;
1950
1951 if (GET_CODE (dst) != PARALLEL)
1952 abort ();
1953
1954 /* Check for a NULL entry, used to indicate that the parameter goes
1955 both on the stack and in registers. */
1956 if (XEXP (XVECEXP (dst, 0, 0), 0))
1957 start = 0;
1958 else
1959 start = 1;
1960
1961 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962
1963 /* Process the pieces. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1965 {
1966 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1967 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1968 unsigned int bytelen = GET_MODE_SIZE (mode);
1969 int shift = 0;
1970
1971 /* Handle trailing fragments that run over the size of the struct. */
1972 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 {
1974 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1975 bytelen = ssize - bytepos;
1976 if (bytelen <= 0)
1977 abort ();
1978 }
1979
1980 /* If we won't be loading directly from memory, protect the real source
1981 from strange tricks we might play; but make sure that the source can
1982 be loaded directly into the destination. */
1983 src = orig_src;
1984 if (GET_CODE (orig_src) != MEM
1985 && (!CONSTANT_P (orig_src)
1986 || (GET_MODE (orig_src) != mode
1987 && GET_MODE (orig_src) != VOIDmode)))
1988 {
1989 if (GET_MODE (orig_src) == VOIDmode)
1990 src = gen_reg_rtx (mode);
1991 else
1992 src = gen_reg_rtx (GET_MODE (orig_src));
1993
1994 emit_move_insn (src, orig_src);
1995 }
1996
1997 /* Optimize the access just a bit. */
1998 if (GET_CODE (src) == MEM
1999 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2000 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2001 && bytelen == GET_MODE_SIZE (mode))
2002 {
2003 tmps[i] = gen_reg_rtx (mode);
2004 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2005 }
2006 else if (GET_CODE (src) == CONCAT)
2007 {
2008 if (bytepos == 0
2009 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2010 tmps[i] = XEXP (src, 0);
2011 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2012 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2013 tmps[i] = XEXP (src, 1);
2014 else if (bytepos == 0)
2015 {
2016 rtx mem = assign_stack_temp (GET_MODE (src),
2017 GET_MODE_SIZE (GET_MODE (src)), 0);
2018 emit_move_insn (mem, src);
2019 tmps[i] = adjust_address (mem, mode, 0);
2020 }
2021 else
2022 abort ();
2023 }
2024 else if (CONSTANT_P (src)
2025 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2026 tmps[i] = src;
2027 else
2028 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2029 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2030 mode, mode, ssize);
2031
2032 if (BYTES_BIG_ENDIAN && shift)
2033 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2034 tmps[i], 0, OPTAB_WIDEN);
2035 }
2036
2037 emit_queue ();
2038
2039 /* Copy the extracted pieces into the proper (probable) hard regs. */
2040 for (i = start; i < XVECLEN (dst, 0); i++)
2041 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2042 }
2043
2044 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2045 registers represented by a PARALLEL. SSIZE represents the total size of
2046 block DST, or -1 if not known. */
2047
2048 void
2049 emit_group_store (orig_dst, src, ssize)
2050 rtx orig_dst, src;
2051 int ssize;
2052 {
2053 rtx *tmps, dst;
2054 int start, i;
2055
2056 if (GET_CODE (src) != PARALLEL)
2057 abort ();
2058
2059 /* Check for a NULL entry, used to indicate that the parameter goes
2060 both on the stack and in registers. */
2061 if (XEXP (XVECEXP (src, 0, 0), 0))
2062 start = 0;
2063 else
2064 start = 1;
2065
2066 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2067
2068 /* Copy the (probable) hard regs into pseudos. */
2069 for (i = start; i < XVECLEN (src, 0); i++)
2070 {
2071 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2072 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2073 emit_move_insn (tmps[i], reg);
2074 }
2075 emit_queue ();
2076
2077 /* If we won't be storing directly into memory, protect the real destination
2078 from strange tricks we might play. */
2079 dst = orig_dst;
2080 if (GET_CODE (dst) == PARALLEL)
2081 {
2082 rtx temp;
2083
2084 /* We can get a PARALLEL dst if there is a conditional expression in
2085 a return statement. In that case, the dst and src are the same,
2086 so no action is necessary. */
2087 if (rtx_equal_p (dst, src))
2088 return;
2089
2090 /* It is unclear if we can ever reach here, but we may as well handle
2091 it. Allocate a temporary, and split this into a store/load to/from
2092 the temporary. */
2093
2094 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2095 emit_group_store (temp, src, ssize);
2096 emit_group_load (dst, temp, ssize);
2097 return;
2098 }
2099 else if (GET_CODE (dst) != MEM)
2100 {
2101 dst = gen_reg_rtx (GET_MODE (orig_dst));
2102 /* Make life a bit easier for combine. */
2103 emit_move_insn (dst, const0_rtx);
2104 }
2105
2106 /* Process the pieces. */
2107 for (i = start; i < XVECLEN (src, 0); i++)
2108 {
2109 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2110 enum machine_mode mode = GET_MODE (tmps[i]);
2111 unsigned int bytelen = GET_MODE_SIZE (mode);
2112
2113 /* Handle trailing fragments that run over the size of the struct. */
2114 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2115 {
2116 if (BYTES_BIG_ENDIAN)
2117 {
2118 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2119 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2120 tmps[i], 0, OPTAB_WIDEN);
2121 }
2122 bytelen = ssize - bytepos;
2123 }
2124
2125 /* Optimize the access just a bit. */
2126 if (GET_CODE (dst) == MEM
2127 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2128 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2129 && bytelen == GET_MODE_SIZE (mode))
2130 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2131 else
2132 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2133 mode, tmps[i], ssize);
2134 }
2135
2136 emit_queue ();
2137
2138 /* Copy from the pseudo into the (probable) hard reg. */
2139 if (GET_CODE (dst) == REG)
2140 emit_move_insn (orig_dst, dst);
2141 }
2142
2143 /* Generate code to copy a BLKmode object of TYPE out of a
2144 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2145 is null, a stack temporary is created. TGTBLK is returned.
2146
2147 The primary purpose of this routine is to handle functions
2148 that return BLKmode structures in registers. Some machines
2149 (the PA for example) want to return all small structures
2150 in registers regardless of the structure's alignment. */
2151
2152 rtx
2153 copy_blkmode_from_reg (tgtblk, srcreg, type)
2154 rtx tgtblk;
2155 rtx srcreg;
2156 tree type;
2157 {
2158 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2159 rtx src = NULL, dst = NULL;
2160 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2161 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2162
2163 if (tgtblk == 0)
2164 {
2165 tgtblk = assign_temp (build_qualified_type (type,
2166 (TYPE_QUALS (type)
2167 | TYPE_QUAL_CONST)),
2168 0, 1, 1);
2169 preserve_temp_slots (tgtblk);
2170 }
2171
2172 /* This code assumes srcreg is at least a full word. If it isn't,
2173 copy it into a new pseudo which is a full word. */
2174 if (GET_MODE (srcreg) != BLKmode
2175 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2176 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2177
2178 /* Structures whose size is not a multiple of a word are aligned
2179 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2180 machine, this means we must skip the empty high order bytes when
2181 calculating the bit offset. */
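/* Illustrative example (not from the original sources): for a 3-byte
   structure on a 32-bit big-endian target, bytes % UNITS_PER_WORD is 3,
   so big_endian_correction is 32 - 24 = 8 and the first extraction below
   starts 8 bits into the source word.  */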
2182 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2183 big_endian_correction
2184 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2185
2186 /* Copy the structure BITSIZE bits at a time.
2187
2188 We could probably emit more efficient code for machines which do not use
2189 strict alignment, but it doesn't seem worth the effort at the current
2190 time. */
2191 for (bitpos = 0, xbitpos = big_endian_correction;
2192 bitpos < bytes * BITS_PER_UNIT;
2193 bitpos += bitsize, xbitpos += bitsize)
2194 {
2195 /* We need a new source operand each time xbitpos is on a
2196 word boundary or when xbitpos == big_endian_correction
2197 (the first time through). */
2198 if (xbitpos % BITS_PER_WORD == 0
2199 || xbitpos == big_endian_correction)
2200 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2201 GET_MODE (srcreg));
2202
2203 /* We need a new destination operand each time bitpos is on
2204 a word boundary. */
2205 if (bitpos % BITS_PER_WORD == 0)
2206 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2207
2208 /* Use xbitpos for the source extraction (right justified) and
2209 bitpos for the destination store (left justified). */
2210 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2211 extract_bit_field (src, bitsize,
2212 xbitpos % BITS_PER_WORD, 1,
2213 NULL_RTX, word_mode, word_mode,
2214 BITS_PER_WORD),
2215 BITS_PER_WORD);
2216 }
2217
2218 return tgtblk;
2219 }
2220
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2223
2224 void
2225 use_reg (call_fusage, reg)
2226 rtx *call_fusage, reg;
2227 {
2228 if (GET_CODE (reg) != REG
2229 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2230 abort ();
2231
2232 *call_fusage
2233 = gen_rtx_EXPR_LIST (VOIDmode,
2234 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2235 }
2236
2237 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2238 starting at REGNO. All of these registers must be hard registers. */
2239
2240 void
2241 use_regs (call_fusage, regno, nregs)
2242 rtx *call_fusage;
2243 int regno;
2244 int nregs;
2245 {
2246 int i;
2247
2248 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2249 abort ();
2250
2251 for (i = 0; i < nregs; i++)
2252 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2253 }
2254
2255 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2256 PARALLEL REGS. This is for calls that pass values in multiple
2257 non-contiguous locations. The Irix 6 ABI has examples of this. */
2258
2259 void
2260 use_group_regs (call_fusage, regs)
2261 rtx *call_fusage;
2262 rtx regs;
2263 {
2264 int i;
2265
2266 for (i = 0; i < XVECLEN (regs, 0); i++)
2267 {
2268 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2269
2270 /* A NULL entry means the parameter goes both on the stack and in
2271 registers. This can also be a MEM for targets that pass values
2272 partially on the stack and partially in registers. */
2273 if (reg != 0 && GET_CODE (reg) == REG)
2274 use_reg (call_fusage, reg);
2275 }
2276 }
2277 \f
2278
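/* Determine whether the LEN bytes generated by CONSTFUN can be stored
   to memory using several move instructions.  CONSTFUNDATA is a pointer
   which will be passed as argument in every CONSTFUN call.  ALIGN is
   maximum alignment we can assume.  Return nonzero if a call to
   store_by_pieces should succeed.  */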
2279 int
2280 can_store_by_pieces (len, constfun, constfundata, align)
2281 unsigned HOST_WIDE_INT len;
2282 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2283 PTR constfundata;
2284 unsigned int align;
2285 {
2286 unsigned HOST_WIDE_INT max_size, l;
2287 HOST_WIDE_INT offset = 0;
2288 enum machine_mode mode, tmode;
2289 enum insn_code icode;
2290 int reverse;
2291 rtx cst;
2292
2293 if (! MOVE_BY_PIECES_P (len, align))
2294 return 0;
2295
2296 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2297 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2298 align = MOVE_MAX * BITS_PER_UNIT;
2299
2300 /* We would first store what we can in the largest integer mode, then go to
2301 successively smaller modes. */
2302
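/* Illustrative example (assuming MOVE_MAX_PIECES is 4 and the usual
   integer modes are available): a LEN of 7 is checked as one SImode
   piece, one HImode piece and one QImode piece, and each constant that
   CONSTFUN returns for those offsets must satisfy LEGITIMATE_CONSTANT_P.  */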
2303 for (reverse = 0;
2304 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2305 reverse++)
2306 {
2307 l = len;
2308 mode = VOIDmode;
2309 max_size = MOVE_MAX_PIECES + 1;
2310 while (max_size > 1)
2311 {
2312 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2313 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2314 if (GET_MODE_SIZE (tmode) < max_size)
2315 mode = tmode;
2316
2317 if (mode == VOIDmode)
2318 break;
2319
2320 icode = mov_optab->handlers[(int) mode].insn_code;
2321 if (icode != CODE_FOR_nothing
2322 && align >= GET_MODE_ALIGNMENT (mode))
2323 {
2324 unsigned int size = GET_MODE_SIZE (mode);
2325
2326 while (l >= size)
2327 {
2328 if (reverse)
2329 offset -= size;
2330
2331 cst = (*constfun) (constfundata, offset, mode);
2332 if (!LEGITIMATE_CONSTANT_P (cst))
2333 return 0;
2334
2335 if (!reverse)
2336 offset += size;
2337
2338 l -= size;
2339 }
2340 }
2341
2342 max_size = GET_MODE_SIZE (mode);
2343 }
2344
2345 /* The code above should have handled everything. */
2346 if (l != 0)
2347 abort ();
2348 }
2349
2350 return 1;
2351 }
2352
2353 /* Generate several move instructions to store LEN bytes generated by
2354 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2355 pointer which will be passed as argument in every CONSTFUN call.
2356 ALIGN is maximum alignment we can assume. */
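/* Illustrative sketch (not part of the original sources): a trivial
   CONSTFUN that stores zero everywhere would be

     static rtx
     zero_constfun (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode ATTRIBUTE_UNUSED;
     {
       return const0_rtx;
     }

   used as store_by_pieces (to, len, zero_constfun, NULL, align);
   clear_by_pieces below does exactly this through clear_by_pieces_1.  */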
2357
2358 void
2359 store_by_pieces (to, len, constfun, constfundata, align)
2360 rtx to;
2361 unsigned HOST_WIDE_INT len;
2362 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2363 PTR constfundata;
2364 unsigned int align;
2365 {
2366 struct store_by_pieces data;
2367
2368 if (! MOVE_BY_PIECES_P (len, align))
2369 abort ();
2370 to = protect_from_queue (to, 1);
2371 data.constfun = constfun;
2372 data.constfundata = constfundata;
2373 data.len = len;
2374 data.to = to;
2375 store_by_pieces_1 (&data, align);
2376 }
2377
2378 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2379 rtx with BLKmode). The caller must pass TO through protect_from_queue
2380 before calling. ALIGN is maximum alignment we can assume. */
2381
2382 static void
2383 clear_by_pieces (to, len, align)
2384 rtx to;
2385 unsigned HOST_WIDE_INT len;
2386 unsigned int align;
2387 {
2388 struct store_by_pieces data;
2389
2390 data.constfun = clear_by_pieces_1;
2391 data.constfundata = NULL;
2392 data.len = len;
2393 data.to = to;
2394 store_by_pieces_1 (&data, align);
2395 }
2396
2397 /* Callback routine for clear_by_pieces.
2398 Return const0_rtx unconditionally. */
2399
2400 static rtx
2401 clear_by_pieces_1 (data, offset, mode)
2402 PTR data ATTRIBUTE_UNUSED;
2403 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2404 enum machine_mode mode ATTRIBUTE_UNUSED;
2405 {
2406 return const0_rtx;
2407 }
2408
2409 /* Subroutine of clear_by_pieces and store_by_pieces.
2410 Generate several move instructions to store LEN bytes of block TO. (A MEM
2411 rtx with BLKmode). The caller must pass TO through protect_from_queue
2412 before calling. ALIGN is maximum alignment we can assume. */
2413
2414 static void
2415 store_by_pieces_1 (data, align)
2416 struct store_by_pieces *data;
2417 unsigned int align;
2418 {
2419 rtx to_addr = XEXP (data->to, 0);
2420 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2421 enum machine_mode mode = VOIDmode, tmode;
2422 enum insn_code icode;
2423
2424 data->offset = 0;
2425 data->to_addr = to_addr;
2426 data->autinc_to
2427 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2428 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2429
2430 data->explicit_inc_to = 0;
2431 data->reverse
2432 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2433 if (data->reverse)
2434 data->offset = data->len;
2435
2436 /* If storing requires more than two move insns,
2437 copy addresses to registers (to make displacements shorter)
2438 and use post-increment if available. */
2439 if (!data->autinc_to
2440 && move_by_pieces_ninsns (data->len, align) > 2)
2441 {
2442 /* Determine the main mode we'll be using. */
2443 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2444 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2445 if (GET_MODE_SIZE (tmode) < max_size)
2446 mode = tmode;
2447
2448 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2449 {
2450 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2451 data->autinc_to = 1;
2452 data->explicit_inc_to = -1;
2453 }
2454
2455 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2456 && ! data->autinc_to)
2457 {
2458 data->to_addr = copy_addr_to_reg (to_addr);
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = 1;
2461 }
2462
2463 if ( !data->autinc_to && CONSTANT_P (to_addr))
2464 data->to_addr = copy_addr_to_reg (to_addr);
2465 }
2466
2467 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2468 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2469 align = MOVE_MAX * BITS_PER_UNIT;
2470
2471 /* First store what we can in the largest integer mode, then go to
2472 successively smaller modes. */
2473
2474 while (max_size > 1)
2475 {
2476 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2477 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2478 if (GET_MODE_SIZE (tmode) < max_size)
2479 mode = tmode;
2480
2481 if (mode == VOIDmode)
2482 break;
2483
2484 icode = mov_optab->handlers[(int) mode].insn_code;
2485 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2486 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2487
2488 max_size = GET_MODE_SIZE (mode);
2489 }
2490
2491 /* The code above should have handled everything. */
2492 if (data->len != 0)
2493 abort ();
2494 }
2495
2496 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2497 with move instructions for mode MODE. GENFUN is the gen_... function
2498 to make a move insn for that mode. DATA has all the other info. */
2499
2500 static void
2501 store_by_pieces_2 (genfun, mode, data)
2502 rtx (*genfun) PARAMS ((rtx, ...));
2503 enum machine_mode mode;
2504 struct store_by_pieces *data;
2505 {
2506 unsigned int size = GET_MODE_SIZE (mode);
2507 rtx to1, cst;
2508
2509 while (data->len >= size)
2510 {
2511 if (data->reverse)
2512 data->offset -= size;
2513
2514 if (data->autinc_to)
2515 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2516 data->offset);
2517 else
2518 to1 = adjust_address (data->to, mode, data->offset);
2519
2520 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2521 emit_insn (gen_add2_insn (data->to_addr,
2522 GEN_INT (-(HOST_WIDE_INT) size)));
2523
2524 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2525 emit_insn ((*genfun) (to1, cst));
2526
2527 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2529
2530 if (! data->reverse)
2531 data->offset += size;
2532
2533 data->len -= size;
2534 }
2535 }
2536 \f
2537 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2538 its length in bytes. */
2539
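/* Illustrative note (not from the original sources): for a BLKmode MEM,
   a call such as clear_storage (object, GEN_INT (32)) picks, in order of
   preference, a clear_by_pieces expansion, a clrstr pattern provided by
   the target, or an out-of-line call to memset (bzero when
   TARGET_MEM_FUNCTIONS is not defined).  */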
2540 rtx
2541 clear_storage (object, size)
2542 rtx object;
2543 rtx size;
2544 {
2545 #ifdef TARGET_MEM_FUNCTIONS
2546 static tree fn;
2547 tree call_expr, arg_list;
2548 #endif
2549 rtx retval = 0;
2550 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2551 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2552
2553 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2554 just move a zero. Otherwise, do this a piece at a time. */
2555 if (GET_MODE (object) != BLKmode
2556 && GET_CODE (size) == CONST_INT
2557 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2558 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2559 else
2560 {
2561 object = protect_from_queue (object, 1);
2562 size = protect_from_queue (size, 0);
2563
2564 if (GET_CODE (size) == CONST_INT
2565 && MOVE_BY_PIECES_P (INTVAL (size), align))
2566 clear_by_pieces (object, INTVAL (size), align);
2567 else
2568 {
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2572
2573 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2574 enum machine_mode mode;
2575
2576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2577 mode = GET_MODE_WIDER_MODE (mode))
2578 {
2579 enum insn_code code = clrstr_optab[(int) mode];
2580 insn_operand_predicate_fn pred;
2581
2582 if (code != CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2589 <= (GET_MODE_MASK (mode) >> 1)))
2590 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2591 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2592 || (*pred) (object, BLKmode))
2593 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2594 || (*pred) (opalign, VOIDmode)))
2595 {
2596 rtx op1;
2597 rtx last = get_last_insn ();
2598 rtx pat;
2599
2600 op1 = convert_to_mode (mode, size, 1);
2601 pred = insn_data[(int) code].operand[1].predicate;
2602 if (pred != 0 && ! (*pred) (op1, mode))
2603 op1 = copy_to_mode_reg (mode, op1);
2604
2605 pat = GEN_FCN ((int) code) (object, op1, opalign);
2606 if (pat)
2607 {
2608 emit_insn (pat);
2609 return 0;
2610 }
2611 else
2612 delete_insns_since (last);
2613 }
2614 }
2615
2616 /* OBJECT or SIZE may have been passed through protect_from_queue.
2617
2618 It is unsafe to save the value generated by protect_from_queue
2619 and reuse it later. Consider what happens if emit_queue is
2620 called before the return value from protect_from_queue is used.
2621
2622 Expansion of the CALL_EXPR below will call emit_queue before
2623 we are finished emitting RTL for argument setup. So if we are
2624 not careful we could get the wrong value for an argument.
2625
2626 To avoid this problem we go ahead and emit code to copy OBJECT
2627 and SIZE into new pseudos. We can then place those new pseudos
2628 into an RTL_EXPR and use them later, even after a call to
2629 emit_queue.
2630
2631 Note this is not strictly needed for library calls since they
2632 do not call emit_queue before loading their arguments. However,
2633 we may need to have library calls call emit_queue in the future
2634 since failing to do so could cause problems for targets which
2635 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2636 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2637
2638 #ifdef TARGET_MEM_FUNCTIONS
2639 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2640 #else
2641 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2642 TREE_UNSIGNED (integer_type_node));
2643 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2644 #endif
2645
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 /* It is incorrect to use the libcall calling conventions to call
2648 memset in this context.
2649
2650 This could be a user call to memset and the user may wish to
2651 examine the return value from memset.
2652
2653 For targets where libcalls and normal calls have different
2654 conventions for returning pointers, we could end up generating
2655 incorrect code.
2656
2657 So instead of using a libcall sequence we build up a suitable
2658 CALL_EXPR and expand the call in the normal fashion. */
2659 if (fn == NULL_TREE)
2660 {
2661 tree fntype;
2662
2663 /* This was copied from except.c; I don't know if all this is
2664 necessary in this context or not. */
2665 fn = get_identifier ("memset");
2666 fntype = build_pointer_type (void_type_node);
2667 fntype = build_function_type (fntype, NULL_TREE);
2668 fn = build_decl (FUNCTION_DECL, fn, fntype);
2669 ggc_add_tree_root (&fn, 1);
2670 DECL_EXTERNAL (fn) = 1;
2671 TREE_PUBLIC (fn) = 1;
2672 DECL_ARTIFICIAL (fn) = 1;
2673 TREE_NOTHROW (fn) = 1;
2674 make_decl_rtl (fn, NULL);
2675 assemble_external (fn);
2676 }
2677
2678 /* We need to make an argument list for the function call.
2679
2680 memset has three arguments, the first is a void * address, the
2681 second an integer with the initialization value, the last is a
2682 size_t count of bytes to be set. */
2683 arg_list
2684 = build_tree_list (NULL_TREE,
2685 make_tree (build_pointer_type (void_type_node),
2686 object));
2687 TREE_CHAIN (arg_list)
2688 = build_tree_list (NULL_TREE,
2689 make_tree (integer_type_node, const0_rtx));
2690 TREE_CHAIN (TREE_CHAIN (arg_list))
2691 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2692 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2693
2694 /* Now we have to build up the CALL_EXPR itself. */
2695 call_expr = build1 (ADDR_EXPR,
2696 build_pointer_type (TREE_TYPE (fn)), fn);
2697 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2698 call_expr, arg_list, NULL_TREE);
2699 TREE_SIDE_EFFECTS (call_expr) = 1;
2700
2701 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2702 #else
2703 emit_library_call (bzero_libfunc, LCT_NORMAL,
2704 VOIDmode, 2, object, Pmode, size,
2705 TYPE_MODE (integer_type_node));
2706 #endif
2707
2708 /* If we are initializing a readonly value, show the above call
2709 clobbered it. Otherwise, a load from it may erroneously be
2710 hoisted from a loop. */
2711 if (RTX_UNCHANGING_P (object))
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2713 }
2714 }
2715
2716 return retval;
2717 }
2718
2719 /* Generate code to copy Y into X.
2720 Both Y and X must have the same mode, except that
2721 Y can be a constant with VOIDmode.
2722 This mode cannot be BLKmode; use emit_block_move for that.
2723
2724 Return the last instruction emitted. */
2725
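/* Illustrative example (not from the original sources):

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   loads the constant 42 into a fresh SImode pseudo; the constant is
   first forced into memory if it is not LEGITIMATE_CONSTANT_P on the
   target.  */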
2726 rtx
2727 emit_move_insn (x, y)
2728 rtx x, y;
2729 {
2730 enum machine_mode mode = GET_MODE (x);
2731 rtx y_cst = NULL_RTX;
2732 rtx last_insn;
2733
2734 x = protect_from_queue (x, 1);
2735 y = protect_from_queue (y, 0);
2736
2737 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2738 abort ();
2739
2740 /* Never force constant_p_rtx to memory. */
2741 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 ;
2743 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2744 {
2745 y_cst = y;
2746 y = force_const_mem (mode, y);
2747 }
2748
2749 /* If X or Y are memory references, verify that their addresses are valid
2750 for the machine. */
2751 if (GET_CODE (x) == MEM
2752 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2753 && ! push_operand (x, GET_MODE (x)))
2754 || (flag_force_addr
2755 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2756 x = validize_mem (x);
2757
2758 if (GET_CODE (y) == MEM
2759 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 || (flag_force_addr
2761 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2762 y = validize_mem (y);
2763
2764 if (mode == BLKmode)
2765 abort ();
2766
2767 last_insn = emit_move_insn_1 (x, y);
2768
2769 if (y_cst && GET_CODE (x) == REG)
2770 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2771
2772 return last_insn;
2773 }
2774
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2778
2779 rtx
2780 emit_move_insn_1 (x, y)
2781 rtx x, y;
2782 {
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2786 unsigned int i;
2787
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2789 abort ();
2790
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2792 return
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2794
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2798 * BITS_PER_UNIT),
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2801 0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2804 {
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2807
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than what the
2810 machine can push exactly, we need to use move instructions. */
2811 if (stack
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2813 {
2814 rtx temp;
2815 int offset1, offset2;
2816
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2821 sub_optab,
2822 #else
2823 add_optab,
2824 #endif
2825 stack_pointer_rtx,
2826 GEN_INT
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2828 stack_pointer_rtx,
2829 0,
2830 OPTAB_LIB_WIDEN);
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2834 offset1 = 0;
2835 offset2 = GET_MODE_SIZE (submode);
2836 #else
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2840 #endif
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2843 stack_pointer_rtx,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2851 }
2852 else
2853 #endif
2854 /* If this is a stack push, push the highpart first, so it
2855 will be in the argument order.
2856
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2859 if (stack)
2860 {
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2870 #else
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2877 #endif
2878 }
2879 else
2880 {
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2883
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2895 {
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2898
2899 if (packed_dest_p || packed_src_p)
2900 {
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2903
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2906
2907 if (reg_mode != BLKmode)
2908 {
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2912
2913 cfun->cannot_inline
2914 = N_("function using short complex types cannot be inline");
2915
2916 if (packed_dest_p)
2917 {
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2921 }
2922 else
2923 {
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2927 }
2928 }
2929 }
2930 }
2931
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2936
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2941 if (x != y
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2945 {
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2947 }
2948
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2953 }
2954
2955 return get_last_insn ();
2956 }
2957
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2962 {
2963 rtx last_insn = 0;
2964 rtx seq, inner;
2965 int need_clobber;
2966
2967 #ifdef PUSH_ROUNDING
2968
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2972 {
2973 rtx temp;
2974 enum rtx_code code;
2975
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2980 sub_optab,
2981 #else
2982 add_optab,
2983 #endif
2984 stack_pointer_rtx,
2985 GEN_INT
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2987 stack_pointer_rtx,
2988 0,
2989 OPTAB_LIB_WIDEN);
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2992
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3002 else
3003 temp = stack_pointer_rtx;
3004
3005 x = change_address (x, VOIDmode, temp);
3006 }
3007 #endif
3008
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3017
3018 start_sequence ();
3019
3020 need_clobber = 0;
3021 for (i = 0;
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3023 i++)
3024 {
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3027
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3032 {
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3035 }
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3038
3039 if (xpart == 0 || ypart == 0)
3040 abort ();
3041
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3043
3044 last_insn = emit_move_insn (xpart, ypart);
3045 }
3046
3047 seq = gen_sequence ();
3048 end_sequence ();
3049
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3054 if (x != y
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3057 {
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3059 }
3060
3061 emit_insn (seq);
3062
3063 return last_insn;
3064 }
3065 else
3066 abort ();
3067 }
3068 \f
3069 /* Pushing data onto the stack. */
3070
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3075
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3079
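/* Illustrative example (not from the original sources): on a target
   whose stack grows downward, push_block (GEN_INT (16), 0, 0) emits an
   anti_adjust_stack of 16 bytes and returns an address derived from
   virtual_outgoing_args_rtx that addresses the newly allocated block.  */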
3080 rtx
3081 push_block (size, extra, below)
3082 rtx size;
3083 int extra, below;
3084 {
3085 rtx temp;
3086
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3092 else
3093 {
3094 temp = copy_to_mode_reg (Pmode, size);
3095 if (extra != 0)
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3099 }
3100
3101 #ifndef STACK_GROWS_DOWNWARD
3102 if (0)
3103 #else
3104 if (1)
3105 #endif
3106 {
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125 }
3126
3127
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131 static rtx
3132 get_push_address (size)
3133 int size;
3134 {
3135 rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3143
3144 return copy_to_reg (temp);
3145 }
3146
3147 #ifdef PUSH_ROUNDING
3148
3149 /* Emit single push insn. */
3150
3151 static void
3152 emit_single_push_insn (mode, x, type)
3153 rtx x;
3154 enum machine_mode mode;
3155 tree type;
3156 {
3157 rtx dest_addr;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 rtx dest;
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3162
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it. Otherwise try the old way of
3165 throwing a MEM representing the push operation to the move expander. */
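/* Illustrative example (not from the original sources): with
   STACK_PUSH_CODE == PRE_DEC and a mode whose size equals its
   PUSH_ROUNDING, the fallback below builds a destination such as
   (mem:SI (pre_dec (reg sp))) and hands it to emit_move_insn.  */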
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3168 {
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3173 return;
3174 }
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3177 else
3178 {
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3182 #else
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3185 #endif
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3187 }
3188
3189 dest = gen_rtx_MEM (mode, dest_addr);
3190
3191 if (type != 0)
3192 {
3193 set_mem_attributes (dest, type, 1);
3194
3195 if (flag_optimize_sibling_calls)
3196 /* Function incoming arguments may overlap with sibling call
3197 outgoing arguments and we cannot allow reordering of reads
3198 from function arguments with stores to outgoing arguments
3199 of sibling calls. */
3200 set_mem_alias_set (dest, 0);
3201 }
3202 emit_move_insn (dest, x);
3203 }
3204 #endif
3205
3206 /* Generate code to push X onto the stack, assuming it has mode MODE and
3207 type TYPE.
3208 MODE is redundant except when X is a CONST_INT (since they don't
3209 carry mode info).
3210 SIZE is an rtx for the size of data to be copied (in bytes),
3211 needed only if X is BLKmode.
3212
3213 ALIGN (in bits) is maximum alignment we can assume.
3214
3215 If PARTIAL and REG are both nonzero, then copy that many of the first
3216 words of X into registers starting with REG, and push the rest of X.
3217 The amount of space pushed is decreased by PARTIAL words,
3218 rounded *down* to a multiple of PARM_BOUNDARY.
3219 REG must be a hard register in this case.
3220 If REG is zero but PARTIAL is not, take all other actions for an
3221 argument partially in registers, but do not actually load any
3222 registers.
3223
3224 EXTRA is the amount in bytes of extra space to leave next to this arg.
3225 This is ignored if an argument block has already been allocated.
3226
3227 On a machine that lacks real push insns, ARGS_ADDR is the address of
3228 the bottom of the argument block for this call. We use indexing off there
3229 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3230 argument block has not been preallocated.
3231
3232 ARGS_SO_FAR is the size of args previously pushed for this call.
3233
3234 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3235 for arguments passed in registers. If nonzero, it will be the number
3236 of bytes required. */
3237
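/* Illustrative note (not from the original sources): with PARTIAL == 2
   and REG a hard register, only the part of X beyond the first two words
   is pushed here; the two leading words are loaded into REG and the
   following register by move_block_to_reg at the end of this function.  */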
3238 void
3239 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3240 args_addr, args_so_far, reg_parm_stack_space,
3241 alignment_pad)
3242 rtx x;
3243 enum machine_mode mode;
3244 tree type;
3245 rtx size;
3246 unsigned int align;
3247 int partial;
3248 rtx reg;
3249 int extra;
3250 rtx args_addr;
3251 rtx args_so_far;
3252 int reg_parm_stack_space;
3253 rtx alignment_pad;
3254 {
3255 rtx xinner;
3256 enum direction stack_direction
3257 #ifdef STACK_GROWS_DOWNWARD
3258 = downward;
3259 #else
3260 = upward;
3261 #endif
3262
3263 /* Decide where to pad the argument: `downward' for below,
3264 `upward' for above, or `none' for don't pad it.
3265 Default is below for small data on big-endian machines; else above. */
3266 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3267
3268 /* Invert direction if stack is post-decrement.
3269 FIXME: why? */
3270 if (STACK_PUSH_CODE == POST_DEC)
3271 if (where_pad != none)
3272 where_pad = (where_pad == downward ? upward : downward);
3273
3274 xinner = x = protect_from_queue (x, 0);
3275
3276 if (mode == BLKmode)
3277 {
3278 /* Copy a block into the stack, entirely or partially. */
3279
3280 rtx temp;
3281 int used = partial * UNITS_PER_WORD;
3282 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3283 int skip;
3284
3285 if (size == 0)
3286 abort ();
3287
3288 used -= offset;
3289
3290 /* USED is now the # of bytes we need not copy to the stack
3291 because registers will take care of them. */
3292
3293 if (partial != 0)
3294 xinner = adjust_address (xinner, BLKmode, used);
3295
3296 /* If the partial register-part of the arg counts in its stack size,
3297 skip the part of stack space corresponding to the registers.
3298 Otherwise, start copying to the beginning of the stack space,
3299 by setting SKIP to 0. */
3300 skip = (reg_parm_stack_space == 0) ? 0 : used;
3301
3302 #ifdef PUSH_ROUNDING
3303 /* Do it with several push insns if that doesn't take lots of insns
3304 and if there is no difficulty with push insns that skip bytes
3305 on the stack for alignment purposes. */
3306 if (args_addr == 0
3307 && PUSH_ARGS
3308 && GET_CODE (size) == CONST_INT
3309 && skip == 0
3310 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3311 /* Here we avoid the case of a structure whose weak alignment
3312 forces many pushes of a small amount of data,
3313 and such small pushes do rounding that causes trouble. */
3314 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3315 || align >= BIGGEST_ALIGNMENT
3316 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3317 == (align / BITS_PER_UNIT)))
3318 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3319 {
3320 /* Push padding now if padding above and stack grows down,
3321 or if padding below and stack grows up.
3322 But if space already allocated, this has already been done. */
3323 if (extra && args_addr == 0
3324 && where_pad != none && where_pad != stack_direction)
3325 anti_adjust_stack (GEN_INT (extra));
3326
3327 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3328
3329 if (current_function_check_memory_usage && ! in_check_memory_usage)
3330 {
3331 rtx temp;
3332
3333 in_check_memory_usage = 1;
3334 temp = get_push_address (INTVAL (size) - used);
3335 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3336 emit_library_call (chkr_copy_bitmap_libfunc,
3337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3338 Pmode, XEXP (xinner, 0), Pmode,
3339 GEN_INT (INTVAL (size) - used),
3340 TYPE_MODE (sizetype));
3341 else
3342 emit_library_call (chkr_set_right_libfunc,
3343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3344 Pmode, GEN_INT (INTVAL (size) - used),
3345 TYPE_MODE (sizetype),
3346 GEN_INT (MEMORY_USE_RW),
3347 TYPE_MODE (integer_type_node));
3348 in_check_memory_usage = 0;
3349 }
3350 }
3351 else
3352 #endif /* PUSH_ROUNDING */
3353 {
3354 rtx target;
3355
3356 /* Otherwise make space on the stack and copy the data
3357 to the address of that space. */
3358
3359 /* Deduct words put into registers from the size we must copy. */
3360 if (partial != 0)
3361 {
3362 if (GET_CODE (size) == CONST_INT)
3363 size = GEN_INT (INTVAL (size) - used);
3364 else
3365 size = expand_binop (GET_MODE (size), sub_optab, size,
3366 GEN_INT (used), NULL_RTX, 0,
3367 OPTAB_LIB_WIDEN);
3368 }
3369
3370 /* Get the address of the stack space.
3371 In this case, we do not deal with EXTRA separately.
3372 A single stack adjust will do. */
3373 if (! args_addr)
3374 {
3375 temp = push_block (size, extra, where_pad == downward);
3376 extra = 0;
3377 }
3378 else if (GET_CODE (args_so_far) == CONST_INT)
3379 temp = memory_address (BLKmode,
3380 plus_constant (args_addr,
3381 skip + INTVAL (args_so_far)));
3382 else
3383 temp = memory_address (BLKmode,
3384 plus_constant (gen_rtx_PLUS (Pmode,
3385 args_addr,
3386 args_so_far),
3387 skip));
3388 if (current_function_check_memory_usage && ! in_check_memory_usage)
3389 {
3390 in_check_memory_usage = 1;
3391 target = copy_to_reg (temp);
3392 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3393 emit_library_call (chkr_copy_bitmap_libfunc,
3394 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3395 target, Pmode,
3396 XEXP (xinner, 0), Pmode,
3397 size, TYPE_MODE (sizetype));
3398 else
3399 emit_library_call (chkr_set_right_libfunc,
3400 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3401 target, Pmode,
3402 size, TYPE_MODE (sizetype),
3403 GEN_INT (MEMORY_USE_RW),
3404 TYPE_MODE (integer_type_node));
3405 in_check_memory_usage = 0;
3406 }
3407
3408 target = gen_rtx_MEM (BLKmode, temp);
3409
3410 if (type != 0)
3411 {
3412 set_mem_attributes (target, type, 1);
3413 /* Function incoming arguments may overlap with sibling call
3414 outgoing arguments and we cannot allow reordering of reads
3415 from function arguments with stores to outgoing arguments
3416 of sibling calls. */
3417 set_mem_alias_set (target, 0);
3418 }
3419 else
3420 set_mem_align (target, align);
3421
3422 /* TEMP is the address of the block. Copy the data there. */
3423 if (GET_CODE (size) == CONST_INT
3424 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3425 {
3426 move_by_pieces (target, xinner, INTVAL (size), align);
3427 goto ret;
3428 }
3429 else
3430 {
3431 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3432 enum machine_mode mode;
3433
3434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3435 mode != VOIDmode;
3436 mode = GET_MODE_WIDER_MODE (mode))
3437 {
3438 enum insn_code code = movstr_optab[(int) mode];
3439 insn_operand_predicate_fn pred;
3440
3441 if (code != CODE_FOR_nothing
3442 && ((GET_CODE (size) == CONST_INT
3443 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3444 <= (GET_MODE_MASK (mode) >> 1)))
3445 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3446 && (!(pred = insn_data[(int) code].operand[0].predicate)
3447 || ((*pred) (target, BLKmode)))
3448 && (!(pred = insn_data[(int) code].operand[1].predicate)
3449 || ((*pred) (xinner, BLKmode)))
3450 && (!(pred = insn_data[(int) code].operand[3].predicate)
3451 || ((*pred) (opalign, VOIDmode))))
3452 {
3453 rtx op2 = convert_to_mode (mode, size, 1);
3454 rtx last = get_last_insn ();
3455 rtx pat;
3456
3457 pred = insn_data[(int) code].operand[2].predicate;
3458 if (pred != 0 && ! (*pred) (op2, mode))
3459 op2 = copy_to_mode_reg (mode, op2);
3460
3461 pat = GEN_FCN ((int) code) (target, xinner,
3462 op2, opalign);
3463 if (pat)
3464 {
3465 emit_insn (pat);
3466 goto ret;
3467 }
3468 else
3469 delete_insns_since (last);
3470 }
3471 }
3472 }
3473
3474 if (!ACCUMULATE_OUTGOING_ARGS)
3475 {
3476 /* If the source is referenced relative to the stack pointer,
3477 copy it to another register to stabilize it. We do not need
3478 to do this if we know that we won't be changing sp. */
3479
3480 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3481 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3482 temp = copy_to_reg (temp);
3483 }
3484
3485 /* Make inhibit_defer_pop nonzero around the library call
3486 to force it to pop the bcopy-arguments right away. */
3487 NO_DEFER_POP;
3488 #ifdef TARGET_MEM_FUNCTIONS
3489 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3490 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3491 convert_to_mode (TYPE_MODE (sizetype),
3492 size, TREE_UNSIGNED (sizetype)),
3493 TYPE_MODE (sizetype));
3494 #else
3495 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3496 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3497 convert_to_mode (TYPE_MODE (integer_type_node),
3498 size,
3499 TREE_UNSIGNED (integer_type_node)),
3500 TYPE_MODE (integer_type_node));
3501 #endif
3502 OK_DEFER_POP;
3503 }
3504 }
3505 else if (partial > 0)
3506 {
3507 /* Scalar partly in registers. */
3508
3509 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3510 int i;
3511 int not_stack;
3512 /* # words of start of argument
3513 that we must make space for but need not store. */
3514 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3515 int args_offset = INTVAL (args_so_far);
3516 int skip;
3517
3518 /* Push padding now if padding above and stack grows down,
3519 or if padding below and stack grows up.
3520 But if space already allocated, this has already been done. */
3521 if (extra && args_addr == 0
3522 && where_pad != none && where_pad != stack_direction)
3523 anti_adjust_stack (GEN_INT (extra));
3524
3525 /* If we make space by pushing it, we might as well push
3526 the real data. Otherwise, we can leave OFFSET nonzero
3527 and leave the space uninitialized. */
3528 if (args_addr == 0)
3529 offset = 0;
3530
3531 /* Now NOT_STACK gets the number of words that we don't need to
3532 allocate on the stack. */
3533 not_stack = partial - offset;
3534
3535 /* If the partial register-part of the arg counts in its stack size,
3536 skip the part of stack space corresponding to the registers.
3537 Otherwise, start copying to the beginning of the stack space,
3538 by setting SKIP to 0. */
3539 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3540
3541 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3542 x = validize_mem (force_const_mem (mode, x));
3543
3544 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3545 SUBREGs of such registers are not allowed. */
3546 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3547 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3548 x = copy_to_reg (x);
3549
3550 /* Loop over all the words allocated on the stack for this arg. */
3551 /* We can do it by words, because any scalar bigger than a word
3552 has a size that is a multiple of a word. */
3553 #ifndef PUSH_ARGS_REVERSED
3554 for (i = not_stack; i < size; i++)
3555 #else
3556 for (i = size - 1; i >= not_stack; i--)
3557 #endif
3558 if (i >= not_stack + offset)
3559 emit_push_insn (operand_subword_force (x, i, mode),
3560 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3561 0, args_addr,
3562 GEN_INT (args_offset + ((i - not_stack + skip)
3563 * UNITS_PER_WORD)),
3564 reg_parm_stack_space, alignment_pad);
3565 }
3566 else
3567 {
3568 rtx addr;
3569 rtx target = NULL_RTX;
3570 rtx dest;
3571
3572 /* Push padding now if padding above and stack grows down,
3573 or if padding below and stack grows up.
3574 But if space already allocated, this has already been done. */
3575 if (extra && args_addr == 0
3576 && where_pad != none && where_pad != stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3578
3579 #ifdef PUSH_ROUNDING
3580 if (args_addr == 0 && PUSH_ARGS)
3581 emit_single_push_insn (mode, x, type);
3582 else
3583 #endif
3584 {
3585 if (GET_CODE (args_so_far) == CONST_INT)
3586 addr
3587 = memory_address (mode,
3588 plus_constant (args_addr,
3589 INTVAL (args_so_far)));
3590 else
3591 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3592 args_so_far));
3593 target = addr;
3594 dest = gen_rtx_MEM (mode, addr);
3595 if (type != 0)
3596 {
3597 set_mem_attributes (dest, type, 1);
3598 /* Function incoming arguments may overlap with sibling call
3599 outgoing arguments and we cannot allow reordering of reads
3600 from function arguments with stores to outgoing arguments
3601 of sibling calls. */
3602 set_mem_alias_set (dest, 0);
3603 }
3604
3605 emit_move_insn (dest, x);
3606 }
3607
3608 if (current_function_check_memory_usage && ! in_check_memory_usage)
3609 {
3610 in_check_memory_usage = 1;
3611 if (target == 0)
3612 target = get_push_address (GET_MODE_SIZE (mode));
3613
3614 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3615 emit_library_call (chkr_copy_bitmap_libfunc,
3616 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3617 Pmode, XEXP (x, 0), Pmode,
3618 GEN_INT (GET_MODE_SIZE (mode)),
3619 TYPE_MODE (sizetype));
3620 else
3621 emit_library_call (chkr_set_right_libfunc,
3622 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3623 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3624 TYPE_MODE (sizetype),
3625 GEN_INT (MEMORY_USE_RW),
3626 TYPE_MODE (integer_type_node));
3627 in_check_memory_usage = 0;
3628 }
3629 }
3630
3631 ret:
3632 /* If part should go in registers, copy that part
3633 into the appropriate registers. Do this now, at the end,
3634 since mem-to-mem copies above may do function calls. */
3635 if (partial > 0 && reg != 0)
3636 {
3637 /* Handle calls that pass values in multiple non-contiguous locations.
3638 The Irix 6 ABI has examples of this. */
3639 if (GET_CODE (reg) == PARALLEL)
3640 emit_group_load (reg, x, -1); /* ??? size? */
3641 else
3642 move_block_to_reg (REGNO (reg), x, partial, mode);
3643 }
3644
3645 if (extra && args_addr == 0 && where_pad == stack_direction)
3646 anti_adjust_stack (GEN_INT (extra));
3647
3648 if (alignment_pad && args_addr == 0)
3649 anti_adjust_stack (alignment_pad);
3650 }
3651 \f
3652 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3653 operations. */
3654
3655 static rtx
3656 get_subtarget (x)
3657 rtx x;
3658 {
3659 return ((x == 0
3660 /* Only registers can be subtargets. */
3661 || GET_CODE (x) != REG
3662 /* If the register is readonly, it can't be set more than once. */
3663 || RTX_UNCHANGING_P (x)
3664 /* Don't use hard regs to avoid extending their life. */
3665 || REGNO (x) < FIRST_PSEUDO_REGISTER
3666 /* Avoid subtargets inside loops,
3667 since they hide some invariant expressions. */
3668 || preserve_subexpressions_p ())
3669 ? 0 : x);
3670 }
3671
3672 /* Expand an assignment that stores the value of FROM into TO.
3673 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3674 (This may contain a QUEUED rtx;
3675 if the value is constant, this rtx is a constant.)
3676 Otherwise, the returned value is NULL_RTX.
3677
3678 SUGGEST_REG is no longer actually used.
3679 It used to mean, copy the value through a register
3680 and return that register, if that is possible.
3681 We now use WANT_VALUE to decide whether to do this. */
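/* Added summary (not part of the original interface comment): component,
   bit-field and array references on the left-hand side are decomposed with
   get_inner_reference and stored through store_field; a non-aggregate
   CALL_EXPR on the right-hand side is expanded before the left-hand side;
   stores into a RESULT_DECL and returns of overlapping structures get
   special handling; everything else falls through to store_expr.  */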
3682
3683 rtx
3684 expand_assignment (to, from, want_value, suggest_reg)
3685 tree to, from;
3686 int want_value;
3687 int suggest_reg ATTRIBUTE_UNUSED;
3688 {
3689 rtx to_rtx = 0;
3690 rtx result;
3691
3692 /* Don't crash if the lhs of the assignment was erroneous. */
3693
3694 if (TREE_CODE (to) == ERROR_MARK)
3695 {
3696 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3697 return want_value ? result : NULL_RTX;
3698 }
3699
3700 /* Assignment of a structure component needs special treatment
3701 if the structure component's rtx is not simply a MEM.
3702 Assignment of an array element at a constant index, and assignment of
3703 an array element in an unaligned packed structure field, have the same
3704 problem. */
3705
3706 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3707 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3708 {
3709 enum machine_mode mode1;
3710 HOST_WIDE_INT bitsize, bitpos;
3711 rtx orig_to_rtx;
3712 tree offset;
3713 int unsignedp;
3714 int volatilep = 0;
3715 tree tem;
3716
3717 push_temp_slots ();
3718 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3719 &unsignedp, &volatilep);
3720
3721 /* If we are going to use store_bit_field and extract_bit_field,
3722 make sure to_rtx will be safe for multiple use. */
3723
3724 if (mode1 == VOIDmode && want_value)
3725 tem = stabilize_reference (tem);
3726
3727 orig_to_rtx = to_rtx
3728 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3729 if (offset != 0)
3730 {
3731 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3732
3733 if (GET_CODE (to_rtx) != MEM)
3734 abort ();
3735
3736 if (GET_MODE (offset_rtx) != ptr_mode)
3737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3738
3739 #ifdef POINTERS_EXTEND_UNSIGNED
3740 if (GET_MODE (offset_rtx) != Pmode)
3741 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3742 #endif
3743
3744 /* A constant address in TO_RTX can have VOIDmode; we must not try
3745 to call force_reg in that case, so avoid it. */
3746 if (GET_CODE (to_rtx) == MEM
3747 && GET_MODE (to_rtx) == BLKmode
3748 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3749 && bitsize > 0
3750 && (bitpos % bitsize) == 0
3751 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3752 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3753 {
3754 rtx temp
3755 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3756
3757 if (GET_CODE (XEXP (temp, 0)) == REG)
3758 to_rtx = temp;
3759 else
3760 to_rtx = (replace_equiv_address
3761 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3762 XEXP (temp, 0))));
3763 bitpos = 0;
3764 }
3765
3766 to_rtx = offset_address (to_rtx, offset_rtx,
3767 highest_pow2_factor (offset));
3768 }
3769
3770 if (GET_CODE (to_rtx) == MEM)
3771 {
3772 tree old_expr = MEM_EXPR (to_rtx);
3773
3774 /* If the field is at offset zero, we could have been given the
3775 DECL_RTX of the parent struct. Don't munge it. */
3776 to_rtx = shallow_copy_rtx (to_rtx);
3777
3778 set_mem_attributes (to_rtx, to, 0);
3779
3780 /* If we changed MEM_EXPR, that means we're now referencing
3781 the COMPONENT_REF, which means that MEM_OFFSET must be
3782 relative to that field. But we've not yet reflected BITPOS
3783 in TO_RTX. This will be done in store_field. Adjust for
3784 that by biasing MEM_OFFSET by -bitpos. */
3785 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3786 && (bitpos / BITS_PER_UNIT) != 0)
3787 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3788 - (bitpos / BITS_PER_UNIT)));
3789 }
3790
3791 /* Deal with volatile and readonly fields. The former is only done
3792 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3793 if (volatilep && GET_CODE (to_rtx) == MEM)
3794 {
3795 if (to_rtx == orig_to_rtx)
3796 to_rtx = copy_rtx (to_rtx);
3797 MEM_VOLATILE_P (to_rtx) = 1;
3798 }
3799
3800 if (TREE_CODE (to) == COMPONENT_REF
3801 && TREE_READONLY (TREE_OPERAND (to, 1)))
3802 {
3803 if (to_rtx == orig_to_rtx)
3804 to_rtx = copy_rtx (to_rtx);
3805 RTX_UNCHANGING_P (to_rtx) = 1;
3806 }
3807
3808 if (! can_address_p (to))
3809 {
3810 if (to_rtx == orig_to_rtx)
3811 to_rtx = copy_rtx (to_rtx);
3812 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3813 }
3814
3815 /* Check the access. */
3816 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3817 {
3818 rtx to_addr;
3819 int size;
3820 int best_mode_size;
3821 enum machine_mode best_mode;
3822
3823 best_mode = get_best_mode (bitsize, bitpos,
3824 TYPE_ALIGN (TREE_TYPE (tem)),
3825 mode1, volatilep);
3826 if (best_mode == VOIDmode)
3827 best_mode = QImode;
3828
3829 best_mode_size = GET_MODE_BITSIZE (best_mode);
3830 to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
3831 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3832 size *= GET_MODE_SIZE (best_mode);
3833
3834 /* Check the access rights of the pointer. */
3835 in_check_memory_usage = 1;
3836 if (size)
3837 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3838 VOIDmode, 3, to_addr, Pmode,
3839 GEN_INT (size), TYPE_MODE (sizetype),
3840 GEN_INT (MEMORY_USE_WO),
3841 TYPE_MODE (integer_type_node));
3842 in_check_memory_usage = 0;
3843 }
3844
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3846 (want_value
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3850 : VOIDmode),
3851 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3852
3853 preserve_temp_slots (result);
3854 free_temp_slots ();
3855 pop_temp_slots ();
3856
3857 /* If the value is meaningful, convert RESULT to the proper mode.
3858 Otherwise, return nothing. */
3859 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3860 TYPE_MODE (TREE_TYPE (from)),
3861 result,
3862 TREE_UNSIGNED (TREE_TYPE (to)))
3863 : NULL_RTX);
3864 }
3865
3866 /* If the rhs is a function call and its value is not an aggregate,
3867 call the function before we start to compute the lhs.
3868 This is needed for correct code for cases such as
3869 val = setjmp (buf) on machines where reference to val
3870 requires loading up part of an address in a separate insn.
3871
3872 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3873 since it might be a promoted variable where the zero- or sign-extension
3874 needs to be done. Handling this in the normal way is safe because no
3875 computation is done before the call. */
3876 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3877 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3878 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3879 && GET_CODE (DECL_RTL (to)) == REG))
3880 {
3881 rtx value;
3882
3883 push_temp_slots ();
3884 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3885 if (to_rtx == 0)
3886 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3887
3888 /* Handle calls that return values in multiple non-contiguous locations.
3889 The Irix 6 ABI has examples of this. */
3890 if (GET_CODE (to_rtx) == PARALLEL)
3891 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3892 else if (GET_MODE (to_rtx) == BLKmode)
3893 emit_block_move (to_rtx, value, expr_size (from));
3894 else
3895 {
3896 #ifdef POINTERS_EXTEND_UNSIGNED
3897 if (POINTER_TYPE_P (TREE_TYPE (to))
3898 && GET_MODE (to_rtx) != GET_MODE (value))
3899 value = convert_memory_address (GET_MODE (to_rtx), value);
3900 #endif
3901 emit_move_insn (to_rtx, value);
3902 }
3903 preserve_temp_slots (to_rtx);
3904 free_temp_slots ();
3905 pop_temp_slots ();
3906 return want_value ? to_rtx : NULL_RTX;
3907 }
3908
3909 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3910 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3911
3912 if (to_rtx == 0)
3913 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3914
3915 /* Don't move directly into a return register. */
3916 if (TREE_CODE (to) == RESULT_DECL
3917 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3918 {
3919 rtx temp;
3920
3921 push_temp_slots ();
3922 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3923
3924 if (GET_CODE (to_rtx) == PARALLEL)
3925 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3926 else
3927 emit_move_insn (to_rtx, temp);
3928
3929 preserve_temp_slots (to_rtx);
3930 free_temp_slots ();
3931 pop_temp_slots ();
3932 return want_value ? to_rtx : NULL_RTX;
3933 }
3934
3935 /* In case we are returning the contents of an object which overlaps
3936 the place the value is being stored, use a safe function when copying
3937 a value through a pointer into a structure value return block. */
3938 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3939 && current_function_returns_struct
3940 && !current_function_returns_pcc_struct)
3941 {
3942 rtx from_rtx, size;
3943
3944 push_temp_slots ();
3945 size = expr_size (from);
3946 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3947 EXPAND_MEMORY_USE_DONT);
3948
3949 /* Copy the rights of the bitmap. */
3950 if (current_function_check_memory_usage)
3951 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3952 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3953 XEXP (from_rtx, 0), Pmode,
3954 convert_to_mode (TYPE_MODE (sizetype),
3955 size, TREE_UNSIGNED (sizetype)),
3956 TYPE_MODE (sizetype));
3957
3958 #ifdef TARGET_MEM_FUNCTIONS
3959 emit_library_call (memmove_libfunc, LCT_NORMAL,
3960 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3961 XEXP (from_rtx, 0), Pmode,
3962 convert_to_mode (TYPE_MODE (sizetype),
3963 size, TREE_UNSIGNED (sizetype)),
3964 TYPE_MODE (sizetype));
3965 #else
3966 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3967 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3968 XEXP (to_rtx, 0), Pmode,
3969 convert_to_mode (TYPE_MODE (integer_type_node),
3970 size, TREE_UNSIGNED (integer_type_node)),
3971 TYPE_MODE (integer_type_node));
3972 #endif
3973
3974 preserve_temp_slots (to_rtx);
3975 free_temp_slots ();
3976 pop_temp_slots ();
3977 return want_value ? to_rtx : NULL_RTX;
3978 }
3979
3980 /* Compute FROM and store the value in the rtx we got. */
3981
3982 push_temp_slots ();
3983 result = store_expr (from, to_rtx, want_value);
3984 preserve_temp_slots (result);
3985 free_temp_slots ();
3986 pop_temp_slots ();
3987 return want_value ? result : NULL_RTX;
3988 }
3989
3990 /* Generate code for computing expression EXP,
3991 and storing the value into TARGET.
3992 TARGET may contain a QUEUED rtx.
3993
3994 If WANT_VALUE is nonzero, return a copy of the value
3995 not in TARGET, so that we can be sure to use the proper
3996 value in a containing expression even if TARGET has something
3997 else stored in it. If possible, we copy the value through a pseudo
3998 and return that pseudo. Or, if the value is constant, we try to
3999 return the constant. In some cases, we return a pseudo
4000 copied *from* TARGET.
4001
4002 If the mode is BLKmode then we may return TARGET itself.
4003 It turns out that in BLKmode it doesn't cause a problem,
4004 because C has no operators that could combine two different
4005 assignments into the same BLKmode object with different values
4006 with no sequence point. Will other languages need this to
4007 be more thorough?
4008
4009 If WANT_VALUE is 0, we return NULL, to make sure
4010 to catch quickly any cases where the caller uses the value
4011 and fails to set WANT_VALUE. */
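/* Added summary of the special cases handled below: a COMPOUND_EXPR is
   split into its two halves; a COND_EXPR with a BLKmode target becomes a
   branch around two store_expr calls; a target containing a pending
   postincrement (QUEUED) is never used to generate the rhs; a non-volatile
   MEM target with WANT_VALUE set has the value computed into a register
   first; and a promoted SUBREG target is computed in its declared mode and
   widened with convert_move.  */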
4012
4013 rtx
4014 store_expr (exp, target, want_value)
4015 tree exp;
4016 rtx target;
4017 int want_value;
4018 {
4019 rtx temp;
4020 int dont_return_target = 0;
4021 int dont_store_target = 0;
4022
4023 if (TREE_CODE (exp) == COMPOUND_EXPR)
4024 {
4025 /* Perform first part of compound expression, then assign from second
4026 part. */
4027 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4028 emit_queue ();
4029 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4030 }
4031 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4032 {
4033 /* For a conditional expression, get a safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4037
4038 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4039
4040 emit_queue ();
4041 target = protect_from_queue (target, 1);
4042
4043 do_pending_stack_adjust ();
4044 NO_DEFER_POP;
4045 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp, 1), target, 0);
4048 end_cleanup_deferral ();
4049 emit_queue ();
4050 emit_jump_insn (gen_jump (lab2));
4051 emit_barrier ();
4052 emit_label (lab1);
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp, 2), target, 0);
4055 end_cleanup_deferral ();
4056 emit_queue ();
4057 emit_label (lab2);
4058 OK_DEFER_POP;
4059
4060 return want_value ? target : NULL_RTX;
4061 }
4062 else if (queued_subexp_p (target))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4065 {
4066 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4067 {
4068 /* Expand EXP into a new pseudo. */
4069 temp = gen_reg_rtx (GET_MODE (target));
4070 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4071 }
4072 else
4073 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4074
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target) && want_value)
4079 dont_return_target = 1;
4080 }
4081 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4082 && GET_MODE (target) != BLKmode)
4083 /* If target is in memory and caller wants value in a register instead,
4084 arrange that. Pass TARGET as target for expand_expr so that,
4085 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4086 We know expand_expr will not use the target in that case.
4087 Don't do this if TARGET is volatile because we are supposed
4088 to write it and then read it. */
4089 {
4090 temp = expand_expr (exp, target, GET_MODE (target), 0);
4091 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4092 {
4093 /* If TEMP is already in the desired TARGET, only copy it from
4094 memory and don't store it there again. */
4095 if (temp == target
4096 || (rtx_equal_p (temp, target)
4097 && ! side_effects_p (temp) && ! side_effects_p (target)))
4098 dont_store_target = 1;
4099 temp = copy_to_reg (temp);
4100 }
4101 dont_return_target = 1;
4102 }
4103 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4104 /* If this is a scalar in a register that is stored in a wider mode
4105 than the declared mode, compute the result into its declared mode
4106 and then convert to the wider mode. Our value is the computed
4107 expression. */
4108 {
4109 /* If we don't want a value, we can do the conversion inside EXP,
4110 which will often result in some optimizations. Do the conversion
4111 in two steps: first change the signedness, if needed, then
4112 the extend. But don't do this if the type of EXP is a subtype
4113 of something else since then the conversion might involve
4114 more than just converting modes. */
4115 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4116 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4117 {
4118 if (TREE_UNSIGNED (TREE_TYPE (exp))
4119 != SUBREG_PROMOTED_UNSIGNED_P (target))
4120 exp
4121 = convert
4122 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4123 TREE_TYPE (exp)),
4124 exp);
4125
4126 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4127 SUBREG_PROMOTED_UNSIGNED_P (target)),
4128 exp);
4129 }
4130
4131 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4132
4133 /* If TEMP is a volatile MEM and we want a result value, make
4134 the access now so it gets done only once. Likewise if
4135 it contains TARGET. */
4136 if (GET_CODE (temp) == MEM && want_value
4137 && (MEM_VOLATILE_P (temp)
4138 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4139 temp = copy_to_reg (temp);
4140
4141 /* If TEMP is a VOIDmode constant, use convert_modes to make
4142 sure that we properly convert it. */
4143 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4144 {
4145 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4146 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4147 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4148 GET_MODE (target), temp,
4149 SUBREG_PROMOTED_UNSIGNED_P (target));
4150 }
4151
4152 convert_move (SUBREG_REG (target), temp,
4153 SUBREG_PROMOTED_UNSIGNED_P (target));
4154
4155 /* If we promoted a constant, change the mode back down to match
4156 target. Otherwise, the caller might get confused by a result whose
4157 mode is larger than expected. */
4158
4159 if (want_value && GET_MODE (temp) != GET_MODE (target)
4160 && GET_MODE (temp) != VOIDmode)
4161 {
4162 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4163 SUBREG_PROMOTED_VAR_P (temp) = 1;
4164 SUBREG_PROMOTED_UNSIGNED_P (temp)
4165 = SUBREG_PROMOTED_UNSIGNED_P (target);
4166 }
4167
4168 return want_value ? temp : NULL_RTX;
4169 }
4170 else
4171 {
4172 temp = expand_expr (exp, target, GET_MODE (target), 0);
4173 /* Return TARGET if it's a specified hardware register.
4174 If TARGET is a volatile mem ref, either return TARGET
4175 or return a reg copied *from* TARGET; ANSI requires this.
4176
4177 Otherwise, if TEMP is not TARGET, return TEMP
4178 if it is constant (for efficiency),
4179 or if we really want the correct value. */
4180 if (!(target && GET_CODE (target) == REG
4181 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4182 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4183 && ! rtx_equal_p (temp, target)
4184 && (CONSTANT_P (temp) || want_value))
4185 dont_return_target = 1;
4186 }
4187
4188 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4189 the same as that of TARGET, adjust the constant. This is needed, for
4190 example, in case it is a CONST_DOUBLE and we want only a word-sized
4191 value. */
4192 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4193 && TREE_CODE (exp) != ERROR_MARK
4194 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4195 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4196 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4197
4198 if (current_function_check_memory_usage
4199 && GET_CODE (target) == MEM
4200 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4201 {
4202 in_check_memory_usage = 1;
4203 if (GET_CODE (temp) == MEM)
4204 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4205 VOIDmode, 3, XEXP (target, 0), Pmode,
4206 XEXP (temp, 0), Pmode,
4207 expr_size (exp), TYPE_MODE (sizetype));
4208 else
4209 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4210 VOIDmode, 3, XEXP (target, 0), Pmode,
4211 expr_size (exp), TYPE_MODE (sizetype),
4212 GEN_INT (MEMORY_USE_WO),
4213 TYPE_MODE (integer_type_node));
4214 in_check_memory_usage = 0;
4215 }
4216
4217 /* If value was not generated in the target, store it there.
4218 Convert the value to TARGET's type first if necessary. */
4219 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4220 one or both of them are volatile memory refs, we have to distinguish
4221 two cases:
4222 - expand_expr has used TARGET. In this case, we must not generate
4223 another copy. This can be detected by TEMP and TARGET being equal
4224 according to == .
4225 - expand_expr has not used TARGET - that means that the source just
4226 happens to have the same RTX form. Since temp will have been created
4227 by expand_expr, it will compare unequal according to == .
4228 We must generate a copy in this case, to reach the correct number
4229 of volatile memory references. */
4230
4231 if ((! rtx_equal_p (temp, target)
4232 || (temp != target && (side_effects_p (temp)
4233 || side_effects_p (target))))
4234 && TREE_CODE (exp) != ERROR_MARK
4235 && ! dont_store_target)
4236 {
4237 target = protect_from_queue (target, 1);
4238 if (GET_MODE (temp) != GET_MODE (target)
4239 && GET_MODE (temp) != VOIDmode)
4240 {
4241 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4242 if (dont_return_target)
4243 {
4244 /* In this case, we will return TEMP,
4245 so make sure it has the proper mode.
4246 But don't forget to store the value into TARGET. */
4247 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4248 emit_move_insn (target, temp);
4249 }
4250 else
4251 convert_move (target, temp, unsignedp);
4252 }
4253
4254 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4255 {
4256 /* Handle copying a string constant into an array. The string
4257 constant may be shorter than the array. So copy just the string's
4258 actual length, and clear the rest. First get the size of the data
4259 type of the string, which is actually the size of the target. */
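/* Illustrative case (hypothetical source, not from this file): for
   char a[10] = "hi"; the STRING_CST is normally 3 bytes including the
   terminating nul, so 3 bytes are copied and the remaining 7 are cleared
   by the clear_storage call below.  */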
4260 rtx size = expr_size (exp);
4261
4262 if (GET_CODE (size) == CONST_INT
4263 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4264 emit_block_move (target, temp, size);
4265 else
4266 {
4267 /* Compute the size of the data to copy from the string. */
4268 tree copy_size
4269 = size_binop (MIN_EXPR,
4270 make_tree (sizetype, size),
4271 size_int (TREE_STRING_LENGTH (exp)));
4272 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4273 VOIDmode, 0);
4274 rtx label = 0;
4275
4276 /* Copy that much. */
4277 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4278 emit_block_move (target, temp, copy_size_rtx);
4279
4280 /* Figure out how much is left in TARGET that we have to clear.
4281 Do all calculations in ptr_mode. */
4282 if (GET_CODE (copy_size_rtx) == CONST_INT)
4283 {
4284 size = plus_constant (size, -INTVAL (copy_size_rtx));
4285 target = adjust_address (target, BLKmode,
4286 INTVAL (copy_size_rtx));
4287 }
4288 else
4289 {
4290 size = expand_binop (ptr_mode, sub_optab, size,
4291 copy_size_rtx, NULL_RTX, 0,
4292 OPTAB_LIB_WIDEN);
4293
4294 #ifdef POINTERS_EXTEND_UNSIGNED
4295 if (GET_MODE (copy_size_rtx) != Pmode)
4296 copy_size_rtx = convert_memory_address (Pmode,
4297 copy_size_rtx);
4298 #endif
4299
4300 target = offset_address (target, copy_size_rtx,
4301 highest_pow2_factor (copy_size));
4302 label = gen_label_rtx ();
4303 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4304 GET_MODE (size), 0, label);
4305 }
4306
4307 if (size != const0_rtx)
4308 {
4309 /* Be sure we can write to the address in TARGET. */
4310 in_check_memory_usage = 1;
4311 if (current_function_check_memory_usage)
4312 emit_library_call (chkr_check_addr_libfunc,
4313 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4314 XEXP (target, 0), Pmode,
4315 size, TYPE_MODE (sizetype),
4316 GEN_INT (MEMORY_USE_WO),
4317 TYPE_MODE (integer_type_node));
4318 in_check_memory_usage = 0;
4319 clear_storage (target, size);
4320 }
4321
4322 if (label)
4323 emit_label (label);
4324 }
4325 }
4326 /* Handle calls that return values in multiple non-contiguous locations.
4327 The Irix 6 ABI has examples of this. */
4328 else if (GET_CODE (target) == PARALLEL)
4329 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4330 else if (GET_MODE (temp) == BLKmode)
4331 emit_block_move (target, temp, expr_size (exp));
4332 else
4333 emit_move_insn (target, temp);
4334 }
4335
4336 /* If we don't want a value, return NULL_RTX. */
4337 if (! want_value)
4338 return NULL_RTX;
4339
4340 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4341 ??? The latter test doesn't seem to make sense. */
4342 else if (dont_return_target && GET_CODE (temp) != MEM)
4343 return temp;
4344
4345 /* Return TARGET itself if it is a hard register. */
4346 else if (want_value && GET_MODE (target) != BLKmode
4347 && ! (GET_CODE (target) == REG
4348 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4349 return copy_to_reg (target);
4350
4351 else
4352 return target;
4353 }
4354 \f
4355 /* Return 1 if EXP just contains zeros. */
4356
4357 static int
4358 is_zeros_p (exp)
4359 tree exp;
4360 {
4361 tree elt;
4362
4363 switch (TREE_CODE (exp))
4364 {
4365 case CONVERT_EXPR:
4366 case NOP_EXPR:
4367 case NON_LVALUE_EXPR:
4368 case VIEW_CONVERT_EXPR:
4369 return is_zeros_p (TREE_OPERAND (exp, 0));
4370
4371 case INTEGER_CST:
4372 return integer_zerop (exp);
4373
4374 case COMPLEX_CST:
4375 return
4376 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4377
4378 case REAL_CST:
4379 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4380
4381 case CONSTRUCTOR:
4382 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4383 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4384 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4385 if (! is_zeros_p (TREE_VALUE (elt)))
4386 return 0;
4387
4388 return 1;
4389
4390 default:
4391 return 0;
4392 }
4393 }
4394
4395 /* Return 1 if EXP contains mostly (3/4) zeros. */
4396
4397 static int
4398 mostly_zeros_p (exp)
4399 tree exp;
4400 {
4401 if (TREE_CODE (exp) == CONSTRUCTOR)
4402 {
4403 int elts = 0, zeros = 0;
4404 tree elt = CONSTRUCTOR_ELTS (exp);
4405 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4406 {
4407 /* If there are no ranges of true bits, it is all zero. */
4408 return elt == NULL_TREE;
4409 }
4410 for (; elt; elt = TREE_CHAIN (elt))
4411 {
4412 /* We do not handle the case where the index is a RANGE_EXPR,
4413 so the statistic will be somewhat inaccurate.
4414 We do make a more accurate count in store_constructor itself,
4415 and since this function is only used for nested array elements,
4416 this should be close enough. */
4417 if (mostly_zeros_p (TREE_VALUE (elt)))
4418 zeros++;
4419 elts++;
4420 }
4421
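/* Example of the 3/4 test below (added note): with 8 constructor elements,
   at least 6 must themselves be mostly zero for this to return nonzero,
   since 4 * 6 >= 3 * 8.  */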
4422 return 4 * zeros >= 3 * elts;
4423 }
4424
4425 return is_zeros_p (exp);
4426 }
4427 \f
4428 /* Helper function for store_constructor.
4429 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4430 TYPE is the type of the CONSTRUCTOR, not the element type.
4431 CLEARED is as for store_constructor.
4432 ALIAS_SET is the alias set to use for any stores.
4433
4434 This provides a recursive shortcut back to store_constructor when it isn't
4435 necessary to go through store_field. This is so that we can pass through
4436 the cleared field to let store_constructor know that we may not have to
4437 clear a substructure if the outer structure has already been cleared. */
4438
4439 static void
4440 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4441 alias_set)
4442 rtx target;
4443 unsigned HOST_WIDE_INT bitsize;
4444 HOST_WIDE_INT bitpos;
4445 enum machine_mode mode;
4446 tree exp, type;
4447 int cleared;
4448 int alias_set;
4449 {
4450 if (TREE_CODE (exp) == CONSTRUCTOR
4451 && bitpos % BITS_PER_UNIT == 0
4452 /* If we have a non-zero bitpos for a register target, then we just
4453 let store_field do the bitfield handling. This is unlikely to
4454 generate unnecessary clear instructions anyway. */
4455 && (bitpos == 0 || GET_CODE (target) == MEM))
4456 {
4457 if (GET_CODE (target) == MEM)
4458 target
4459 = adjust_address (target,
4460 GET_MODE (target) == BLKmode
4461 || 0 != (bitpos
4462 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4463 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4464
4465
4466 /* Update the alias set, if required. */
4467 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4468 && MEM_ALIAS_SET (target) != 0)
4469 {
4470 target = copy_rtx (target);
4471 set_mem_alias_set (target, alias_set);
4472 }
4473
4474 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4475 }
4476 else
4477 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4478 alias_set);
4479 }
4480
4481 /* Store the value of constructor EXP into the rtx TARGET.
4482 TARGET is either a REG or a MEM; we know it cannot conflict, since
4483 safe_from_p has been called.
4484 CLEARED is true if TARGET is known to have been zero'd.
4485 SIZE is the number of bytes of TARGET we are allowed to modify: this
4486 may not be the same as the size of EXP if we are assigning to a field
4487 which has been packed to exclude padding bits. */
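/* Added summary: the body below handles three kinds of constructors:
   RECORD_TYPE, UNION_TYPE and QUAL_UNION_TYPE are stored field by field;
   ARRAY_TYPE is stored element by element, with RANGE_EXPR indices either
   unrolled or expanded as a runtime loop; SET_TYPE stores the constant bits
   word by word and uses memset or __setbits for the rest.  Any other type
   aborts.  */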
4488
4489 static void
4490 store_constructor (exp, target, cleared, size)
4491 tree exp;
4492 rtx target;
4493 int cleared;
4494 HOST_WIDE_INT size;
4495 {
4496 tree type = TREE_TYPE (exp);
4497 #ifdef WORD_REGISTER_OPERATIONS
4498 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4499 #endif
4500
4501 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4502 || TREE_CODE (type) == QUAL_UNION_TYPE)
4503 {
4504 tree elt;
4505
4506 /* We either clear the aggregate or indicate the value is dead. */
4507 if ((TREE_CODE (type) == UNION_TYPE
4508 || TREE_CODE (type) == QUAL_UNION_TYPE)
4509 && ! cleared
4510 && ! CONSTRUCTOR_ELTS (exp))
4511 /* If the constructor is empty, clear the union. */
4512 {
4513 clear_storage (target, expr_size (exp));
4514 cleared = 1;
4515 }
4516
4517 /* If we are building a static constructor into a register,
4518 set the initial value as zero so we can fold the value into
4519 a constant. But if more than one register is involved,
4520 this probably loses. */
4521 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4522 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4523 {
4524 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4525 cleared = 1;
4526 }
4527
4528 /* If the constructor has fewer fields than the structure
4529 or if we are initializing the structure to mostly zeros,
4530 clear the whole structure first. Don't do this if TARGET is a
4531 register whose mode size isn't equal to SIZE since clear_storage
4532 can't handle this case. */
4533 else if (! cleared && size > 0
4534 && ((list_length (CONSTRUCTOR_ELTS (exp))
4535 != fields_length (type))
4536 || mostly_zeros_p (exp))
4537 && (GET_CODE (target) != REG
4538 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4539 == size)))
4540 {
4541 clear_storage (target, GEN_INT (size));
4542 cleared = 1;
4543 }
4544
4545 if (! cleared)
4546 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4547
4548 /* Store each element of the constructor into
4549 the corresponding field of TARGET. */
4550
4551 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4552 {
4553 tree field = TREE_PURPOSE (elt);
4554 tree value = TREE_VALUE (elt);
4555 enum machine_mode mode;
4556 HOST_WIDE_INT bitsize;
4557 HOST_WIDE_INT bitpos = 0;
4558 int unsignedp;
4559 tree offset;
4560 rtx to_rtx = target;
4561
4562 /* Just ignore missing fields.
4563 We cleared the whole structure, above,
4564 if any fields are missing. */
4565 if (field == 0)
4566 continue;
4567
4568 if (cleared && is_zeros_p (value))
4569 continue;
4570
4571 if (host_integerp (DECL_SIZE (field), 1))
4572 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4573 else
4574 bitsize = -1;
4575
4576 unsignedp = TREE_UNSIGNED (field);
4577 mode = DECL_MODE (field);
4578 if (DECL_BIT_FIELD (field))
4579 mode = VOIDmode;
4580
4581 offset = DECL_FIELD_OFFSET (field);
4582 if (host_integerp (offset, 0)
4583 && host_integerp (bit_position (field), 0))
4584 {
4585 bitpos = int_bit_position (field);
4586 offset = 0;
4587 }
4588 else
4589 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4590
4591 if (offset)
4592 {
4593 rtx offset_rtx;
4594
4595 if (contains_placeholder_p (offset))
4596 offset = build (WITH_RECORD_EXPR, sizetype,
4597 offset, make_tree (TREE_TYPE (exp), target));
4598
4599 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4600 if (GET_CODE (to_rtx) != MEM)
4601 abort ();
4602
4603 if (GET_MODE (offset_rtx) != ptr_mode)
4604 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4605
4606 #ifdef POINTERS_EXTEND_UNSIGNED
4607 if (GET_MODE (offset_rtx) != Pmode)
4608 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4609 #endif
4610
4611 to_rtx = offset_address (to_rtx, offset_rtx,
4612 highest_pow2_factor (offset));
4613 }
4614
4615 if (TREE_READONLY (field))
4616 {
4617 if (GET_CODE (to_rtx) == MEM)
4618 to_rtx = copy_rtx (to_rtx);
4619
4620 RTX_UNCHANGING_P (to_rtx) = 1;
4621 }
4622
4623 #ifdef WORD_REGISTER_OPERATIONS
4624 /* If this initializes a field that is smaller than a word, at the
4625 start of a word, try to widen it to a full word.
4626 This special case allows us to output C++ member function
4627 initializations in a form that the optimizers can understand. */
4628 if (GET_CODE (target) == REG
4629 && bitsize < BITS_PER_WORD
4630 && bitpos % BITS_PER_WORD == 0
4631 && GET_MODE_CLASS (mode) == MODE_INT
4632 && TREE_CODE (value) == INTEGER_CST
4633 && exp_size >= 0
4634 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4635 {
4636 tree type = TREE_TYPE (value);
4637
4638 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4639 {
4640 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4641 value = convert (type, value);
4642 }
4643
4644 if (BYTES_BIG_ENDIAN)
4645 value
4646 = fold (build (LSHIFT_EXPR, type, value,
4647 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4648 bitsize = BITS_PER_WORD;
4649 mode = word_mode;
4650 }
4651 #endif
4652
4653 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4654 && DECL_NONADDRESSABLE_P (field))
4655 {
4656 to_rtx = copy_rtx (to_rtx);
4657 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4658 }
4659
4660 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4661 value, type, cleared,
4662 get_alias_set (TREE_TYPE (field)));
4663 }
4664 }
4665 else if (TREE_CODE (type) == ARRAY_TYPE)
4666 {
4667 tree elt;
4668 int i;
4669 int need_to_clear;
4670 tree domain = TYPE_DOMAIN (type);
4671 tree elttype = TREE_TYPE (type);
4672 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4673 && TYPE_MAX_VALUE (domain)
4674 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4675 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4676 HOST_WIDE_INT minelt = 0;
4677 HOST_WIDE_INT maxelt = 0;
4678
4679 /* If we have constant bounds for the range of the type, get them. */
4680 if (const_bounds_p)
4681 {
4682 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4683 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4684 }
4685
4686 /* If the constructor has fewer elements than the array,
4687 clear the whole array first. Similarly if this is
4688 a static constructor of a non-BLKmode object. */
4689 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4690 need_to_clear = 1;
4691 else
4692 {
4693 HOST_WIDE_INT count = 0, zero_count = 0;
4694 need_to_clear = ! const_bounds_p;
4695
4696 /* This loop is a more accurate version of the loop in
4697 mostly_zeros_p (it handles RANGE_EXPR in an index).
4698 It is also needed to check for missing elements. */
4699 for (elt = CONSTRUCTOR_ELTS (exp);
4700 elt != NULL_TREE && ! need_to_clear;
4701 elt = TREE_CHAIN (elt))
4702 {
4703 tree index = TREE_PURPOSE (elt);
4704 HOST_WIDE_INT this_node_count;
4705
4706 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4707 {
4708 tree lo_index = TREE_OPERAND (index, 0);
4709 tree hi_index = TREE_OPERAND (index, 1);
4710
4711 if (! host_integerp (lo_index, 1)
4712 || ! host_integerp (hi_index, 1))
4713 {
4714 need_to_clear = 1;
4715 break;
4716 }
4717
4718 this_node_count = (tree_low_cst (hi_index, 1)
4719 - tree_low_cst (lo_index, 1) + 1);
4720 }
4721 else
4722 this_node_count = 1;
4723
4724 count += this_node_count;
4725 if (mostly_zeros_p (TREE_VALUE (elt)))
4726 zero_count += this_node_count;
4727 }
4728
4729 /* Clear the entire array first if there are any missing elements,
4730 or if the incidence of zero elements is >= 75%. */
4731 if (! need_to_clear
4732 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4733 need_to_clear = 1;
4734 }
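/* Example of the heuristic above (added note): for a 100-element array,
   any missing initializer, or 75 or more initializers that are themselves
   mostly zero, causes the whole array to be cleared first so that only the
   nonzero elements need explicit stores.  */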
4735
4736 if (need_to_clear && size > 0)
4737 {
4738 if (! cleared)
4739 clear_storage (target, GEN_INT (size));
4740 cleared = 1;
4741 }
4742 else if (REG_P (target))
4743 /* Inform later passes that the old value is dead. */
4744 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4745
4746 /* Store each element of the constructor into
4747 the corresponding element of TARGET, determined
4748 by counting the elements. */
4749 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4750 elt;
4751 elt = TREE_CHAIN (elt), i++)
4752 {
4753 enum machine_mode mode;
4754 HOST_WIDE_INT bitsize;
4755 HOST_WIDE_INT bitpos;
4756 int unsignedp;
4757 tree value = TREE_VALUE (elt);
4758 tree index = TREE_PURPOSE (elt);
4759 rtx xtarget = target;
4760
4761 if (cleared && is_zeros_p (value))
4762 continue;
4763
4764 unsignedp = TREE_UNSIGNED (elttype);
4765 mode = TYPE_MODE (elttype);
4766 if (mode == BLKmode)
4767 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4768 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4769 : -1);
4770 else
4771 bitsize = GET_MODE_BITSIZE (mode);
4772
4773 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4774 {
4775 tree lo_index = TREE_OPERAND (index, 0);
4776 tree hi_index = TREE_OPERAND (index, 1);
4777 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4778 struct nesting *loop;
4779 HOST_WIDE_INT lo, hi, count;
4780 tree position;
4781
4782 /* If the range is constant and "small", unroll the loop. */
4783 if (const_bounds_p
4784 && host_integerp (lo_index, 0)
4785 && host_integerp (hi_index, 0)
4786 && (lo = tree_low_cst (lo_index, 0),
4787 hi = tree_low_cst (hi_index, 0),
4788 count = hi - lo + 1,
4789 (GET_CODE (target) != MEM
4790 || count <= 2
4791 || (host_integerp (TYPE_SIZE (elttype), 1)
4792 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4793 <= 40 * 8)))))
4794 {
4795 lo -= minelt; hi -= minelt;
4796 for (; lo <= hi; lo++)
4797 {
4798 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4799
4800 if (GET_CODE (target) == MEM
4801 && !MEM_KEEP_ALIAS_SET_P (target)
4802 && TYPE_NONALIASED_COMPONENT (type))
4803 {
4804 target = copy_rtx (target);
4805 MEM_KEEP_ALIAS_SET_P (target) = 1;
4806 }
4807
4808 store_constructor_field
4809 (target, bitsize, bitpos, mode, value, type, cleared,
4810 get_alias_set (elttype));
4811 }
4812 }
4813 else
4814 {
4815 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4816 loop_top = gen_label_rtx ();
4817 loop_end = gen_label_rtx ();
4818
4819 unsignedp = TREE_UNSIGNED (domain);
4820
4821 index = build_decl (VAR_DECL, NULL_TREE, domain);
4822
4823 index_r
4824 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4825 &unsignedp, 0));
4826 SET_DECL_RTL (index, index_r);
4827 if (TREE_CODE (value) == SAVE_EXPR
4828 && SAVE_EXPR_RTL (value) == 0)
4829 {
4830 /* Make sure value gets expanded once before the
4831 loop. */
4832 expand_expr (value, const0_rtx, VOIDmode, 0);
4833 emit_queue ();
4834 }
4835 store_expr (lo_index, index_r, 0);
4836 loop = expand_start_loop (0);
4837
4838 /* Assign value to element index. */
4839 position
4840 = convert (ssizetype,
4841 fold (build (MINUS_EXPR, TREE_TYPE (index),
4842 index, TYPE_MIN_VALUE (domain))));
4843 position = size_binop (MULT_EXPR, position,
4844 convert (ssizetype,
4845 TYPE_SIZE_UNIT (elttype)));
4846
4847 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4848 xtarget = offset_address (target, pos_rtx,
4849 highest_pow2_factor (position));
4850 xtarget = adjust_address (xtarget, mode, 0);
4851 if (TREE_CODE (value) == CONSTRUCTOR)
4852 store_constructor (value, xtarget, cleared,
4853 bitsize / BITS_PER_UNIT);
4854 else
4855 store_expr (value, xtarget, 0);
4856
4857 expand_exit_loop_if_false (loop,
4858 build (LT_EXPR, integer_type_node,
4859 index, hi_index));
4860
4861 expand_increment (build (PREINCREMENT_EXPR,
4862 TREE_TYPE (index),
4863 index, integer_one_node), 0, 0);
4864 expand_end_loop ();
4865 emit_label (loop_end);
4866 }
4867 }
4868 else if ((index != 0 && ! host_integerp (index, 0))
4869 || ! host_integerp (TYPE_SIZE (elttype), 1))
4870 {
4871 tree position;
4872
4873 if (index == 0)
4874 index = ssize_int (1);
4875
4876 if (minelt)
4877 index = convert (ssizetype,
4878 fold (build (MINUS_EXPR, TREE_TYPE (index),
4879 index, TYPE_MIN_VALUE (domain))));
4880
4881 position = size_binop (MULT_EXPR, index,
4882 convert (ssizetype,
4883 TYPE_SIZE_UNIT (elttype)));
4884 xtarget = offset_address (target,
4885 expand_expr (position, 0, VOIDmode, 0),
4886 highest_pow2_factor (position));
4887 xtarget = adjust_address (xtarget, mode, 0);
4888 store_expr (value, xtarget, 0);
4889 }
4890 else
4891 {
4892 if (index != 0)
4893 bitpos = ((tree_low_cst (index, 0) - minelt)
4894 * tree_low_cst (TYPE_SIZE (elttype), 1));
4895 else
4896 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4897
4898 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4899 && TYPE_NONALIASED_COMPONENT (type))
4900 {
4901 target = copy_rtx (target);
4902 MEM_KEEP_ALIAS_SET_P (target) = 1;
4903 }
4904
4905 store_constructor_field (target, bitsize, bitpos, mode, value,
4906 type, cleared, get_alias_set (elttype));
4907
4908 }
4909 }
4910 }
4911
4912 /* Set constructor assignments. */
4913 else if (TREE_CODE (type) == SET_TYPE)
4914 {
4915 tree elt = CONSTRUCTOR_ELTS (exp);
4916 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4917 tree domain = TYPE_DOMAIN (type);
4918 tree domain_min, domain_max, bitlength;
4919
4920 /* The default implementation strategy is to extract the constant
4921 parts of the constructor, use that to initialize the target,
4922 and then "or" in whatever non-constant ranges we need in addition.
4923
4924 If a large set is all zero or all ones, it is
4925 probably better to set it using memset (if available) or bzero.
4926 Also, if a large set has just a single range, it may also be
4927 better to first clear the whole set (using bzero/memset)
4928 and then set the bits we want. */
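/* Illustrative example (added, not from the original comment): a set whose
   constructor lists only constant bits, say bits 1, 3, 4 and 5 over a small
   domain, is handled entirely by the word-building loop below; a member
   written as a non-constant range is instead set by the memset or __setbits
   library calls further down.  */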
4929
4930 /* Check for all zeros. */
4931 if (elt == NULL_TREE && size > 0)
4932 {
4933 if (!cleared)
4934 clear_storage (target, GEN_INT (size));
4935 return;
4936 }
4937
4938 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4939 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4940 bitlength = size_binop (PLUS_EXPR,
4941 size_diffop (domain_max, domain_min),
4942 ssize_int (1));
4943
4944 nbits = tree_low_cst (bitlength, 1);
4945
4946 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4947 are "complicated" (more than one range), initialize (the
4948 constant parts) by copying from a constant. */
4949 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4950 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4951 {
4952 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4953 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4954 char *bit_buffer = (char *) alloca (nbits);
4955 HOST_WIDE_INT word = 0;
4956 unsigned int bit_pos = 0;
4957 unsigned int ibit = 0;
4958 unsigned int offset = 0; /* In bytes from beginning of set. */
4959
4960 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4961 for (;;)
4962 {
4963 if (bit_buffer[ibit])
4964 {
4965 if (BYTES_BIG_ENDIAN)
4966 word |= (1 << (set_word_size - 1 - bit_pos));
4967 else
4968 word |= 1 << bit_pos;
4969 }
4970
4971 bit_pos++; ibit++;
4972 if (bit_pos >= set_word_size || ibit == nbits)
4973 {
4974 if (word != 0 || ! cleared)
4975 {
4976 rtx datum = GEN_INT (word);
4977 rtx to_rtx;
4978
4979 /* The assumption here is that it is safe to use
4980 XEXP if the set is multi-word, but not if
4981 it's single-word. */
4982 if (GET_CODE (target) == MEM)
4983 to_rtx = adjust_address (target, mode, offset);
4984 else if (offset == 0)
4985 to_rtx = target;
4986 else
4987 abort ();
4988 emit_move_insn (to_rtx, datum);
4989 }
4990
4991 if (ibit == nbits)
4992 break;
4993 word = 0;
4994 bit_pos = 0;
4995 offset += set_word_size / BITS_PER_UNIT;
4996 }
4997 }
4998 }
4999 else if (!cleared)
5000 /* Don't bother clearing storage if the set is all ones. */
5001 if (TREE_CHAIN (elt) != NULL_TREE
5002 || (TREE_PURPOSE (elt) == NULL_TREE
5003 ? nbits != 1
5004 : ( ! host_integerp (TREE_VALUE (elt), 0)
5005 || ! host_integerp (TREE_PURPOSE (elt), 0)
5006 || (tree_low_cst (TREE_VALUE (elt), 0)
5007 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5008 != (HOST_WIDE_INT) nbits))))
5009 clear_storage (target, expr_size (exp));
5010
5011 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5012 {
5013 /* Start of range of element or NULL. */
5014 tree startbit = TREE_PURPOSE (elt);
5015 /* End of range of element, or element value. */
5016 tree endbit = TREE_VALUE (elt);
5017 #ifdef TARGET_MEM_FUNCTIONS
5018 HOST_WIDE_INT startb, endb;
5019 #endif
5020 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5021
5022 bitlength_rtx = expand_expr (bitlength,
5023 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5024
5025 /* Handle a non-range tuple element like [ expr ]. */
5026 if (startbit == NULL_TREE)
5027 {
5028 startbit = save_expr (endbit);
5029 endbit = startbit;
5030 }
5031
5032 startbit = convert (sizetype, startbit);
5033 endbit = convert (sizetype, endbit);
5034 if (! integer_zerop (domain_min))
5035 {
5036 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5037 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5038 }
5039 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5040 EXPAND_CONST_ADDRESS);
5041 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5042 EXPAND_CONST_ADDRESS);
5043
5044 if (REG_P (target))
5045 {
5046 targetx
5047 = assign_temp
5048 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5049 TYPE_QUAL_CONST)),
5050 0, 1, 1);
5051 emit_move_insn (targetx, target);
5052 }
5053
5054 else if (GET_CODE (target) == MEM)
5055 targetx = target;
5056 else
5057 abort ();
5058
5059 #ifdef TARGET_MEM_FUNCTIONS
5060 /* Optimization: If startbit and endbit are
5061 constants divisible by BITS_PER_UNIT,
5062 call memset instead. */
5063 if (TREE_CODE (startbit) == INTEGER_CST
5064 && TREE_CODE (endbit) == INTEGER_CST
5065 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5066 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5067 {
5068 emit_library_call (memset_libfunc, LCT_NORMAL,
5069 VOIDmode, 3,
5070 plus_constant (XEXP (targetx, 0),
5071 startb / BITS_PER_UNIT),
5072 Pmode,
5073 constm1_rtx, TYPE_MODE (integer_type_node),
5074 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5075 TYPE_MODE (sizetype));
5076 }
5077 else
5078 #endif
5079 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5080 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5081 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5082 startbit_rtx, TYPE_MODE (sizetype),
5083 endbit_rtx, TYPE_MODE (sizetype));
5084
5085 if (REG_P (target))
5086 emit_move_insn (target, targetx);
5087 }
5088 }
5089
5090 else
5091 abort ();
5092 }
5093
5094 /* Store the value of EXP (an expression tree)
5095 into a subfield of TARGET which has mode MODE and occupies
5096 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5097 If MODE is VOIDmode, it means that we are storing into a bit-field.
5098
5099 If VALUE_MODE is VOIDmode, return nothing in particular.
5100 UNSIGNEDP is not used in this case.
5101
5102 Otherwise, return an rtx for the value stored. This rtx
5103 has mode VALUE_MODE if that is convenient to do.
5104 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5105
5106 TYPE is the type of the underlying object,
5107
5108 ALIAS_SET is the alias set for the destination. This value will
5109 (in general) be different from that for TARGET, since TARGET is a
5110 reference to the containing structure. */
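/* Added summary of the strategy below: a BLKmode store into a REG or SUBREG
   goes through a stack temporary; bit-fields, register destinations, and
   fields that are under-aligned or whose size differs from the rhs go
   through store_bit_field, re-extracting the value if VALUE_MODE asks for
   one; otherwise the field is addressed directly with adjust_address and
   stored with store_expr.  */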
5111
5112 static rtx
5113 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5114 alias_set)
5115 rtx target;
5116 HOST_WIDE_INT bitsize;
5117 HOST_WIDE_INT bitpos;
5118 enum machine_mode mode;
5119 tree exp;
5120 enum machine_mode value_mode;
5121 int unsignedp;
5122 tree type;
5123 int alias_set;
5124 {
5125 HOST_WIDE_INT width_mask = 0;
5126
5127 if (TREE_CODE (exp) == ERROR_MARK)
5128 return const0_rtx;
5129
5130 /* If we have nothing to store, do nothing unless the expression has
5131 side-effects. */
5132 if (bitsize == 0)
5133 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5134 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5135 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5136
5137 /* If we are storing into an unaligned field of an aligned union that is
5138 in a register, we may have the mode of TARGET being an integer mode but
5139 MODE == BLKmode. In that case, get an aligned object whose size and
5140 alignment are the same as TARGET and store TARGET into it (we can avoid
5141 the store if the field being stored is the entire width of TARGET). Then
5142 call ourselves recursively to store the field into a BLKmode version of
5143 that object. Finally, load from the object into TARGET. This is not
5144 very efficient in general, but should only be slightly more expensive
5145 than the otherwise-required unaligned accesses. Perhaps this can be
5146 cleaned up later. */
5147
5148 if (mode == BLKmode
5149 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5150 {
5151 rtx object
5152 = assign_temp
5153 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5154 0, 1, 1);
5155 rtx blk_object = copy_rtx (object);
5156
5157 PUT_MODE (blk_object, BLKmode);
5158
5159 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5160 emit_move_insn (object, target);
5161
5162 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5163 alias_set);
5164
5165 emit_move_insn (target, object);
5166
5167 /* We want to return the BLKmode version of the data. */
5168 return blk_object;
5169 }
5170
5171 if (GET_CODE (target) == CONCAT)
5172 {
5173 /* We're storing into a struct containing a single __complex. */
5174
5175 if (bitpos != 0)
5176 abort ();
5177 return store_expr (exp, target, 0);
5178 }
5179
5180 /* If the structure is in a register or if the component
5181 is a bit field, we cannot use addressing to access it.
5182 Use bit-field techniques or SUBREG to store in it. */
5183
5184 if (mode == VOIDmode
5185 || (mode != BLKmode && ! direct_store[(int) mode]
5186 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5187 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5188 || GET_CODE (target) == REG
5189 || GET_CODE (target) == SUBREG
5190 /* If the field isn't aligned enough to store as an ordinary memref,
5191 store it as a bit field. */
5192 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5193 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5194 || bitpos % GET_MODE_ALIGNMENT (mode)))
5195 /* If the RHS and field are a constant size and the size of the
5196 RHS isn't the same size as the bitfield, we must use bitfield
5197 operations. */
5198 || (bitsize >= 0
5199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5200 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5201 {
5202 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5203
5204 /* If BITSIZE is narrower than the size of the type of EXP
5205 we will be narrowing TEMP. Normally, what's wanted are the
5206 low-order bits. However, if EXP's type is a record and this is a
5207 big-endian machine, we want the upper BITSIZE bits. */
5208 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5209 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5210 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5211 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5212 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5213 - bitsize),
5214 temp, 1);
5215
5216 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5217 MODE. */
5218 if (mode != VOIDmode && mode != BLKmode
5219 && mode != TYPE_MODE (TREE_TYPE (exp)))
5220 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5221
5222 /* If the modes of TARGET and TEMP are both BLKmode, both
5223 must be in memory and BITPOS must be aligned on a byte
5224 boundary. If so, we simply do a block copy. */
5225 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5226 {
5227 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5228 || bitpos % BITS_PER_UNIT != 0)
5229 abort ();
5230
5231 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5232 emit_block_move (target, temp,
5233 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5234 / BITS_PER_UNIT));
5235
5236 return value_mode == VOIDmode ? const0_rtx : target;
5237 }
5238
5239 /* Store the value in the bitfield. */
5240 store_bit_field (target, bitsize, bitpos, mode, temp,
5241 int_size_in_bytes (type));
5242
5243 if (value_mode != VOIDmode)
5244 {
5245 /* The caller wants an rtx for the value.
5246 If possible, avoid refetching from the bitfield itself. */
5247 if (width_mask != 0
5248 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5249 {
5250 tree count;
5251 enum machine_mode tmode;
5252
5253 if (unsignedp)
5254 return expand_and (temp,
5255 GEN_INT
5256 (trunc_int_for_mode
5257 (width_mask,
5258 GET_MODE (temp) == VOIDmode
5259 ? value_mode
5260 : GET_MODE (temp))), NULL_RTX);
5261
5262 tmode = GET_MODE (temp);
5263 if (tmode == VOIDmode)
5264 tmode = value_mode;
5265 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5266 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5267 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5268 }
5269
5270 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5271 NULL_RTX, value_mode, VOIDmode,
5272 int_size_in_bytes (type));
5273 }
5274 return const0_rtx;
5275 }
5276 else
5277 {
5278 rtx addr = XEXP (target, 0);
5279 rtx to_rtx = target;
5280
5281 /* If a value is wanted, it must be the lhs,
5282 so make the address stable for multiple use. */
5283
5284 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5285 && ! CONSTANT_ADDRESS_P (addr)
5286 /* A frame-pointer reference is already stable. */
5287 && ! (GET_CODE (addr) == PLUS
5288 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5289 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5290 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5291 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5292
5293 /* Now build a reference to just the desired component. */
5294
5295 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5296
5297 if (to_rtx == target)
5298 to_rtx = copy_rtx (to_rtx);
5299
5300 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5301 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5302 set_mem_alias_set (to_rtx, alias_set);
5303
5304 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5305 }
5306 }
5307 \f
5308 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5309 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5310 codes and find the ultimate containing object, which we return.
5311
5312 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5313 bit position, and *PUNSIGNEDP to the signedness of the field.
5314 If the position of the field is variable, we store a tree
5315 giving the variable offset (in units) in *POFFSET.
5316 This offset is in addition to the bit position.
5317 If the position is not variable, we store 0 in *POFFSET.
5318
5319 If any of the extraction expressions is volatile,
5320 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5321
5322 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5323 is a mode that can be used to access the field. In that case, *PBITSIZE
5324 is redundant.
5325
5326 If the field describes a variable-sized object, *PMODE is set to
5327 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5328 this case, but the address of the object can be found. */
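/* Illustrative example (hypothetical values, added note): for a
   COMPONENT_REF naming a 3-bit bit-field that starts 9 bits into structure
   S, this returns the tree for S with *PBITSIZE == 3, *PBITPOS == 9,
   *POFFSET == 0 and *PMODE == VOIDmode; a reference whose position depends
   on a variable array index instead returns the variable part of the
   displacement, in units, in *POFFSET.  */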
5329
5330 tree
5331 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5332 punsignedp, pvolatilep)
5333 tree exp;
5334 HOST_WIDE_INT *pbitsize;
5335 HOST_WIDE_INT *pbitpos;
5336 tree *poffset;
5337 enum machine_mode *pmode;
5338 int *punsignedp;
5339 int *pvolatilep;
5340 {
5341 tree size_tree = 0;
5342 enum machine_mode mode = VOIDmode;
5343 tree offset = size_zero_node;
5344 tree bit_offset = bitsize_zero_node;
5345 tree placeholder_ptr = 0;
5346 tree tem;
5347
5348 /* First get the mode, signedness, and size. We do this from just the
5349 outermost expression. */
5350 if (TREE_CODE (exp) == COMPONENT_REF)
5351 {
5352 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5353 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5354 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5355
5356 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5357 }
5358 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5359 {
5360 size_tree = TREE_OPERAND (exp, 1);
5361 *punsignedp = TREE_UNSIGNED (exp);
5362 }
5363 else
5364 {
5365 mode = TYPE_MODE (TREE_TYPE (exp));
5366 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5367
5368 if (mode == BLKmode)
5369 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5370 else
5371 *pbitsize = GET_MODE_BITSIZE (mode);
5372 }
5373
5374 if (size_tree != 0)
5375 {
5376 if (! host_integerp (size_tree, 1))
5377 mode = BLKmode, *pbitsize = -1;
5378 else
5379 *pbitsize = tree_low_cst (size_tree, 1);
5380 }
5381
5382 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5383 and find the ultimate containing object. */
5384 while (1)
5385 {
5386 if (TREE_CODE (exp) == BIT_FIELD_REF)
5387 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5388 else if (TREE_CODE (exp) == COMPONENT_REF)
5389 {
5390 tree field = TREE_OPERAND (exp, 1);
5391 tree this_offset = DECL_FIELD_OFFSET (field);
5392
5393 /* If this field hasn't been filled in yet, don't go
5394 past it. This should only happen when folding expressions
5395 made during type construction. */
5396 if (this_offset == 0)
5397 break;
5398 else if (! TREE_CONSTANT (this_offset)
5399 && contains_placeholder_p (this_offset))
5400 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5401
5402 offset = size_binop (PLUS_EXPR, offset, this_offset);
5403 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5404 DECL_FIELD_BIT_OFFSET (field));
5405
5406 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5407 }
5408
5409 else if (TREE_CODE (exp) == ARRAY_REF
5410 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5411 {
5412 tree index = TREE_OPERAND (exp, 1);
5413 tree array = TREE_OPERAND (exp, 0);
5414 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5415 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5416 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5417
5418 /* We assume all arrays have sizes that are a multiple of a byte.
5419 First subtract the lower bound, if any, in the type of the
5420 index, then convert to sizetype and multiply by the size of the
5421 array element. */
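/* For example (illustrative only): for `a[i]' with `int a[10]' and a zero
   lower bound, OFFSET accumulates `(sizetype) i * 4' on a target where
   ints are 4 bytes; element sizes are of course target-dependent.  */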
5422 if (low_bound != 0 && ! integer_zerop (low_bound))
5423 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5424 index, low_bound));
5425
5426 /* If the index has a self-referential type, pass it to a
5427 WITH_RECORD_EXPR; if the component size is self-referential, wrap
5428 it in one that refers to our containing array. */
5429 if (! TREE_CONSTANT (index)
5430 && contains_placeholder_p (index))
5431 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5432 if (! TREE_CONSTANT (unit_size)
5433 && contains_placeholder_p (unit_size))
5434 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5435
5436 offset = size_binop (PLUS_EXPR, offset,
5437 size_binop (MULT_EXPR,
5438 convert (sizetype, index),
5439 unit_size));
5440 }
5441
5442 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5443 {
5444 tree new = find_placeholder (exp, &placeholder_ptr);
5445
5446 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5447 We might have been called from tree optimization where we
5448 haven't set up an object yet. */
5449 if (new == 0)
5450 break;
5451 else
5452 exp = new;
5453
5454 continue;
5455 }
5456 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5457 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5458 && ! ((TREE_CODE (exp) == NOP_EXPR
5459 || TREE_CODE (exp) == CONVERT_EXPR)
5460 && (TYPE_MODE (TREE_TYPE (exp))
5461 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5462 break;
5463
5464 /* If any reference in the chain is volatile, the effect is volatile. */
5465 if (TREE_THIS_VOLATILE (exp))
5466 *pvolatilep = 1;
5467
5468 exp = TREE_OPERAND (exp, 0);
5469 }
5470
5471 /* If OFFSET is constant, see if we can return the whole thing as a
5472 constant bit position. Otherwise, split it up. */
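/* For instance (added illustration): a constant byte offset of 2 combined
   with a bit offset of 3 yields *PBITPOS == 19 and *POFFSET == 0, assuming
   the usual 8-bit BITS_PER_UNIT.  */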
5473 if (host_integerp (offset, 0)
5474 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5475 bitsize_unit_node))
5476 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5477 && host_integerp (tem, 0))
5478 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5479 else
5480 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5481
5482 *pmode = mode;
5483 return exp;
5484 }
5485
5486 /* Return 1 if T is an expression that get_inner_reference handles. */
5487
5488 int
5489 handled_component_p (t)
5490 tree t;
5491 {
5492 switch (TREE_CODE (t))
5493 {
5494 case BIT_FIELD_REF:
5495 case COMPONENT_REF:
5496 case ARRAY_REF:
5497 case ARRAY_RANGE_REF:
5498 case NON_LVALUE_EXPR:
5499 case VIEW_CONVERT_EXPR:
5500 return 1;
5501
5502 case NOP_EXPR:
5503 case CONVERT_EXPR:
5504 return (TYPE_MODE (TREE_TYPE (t))
5505 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5506
5507 default:
5508 return 0;
5509 }
5510 }
5511
5512 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5513
5514 static enum memory_use_mode
5515 get_memory_usage_from_modifier (modifier)
5516 enum expand_modifier modifier;
5517 {
5518 switch (modifier)
5519 {
5520 case EXPAND_NORMAL:
5521 case EXPAND_SUM:
5522 return MEMORY_USE_RO;
5523 break;
5524 case EXPAND_MEMORY_USE_WO:
5525 return MEMORY_USE_WO;
5526 break;
5527 case EXPAND_MEMORY_USE_RW:
5528 return MEMORY_USE_RW;
5529 break;
5530 case EXPAND_MEMORY_USE_DONT:
5531 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5532 MEMORY_USE_DONT, because they are modifiers to a call of
5533 expand_expr in the ADDR_EXPR case of expand_expr. */
5534 case EXPAND_CONST_ADDRESS:
5535 case EXPAND_INITIALIZER:
5536 return MEMORY_USE_DONT;
5537 case EXPAND_MEMORY_USE_BAD:
5538 default:
5539 abort ();
5540 }
5541 }
5542 \f
5543 /* Given an rtx VALUE that may contain additions and multiplications, return
5544 an equivalent value that just refers to a register, memory, or constant.
5545 This is done by generating instructions to perform the arithmetic and
5546 returning a pseudo-register containing the value.
5547
5548 The returned value may be a REG, SUBREG, MEM or constant. */
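/* A minimal illustration (added commentary): given
   VALUE == (plus:SI (reg:SI 100) (const_int 4)), force_operand emits an
   add insn and returns a pseudo register holding the sum; a VALUE that is
   already a register, memory reference or constant comes back unchanged.  */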
5549
5550 rtx
5551 force_operand (value, target)
5552 rtx value, target;
5553 {
5554 optab binoptab = 0;
5555 /* Use a temporary to force order of execution of calls to
5556 `force_operand'. */
5557 rtx tmp;
5558 rtx op2;
5559 /* Use subtarget as the target for operand 0 of a binary operation. */
5560 rtx subtarget = get_subtarget (target);
5561
5562 /* Check for a PIC address load. */
5563 if (flag_pic
5564 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5565 && XEXP (value, 0) == pic_offset_table_rtx
5566 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5567 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5568 || GET_CODE (XEXP (value, 1)) == CONST))
5569 {
5570 if (!subtarget)
5571 subtarget = gen_reg_rtx (GET_MODE (value));
5572 emit_move_insn (subtarget, value);
5573 return subtarget;
5574 }
5575
5576 if (GET_CODE (value) == PLUS)
5577 binoptab = add_optab;
5578 else if (GET_CODE (value) == MINUS)
5579 binoptab = sub_optab;
5580 else if (GET_CODE (value) == MULT)
5581 {
5582 op2 = XEXP (value, 1);
5583 if (!CONSTANT_P (op2)
5584 && !(GET_CODE (op2) == REG && op2 != subtarget))
5585 subtarget = 0;
5586 tmp = force_operand (XEXP (value, 0), subtarget);
5587 return expand_mult (GET_MODE (value), tmp,
5588 force_operand (op2, NULL_RTX),
5589 target, 1);
5590 }
5591
5592 if (binoptab)
5593 {
5594 op2 = XEXP (value, 1);
5595 if (!CONSTANT_P (op2)
5596 && !(GET_CODE (op2) == REG && op2 != subtarget))
5597 subtarget = 0;
5598 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5599 {
5600 binoptab = add_optab;
5601 op2 = negate_rtx (GET_MODE (value), op2);
5602 }
5603
5604 /* Check for an addition with OP2 a constant integer and our first
5605 operand a PLUS of a virtual register and something else. In that
5606 case, we want to emit the sum of the virtual register and the
5607 constant first and then add the other value. This allows virtual
5608 register instantiation to simply modify the constant rather than
5609 creating another one around this addition. */
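/* Illustrative sketch: for
   VALUE == (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))
   we first form (plus (reg virtual-stack-vars) (const_int 8)) and then add
   (reg 101), so instantiation can fold the 8 into its displacement.  */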
5610 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5611 && GET_CODE (XEXP (value, 0)) == PLUS
5612 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5613 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5614 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5615 {
5616 rtx temp = expand_binop (GET_MODE (value), binoptab,
5617 XEXP (XEXP (value, 0), 0), op2,
5618 subtarget, 0, OPTAB_LIB_WIDEN);
5619 return expand_binop (GET_MODE (value), binoptab, temp,
5620 force_operand (XEXP (XEXP (value, 0), 1), 0),
5621 target, 0, OPTAB_LIB_WIDEN);
5622 }
5623
5624 tmp = force_operand (XEXP (value, 0), subtarget);
5625 return expand_binop (GET_MODE (value), binoptab, tmp,
5626 force_operand (op2, NULL_RTX),
5627 target, 0, OPTAB_LIB_WIDEN);
5628 /* We give UNSIGNEDP = 0 to expand_binop
5629 because the only operations we are expanding here are signed ones. */
5630 }
5631
5632 #ifdef INSN_SCHEDULING
5633 /* On machines that have insn scheduling, we want all memory references to be
5634 explicit, so we need to deal with such paradoxical SUBREGs. */
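/* E.g. (illustrative): (subreg:DI (mem:SI ...) 0) is rewritten so the
   SImode load is done explicitly into a register and the paradoxical
   widening to DImode is then taken of that register.  */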
5635 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5636 && (GET_MODE_SIZE (GET_MODE (value))
5637 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5638 value
5639 = simplify_gen_subreg (GET_MODE (value),
5640 force_reg (GET_MODE (SUBREG_REG (value)),
5641 force_operand (SUBREG_REG (value),
5642 NULL_RTX)),
5643 GET_MODE (SUBREG_REG (value)),
5644 SUBREG_BYTE (value));
5645 #endif
5646
5647 return value;
5648 }
5649 \f
5650 /* Subroutine of expand_expr: return nonzero iff there is no way that
5651 EXP can reference X, which is being modified. TOP_P is nonzero if this
5652 call is going to be used to determine whether we need a temporary
5653 for EXP, as opposed to a recursive call to this function.
5654
5655 It is always safe for this routine to return zero since it merely
5656 searches for optimization opportunities. */
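/* Usage note (added commentary): a nonzero result lets callers such as
   expand_expr reuse X as a target while EXP is still to be evaluated; a
   zero result merely forces them to fall back to a fresh temporary.  */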
5657
5658 int
5659 safe_from_p (x, exp, top_p)
5660 rtx x;
5661 tree exp;
5662 int top_p;
5663 {
5664 rtx exp_rtl = 0;
5665 int i, nops;
5666 static tree save_expr_list;
5667
5668 if (x == 0
5669 /* If EXP has varying size, we MUST use a target since we currently
5670 have no way of allocating temporaries of variable size
5671 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5672 So we assume here that something at a higher level has prevented a
5673 clash. This is somewhat bogus, but the best we can do. Only
5674 do this when X is BLKmode and when we are at the top level. */
5675 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5677 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5678 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5679 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5680 != INTEGER_CST)
5681 && GET_MODE (x) == BLKmode)
5682 /* If X is in the outgoing argument area, it is always safe. */
5683 || (GET_CODE (x) == MEM
5684 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5685 || (GET_CODE (XEXP (x, 0)) == PLUS
5686 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5687 return 1;
5688
5689 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5690 find the underlying pseudo. */
5691 if (GET_CODE (x) == SUBREG)
5692 {
5693 x = SUBREG_REG (x);
5694 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5695 return 0;
5696 }
5697
5698 /* A SAVE_EXPR might appear many times in the expression passed to the
5699 top-level safe_from_p call, and if it has a complex subexpression,
5700 examining it multiple times could result in a combinatorial explosion.
5701 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5702 with optimization took about 28 minutes to compile -- even though it was
5703 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5704 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5705 we have processed. Note that the only test of top_p was above. */
5706
5707 if (top_p)
5708 {
5709 int rtn;
5710 tree t;
5711
5712 save_expr_list = 0;
5713
5714 rtn = safe_from_p (x, exp, 0);
5715
5716 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5717 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5718
5719 return rtn;
5720 }
5721
5722 /* Now look at our tree code and possibly recurse. */
5723 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5724 {
5725 case 'd':
5726 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5727 break;
5728
5729 case 'c':
5730 return 1;
5731
5732 case 'x':
5733 if (TREE_CODE (exp) == TREE_LIST)
5734 return ((TREE_VALUE (exp) == 0
5735 || safe_from_p (x, TREE_VALUE (exp), 0))
5736 && (TREE_CHAIN (exp) == 0
5737 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5738 else if (TREE_CODE (exp) == ERROR_MARK)
5739 return 1; /* An already-visited SAVE_EXPR? */
5740 else
5741 return 0;
5742
5743 case '1':
5744 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5745
5746 case '2':
5747 case '<':
5748 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5749 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5750
5751 case 'e':
5752 case 'r':
5753 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5754 the expression. If it is set, we conflict iff we are that rtx or
5755 both are in memory. Otherwise, we check all operands of the
5756 expression recursively. */
5757
5758 switch (TREE_CODE (exp))
5759 {
5760 case ADDR_EXPR:
5761 /* If the operand is static or we are static, we can't conflict.
5762 Likewise if we don't conflict with the operand at all. */
5763 if (staticp (TREE_OPERAND (exp, 0))
5764 || TREE_STATIC (exp)
5765 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5766 return 1;
5767
5768 /* Otherwise, the only way this can conflict is if we are taking
5769 the address of a DECL whose address is part of X, which is
5770 very rare. */
5771 exp = TREE_OPERAND (exp, 0);
5772 if (DECL_P (exp))
5773 {
5774 if (!DECL_RTL_SET_P (exp)
5775 || GET_CODE (DECL_RTL (exp)) != MEM)
5776 return 0;
5777 else
5778 exp_rtl = XEXP (DECL_RTL (exp), 0);
5779 }
5780 break;
5781
5782 case INDIRECT_REF:
5783 if (GET_CODE (x) == MEM
5784 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5785 get_alias_set (exp)))
5786 return 0;
5787 break;
5788
5789 case CALL_EXPR:
5790 /* Assume that the call will clobber all hard registers and
5791 all of memory. */
5792 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5793 || GET_CODE (x) == MEM)
5794 return 0;
5795 break;
5796
5797 case RTL_EXPR:
5798 /* If a sequence exists, we would have to scan every instruction
5799 in the sequence to see if it was safe. This is probably not
5800 worthwhile. */
5801 if (RTL_EXPR_SEQUENCE (exp))
5802 return 0;
5803
5804 exp_rtl = RTL_EXPR_RTL (exp);
5805 break;
5806
5807 case WITH_CLEANUP_EXPR:
5808 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5809 break;
5810
5811 case CLEANUP_POINT_EXPR:
5812 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5813
5814 case SAVE_EXPR:
5815 exp_rtl = SAVE_EXPR_RTL (exp);
5816 if (exp_rtl)
5817 break;
5818
5819 /* If we've already scanned this, don't do it again. Otherwise,
5820 show we've scanned it and record for clearing the flag if we're
5821 going on. */
5822 if (TREE_PRIVATE (exp))
5823 return 1;
5824
5825 TREE_PRIVATE (exp) = 1;
5826 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5827 {
5828 TREE_PRIVATE (exp) = 0;
5829 return 0;
5830 }
5831
5832 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5833 return 1;
5834
5835 case BIND_EXPR:
5836 /* The only operand we look at is operand 1. The rest aren't
5837 part of the expression. */
5838 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5839
5840 case METHOD_CALL_EXPR:
5841 /* This takes an rtx argument, but shouldn't appear here. */
5842 abort ();
5843
5844 default:
5845 break;
5846 }
5847
5848 /* If we have an rtx, we do not need to scan our operands. */
5849 if (exp_rtl)
5850 break;
5851
5852 nops = first_rtl_op (TREE_CODE (exp));
5853 for (i = 0; i < nops; i++)
5854 if (TREE_OPERAND (exp, i) != 0
5855 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5856 return 0;
5857
5858 /* If this is a language-specific tree code, it may require
5859 special handling. */
5860 if ((unsigned int) TREE_CODE (exp)
5861 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5862 && !(*lang_hooks.safe_from_p) (x, exp))
5863 return 0;
5864 }
5865
5866 /* If we have an rtl, find any enclosed object. Then see if we conflict
5867 with it. */
5868 if (exp_rtl)
5869 {
5870 if (GET_CODE (exp_rtl) == SUBREG)
5871 {
5872 exp_rtl = SUBREG_REG (exp_rtl);
5873 if (GET_CODE (exp_rtl) == REG
5874 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5875 return 0;
5876 }
5877
5878 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5879 are memory and they conflict. */
5880 return ! (rtx_equal_p (x, exp_rtl)
5881 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5882 && true_dependence (exp_rtl, GET_MODE (x), x,
5883 rtx_addr_varies_p)));
5884 }
5885
5886 /* If we reach here, it is safe. */
5887 return 1;
5888 }
5889
5890 /* Subroutine of expand_expr: return rtx if EXP is a
5891 variable or parameter; else return 0. */
5892
5893 static rtx
5894 var_rtx (exp)
5895 tree exp;
5896 {
5897 STRIP_NOPS (exp);
5898 switch (TREE_CODE (exp))
5899 {
5900 case PARM_DECL:
5901 case VAR_DECL:
5902 return DECL_RTL (exp);
5903 default:
5904 return 0;
5905 }
5906 }
5907
5908 #ifdef MAX_INTEGER_COMPUTATION_MODE
5909
5910 void
5911 check_max_integer_computation_mode (exp)
5912 tree exp;
5913 {
5914 enum tree_code code;
5915 enum machine_mode mode;
5916
5917 /* Strip any NOPs that don't change the mode. */
5918 STRIP_NOPS (exp);
5919 code = TREE_CODE (exp);
5920
5921 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5922 if (code == NOP_EXPR
5923 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5924 return;
5925
5926 /* First check the type of the overall operation. We need only look at
5927 unary, binary and relational operations. */
5928 if (TREE_CODE_CLASS (code) == '1'
5929 || TREE_CODE_CLASS (code) == '2'
5930 || TREE_CODE_CLASS (code) == '<')
5931 {
5932 mode = TYPE_MODE (TREE_TYPE (exp));
5933 if (GET_MODE_CLASS (mode) == MODE_INT
5934 && mode > MAX_INTEGER_COMPUTATION_MODE)
5935 internal_error ("unsupported wide integer operation");
5936 }
5937
5938 /* Check operand of a unary op. */
5939 if (TREE_CODE_CLASS (code) == '1')
5940 {
5941 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5942 if (GET_MODE_CLASS (mode) == MODE_INT
5943 && mode > MAX_INTEGER_COMPUTATION_MODE)
5944 internal_error ("unsupported wide integer operation");
5945 }
5946
5947 /* Check operands of a binary/comparison op. */
5948 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5949 {
5950 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5951 if (GET_MODE_CLASS (mode) == MODE_INT
5952 && mode > MAX_INTEGER_COMPUTATION_MODE)
5953 internal_error ("unsupported wide integer operation");
5954
5955 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5956 if (GET_MODE_CLASS (mode) == MODE_INT
5957 && mode > MAX_INTEGER_COMPUTATION_MODE)
5958 internal_error ("unsupported wide integer operation");
5959 }
5960 }
5961 #endif
5962 \f
5963 /* Return the highest power of two that EXP is known to be a multiple of.
5964 This is used in updating alignment of MEMs in array references. */
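/* Illustrative values (added commentary): an INTEGER_CST of 24 yields 8
   (its lowest set bit), a PLUS_EXPR of terms with factors 8 and 4 yields
   4 (the minimum), a MULT_EXPR of the same terms yields 32 (the product),
   and anything unrecognized pessimizes to 1.  */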
5965
5966 static HOST_WIDE_INT
5967 highest_pow2_factor (exp)
5968 tree exp;
5969 {
5970 HOST_WIDE_INT c0, c1;
5971
5972 switch (TREE_CODE (exp))
5973 {
5974 case INTEGER_CST:
5975 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5976 lowest bit that's a one. If the result is zero, pessimize by
5977 returning 1. This is overly conservative, but such things should not
5978 happen in the offset expressions that we are called with. */
5979 if (host_integerp (exp, 0))
5980 {
5981 c0 = tree_low_cst (exp, 0);
5982 c0 = c0 < 0 ? - c0 : c0;
5983 return c0 != 0 ? c0 & -c0 : 1;
5984 }
5985 break;
5986
5987 case PLUS_EXPR: case MINUS_EXPR:
5988 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5989 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5990 return MIN (c0, c1);
5991
5992 case MULT_EXPR:
5993 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5994 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5995 return c0 * c1;
5996
5997 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5998 case CEIL_DIV_EXPR:
5999 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6000 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6001 return MAX (1, c0 / c1);
6002
6003 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6004 case COMPOUND_EXPR: case SAVE_EXPR: case WITH_RECORD_EXPR:
6005 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6006
6007 case COND_EXPR:
6008 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6009 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6010 return MIN (c0, c1);
6011
6012 default:
6013 break;
6014 }
6015
6016 return 1;
6017 }
6018 \f
6019 /* Return an object on the placeholder list that matches EXP, a
6020 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6021 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6022 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6023 is a location which initially points to a starting location in the
6024 placeholder list (zero means start of the list) and where a pointer into
6025 the placeholder list at which the object is found is placed. */
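/* Usage note (added commentary): objects normally reach placeholder_list
   via the WITH_RECORD_EXPR case of expand_expr below, so that a size or
   position expression written in terms of a record type can be evaluated
   for one particular object of that type.  */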
6026
6027 tree
6028 find_placeholder (exp, plist)
6029 tree exp;
6030 tree *plist;
6031 {
6032 tree type = TREE_TYPE (exp);
6033 tree placeholder_expr;
6034
6035 for (placeholder_expr
6036 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6037 placeholder_expr != 0;
6038 placeholder_expr = TREE_CHAIN (placeholder_expr))
6039 {
6040 tree need_type = TYPE_MAIN_VARIANT (type);
6041 tree elt;
6042
6043 /* Find the outermost reference that is of the type we want. If none,
6044 see if any object has a type that is a pointer to the type we
6045 want. */
6046 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6047 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6048 || TREE_CODE (elt) == COND_EXPR)
6049 ? TREE_OPERAND (elt, 1)
6050 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6051 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6052 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6053 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6054 ? TREE_OPERAND (elt, 0) : 0))
6055 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6056 {
6057 if (plist)
6058 *plist = placeholder_expr;
6059 return elt;
6060 }
6061
6062 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6063 elt
6064 = ((TREE_CODE (elt) == COMPOUND_EXPR
6065 || TREE_CODE (elt) == COND_EXPR)
6066 ? TREE_OPERAND (elt, 1)
6067 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6068 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6069 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6070 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6071 ? TREE_OPERAND (elt, 0) : 0))
6072 if (POINTER_TYPE_P (TREE_TYPE (elt))
6073 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6074 == need_type))
6075 {
6076 if (plist)
6077 *plist = placeholder_expr;
6078 return build1 (INDIRECT_REF, need_type, elt);
6079 }
6080 }
6081
6082 return 0;
6083 }
6084 \f
6085 /* expand_expr: generate code for computing expression EXP.
6086 An rtx for the computed value is returned. The value is never null.
6087 In the case of a void EXP, const0_rtx is returned.
6088
6089 The value may be stored in TARGET if TARGET is nonzero.
6090 TARGET is just a suggestion; callers must assume that
6091 the rtx returned may not be the same as TARGET.
6092
6093 If TARGET is CONST0_RTX, it means that the value will be ignored.
6094
6095 If TMODE is not VOIDmode, it suggests generating the
6096 result in mode TMODE. But this is done only when convenient.
6097 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6098 TMODE is just a suggestion; callers must assume that
6099 the rtx returned may not have mode TMODE.
6100
6101 Note that TARGET may have neither TMODE nor MODE. In that case, it
6102 probably will not be used.
6103
6104 If MODIFIER is EXPAND_SUM then when EXP is an addition
6105 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6106 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6107 products as above, or REG or MEM, or constant.
6108 Ordinarily in such cases we would output mul or add instructions
6109 and then return a pseudo reg containing the sum.
6110
6111 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6112 it also marks a label as absolutely required (it can't be dead).
6113 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6114 This is used for outputting expressions used in initializers.
6115
6116 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6117 with a constant address even if that address is not normally legitimate.
6118 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
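/* A small illustration of the modifiers (added commentary): expanding
   `p + 4' under EXPAND_SUM may simply return (plus (reg) (const_int 4))
   for later use as an address, whereas EXPAND_NORMAL would emit the add
   and return a pseudo register holding the sum.  */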
6119
6120 rtx
6121 expand_expr (exp, target, tmode, modifier)
6122 tree exp;
6123 rtx target;
6124 enum machine_mode tmode;
6125 enum expand_modifier modifier;
6126 {
6127 rtx op0, op1, temp;
6128 tree type = TREE_TYPE (exp);
6129 int unsignedp = TREE_UNSIGNED (type);
6130 enum machine_mode mode;
6131 enum tree_code code = TREE_CODE (exp);
6132 optab this_optab;
6133 rtx subtarget, original_target;
6134 int ignore;
6135 tree context;
6136 /* Used by check-memory-usage to make modifier read only. */
6137 enum expand_modifier ro_modifier;
6138
6139 /* Handle ERROR_MARK before anybody tries to access its type. */
6140 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6141 {
6142 op0 = CONST0_RTX (tmode);
6143 if (op0 != 0)
6144 return op0;
6145 return const0_rtx;
6146 }
6147
6148 mode = TYPE_MODE (type);
6149 /* Use subtarget as the target for operand 0 of a binary operation. */
6150 subtarget = get_subtarget (target);
6151 original_target = target;
6152 ignore = (target == const0_rtx
6153 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6154 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6155 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6156 && TREE_CODE (type) == VOID_TYPE));
6157
6158 /* Make a read-only version of the modifier. */
6159 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6160 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6161 ro_modifier = modifier;
6162 else
6163 ro_modifier = EXPAND_NORMAL;
6164
6165 /* If we are going to ignore this result, we need only do something
6166 if there is a side-effect somewhere in the expression. If there
6167 is, short-circuit the most common cases here. Note that we must
6168 not call expand_expr with anything but const0_rtx in case this
6169 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6170
6171 if (ignore)
6172 {
6173 if (! TREE_SIDE_EFFECTS (exp))
6174 return const0_rtx;
6175
6176 /* Ensure we reference a volatile object even if value is ignored, but
6177 don't do this if all we are doing is taking its address. */
6178 if (TREE_THIS_VOLATILE (exp)
6179 && TREE_CODE (exp) != FUNCTION_DECL
6180 && mode != VOIDmode && mode != BLKmode
6181 && modifier != EXPAND_CONST_ADDRESS)
6182 {
6183 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6184 if (GET_CODE (temp) == MEM)
6185 temp = copy_to_reg (temp);
6186 return const0_rtx;
6187 }
6188
6189 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6190 || code == INDIRECT_REF || code == BUFFER_REF)
6191 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6192 VOIDmode, ro_modifier);
6193 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6194 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6195 {
6196 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6197 ro_modifier);
6198 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6199 ro_modifier);
6200 return const0_rtx;
6201 }
6202 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6203 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6204 /* If the second operand has no side effects, just evaluate
6205 the first. */
6206 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6207 VOIDmode, ro_modifier);
6208 else if (code == BIT_FIELD_REF)
6209 {
6210 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6211 ro_modifier);
6212 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6213 ro_modifier);
6214 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6215 ro_modifier);
6216 return const0_rtx;
6217 }
6218 ;
6219 target = 0;
6220 }
6221
6222 #ifdef MAX_INTEGER_COMPUTATION_MODE
6223 /* Only check stuff here if the mode we want is different from the mode
6224 of the expression; if it's the same, check_max_integer_computation_mode
6225 will handle it. Do we really need to check this stuff at all? */
6226
6227 if (target
6228 && GET_MODE (target) != mode
6229 && TREE_CODE (exp) != INTEGER_CST
6230 && TREE_CODE (exp) != PARM_DECL
6231 && TREE_CODE (exp) != ARRAY_REF
6232 && TREE_CODE (exp) != ARRAY_RANGE_REF
6233 && TREE_CODE (exp) != COMPONENT_REF
6234 && TREE_CODE (exp) != BIT_FIELD_REF
6235 && TREE_CODE (exp) != INDIRECT_REF
6236 && TREE_CODE (exp) != CALL_EXPR
6237 && TREE_CODE (exp) != VAR_DECL
6238 && TREE_CODE (exp) != RTL_EXPR)
6239 {
6240 enum machine_mode mode = GET_MODE (target);
6241
6242 if (GET_MODE_CLASS (mode) == MODE_INT
6243 && mode > MAX_INTEGER_COMPUTATION_MODE)
6244 internal_error ("unsupported wide integer operation");
6245 }
6246
6247 if (tmode != mode
6248 && TREE_CODE (exp) != INTEGER_CST
6249 && TREE_CODE (exp) != PARM_DECL
6250 && TREE_CODE (exp) != ARRAY_REF
6251 && TREE_CODE (exp) != ARRAY_RANGE_REF
6252 && TREE_CODE (exp) != COMPONENT_REF
6253 && TREE_CODE (exp) != BIT_FIELD_REF
6254 && TREE_CODE (exp) != INDIRECT_REF
6255 && TREE_CODE (exp) != VAR_DECL
6256 && TREE_CODE (exp) != CALL_EXPR
6257 && TREE_CODE (exp) != RTL_EXPR
6258 && GET_MODE_CLASS (tmode) == MODE_INT
6259 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6260 internal_error ("unsupported wide integer operation");
6261
6262 check_max_integer_computation_mode (exp);
6263 #endif
6264
6265 /* If we will do cse, generate all results into pseudo registers
6266 since 1) that allows cse to find more things
6267 and 2) otherwise cse could produce an insn the machine
6268 cannot support. An exception is a CONSTRUCTOR into a multi-word
6269 MEM: storing directly into the MEM is much more likely to be the most efficient choice. */
6270
6271 if (! cse_not_expected && mode != BLKmode && target
6272 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6273 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6274 target = subtarget;
6275
6276 switch (code)
6277 {
6278 case LABEL_DECL:
6279 {
6280 tree function = decl_function_context (exp);
6281 /* Handle using a label in a containing function. */
6282 if (function != current_function_decl
6283 && function != inline_function_decl && function != 0)
6284 {
6285 struct function *p = find_function_data (function);
6286 p->expr->x_forced_labels
6287 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6288 p->expr->x_forced_labels);
6289 }
6290 else
6291 {
6292 if (modifier == EXPAND_INITIALIZER)
6293 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6294 label_rtx (exp),
6295 forced_labels);
6296 }
6297
6298 temp = gen_rtx_MEM (FUNCTION_MODE,
6299 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6300 if (function != current_function_decl
6301 && function != inline_function_decl && function != 0)
6302 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6303 return temp;
6304 }
6305
6306 case PARM_DECL:
6307 if (DECL_RTL (exp) == 0)
6308 {
6309 error_with_decl (exp, "prior parameter's size depends on `%s'");
6310 return CONST0_RTX (mode);
6311 }
6312
6313 /* ... fall through ... */
6314
6315 case VAR_DECL:
6316 /* If a static var's type was incomplete when the decl was written,
6317 but the type is complete now, lay out the decl now. */
6318 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6319 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6320 {
6321 rtx value = DECL_RTL_IF_SET (exp);
6322
6323 layout_decl (exp, 0);
6324
6325 /* If the RTL was already set, update its mode and memory
6326 attributes. */
6327 if (value != 0)
6328 {
6329 PUT_MODE (value, DECL_MODE (exp));
6330 SET_DECL_RTL (exp, 0);
6331 set_mem_attributes (value, exp, 1);
6332 SET_DECL_RTL (exp, value);
6333 }
6334 }
6335
6336 /* Although static-storage variables start off initialized, according to
6337 ANSI C, a memcpy could overwrite them with uninitialized values. So
6338 we check them too. This also lets us check for read-only variables
6339 accessed via a non-const declaration, in case it won't be detected
6340 any other way (e.g., in an embedded system or OS kernel without
6341 memory protection).
6342
6343 Aggregates are not checked here; they're handled elsewhere. */
6344 if (cfun && current_function_check_memory_usage
6345 && code == VAR_DECL
6346 && GET_CODE (DECL_RTL (exp)) == MEM
6347 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6348 {
6349 enum memory_use_mode memory_usage;
6350 memory_usage = get_memory_usage_from_modifier (modifier);
6351
6352 in_check_memory_usage = 1;
6353 if (memory_usage != MEMORY_USE_DONT)
6354 emit_library_call (chkr_check_addr_libfunc,
6355 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6356 XEXP (DECL_RTL (exp), 0), Pmode,
6357 GEN_INT (int_size_in_bytes (type)),
6358 TYPE_MODE (sizetype),
6359 GEN_INT (memory_usage),
6360 TYPE_MODE (integer_type_node));
6361 in_check_memory_usage = 0;
6362 }
6363
6364 /* ... fall through ... */
6365
6366 case FUNCTION_DECL:
6367 case RESULT_DECL:
6368 if (DECL_RTL (exp) == 0)
6369 abort ();
6370
6371 /* Ensure the variable is marked as used even if it doesn't go through
6372 a parser. If it hasn't been used yet, write out an external
6373 definition. */
6374 if (! TREE_USED (exp))
6375 {
6376 assemble_external (exp);
6377 TREE_USED (exp) = 1;
6378 }
6379
6380 /* Show we haven't gotten RTL for this yet. */
6381 temp = 0;
6382
6383 /* Handle variables inherited from containing functions. */
6384 context = decl_function_context (exp);
6385
6386 /* We treat inline_function_decl as an alias for the current function
6387 because that is the inline function whose vars, types, etc.
6388 are being merged into the current function.
6389 See expand_inline_function. */
6390
6391 if (context != 0 && context != current_function_decl
6392 && context != inline_function_decl
6393 /* If var is static, we don't need a static chain to access it. */
6394 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6395 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6396 {
6397 rtx addr;
6398
6399 /* Mark as non-local and addressable. */
6400 DECL_NONLOCAL (exp) = 1;
6401 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6402 abort ();
6403 mark_addressable (exp);
6404 if (GET_CODE (DECL_RTL (exp)) != MEM)
6405 abort ();
6406 addr = XEXP (DECL_RTL (exp), 0);
6407 if (GET_CODE (addr) == MEM)
6408 addr
6409 = replace_equiv_address (addr,
6410 fix_lexical_addr (XEXP (addr, 0), exp));
6411 else
6412 addr = fix_lexical_addr (addr, exp);
6413
6414 temp = replace_equiv_address (DECL_RTL (exp), addr);
6415 }
6416
6417 /* This is the case of an array whose size is to be determined
6418 from its initializer, while the initializer is still being parsed.
6419 See expand_decl. */
6420
6421 else if (GET_CODE (DECL_RTL (exp)) == MEM
6422 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6423 temp = validize_mem (DECL_RTL (exp));
6424
6425 /* If DECL_RTL is memory, we are in the normal case and either
6426 the address is not valid or it is not a register and -fforce-addr
6427 is specified, get the address into a register. */
6428
6429 else if (GET_CODE (DECL_RTL (exp)) == MEM
6430 && modifier != EXPAND_CONST_ADDRESS
6431 && modifier != EXPAND_SUM
6432 && modifier != EXPAND_INITIALIZER
6433 && (! memory_address_p (DECL_MODE (exp),
6434 XEXP (DECL_RTL (exp), 0))
6435 || (flag_force_addr
6436 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6437 temp = replace_equiv_address (DECL_RTL (exp),
6438 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6439
6440 /* If we got something, return it. But first, set the alignment
6441 if the address is a register. */
6442 if (temp != 0)
6443 {
6444 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6445 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6446
6447 return temp;
6448 }
6449
6450 /* If the mode of DECL_RTL does not match that of the decl, it
6451 must be a promoted value. We return a SUBREG of the wanted mode,
6452 but mark it so that we know that it was already extended. */
6453
6454 if (GET_CODE (DECL_RTL (exp)) == REG
6455 && GET_MODE (DECL_RTL (exp)) != mode)
6456 {
6457 /* Get the signedness used for this variable. Ensure we get the
6458 same mode we got when the variable was declared. */
6459 if (GET_MODE (DECL_RTL (exp))
6460 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6461 abort ();
6462
6463 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6464 SUBREG_PROMOTED_VAR_P (temp) = 1;
6465 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6466 return temp;
6467 }
6468
6469 return DECL_RTL (exp);
6470
6471 case INTEGER_CST:
6472 return immed_double_const (TREE_INT_CST_LOW (exp),
6473 TREE_INT_CST_HIGH (exp), mode);
6474
6475 case CONST_DECL:
6476 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6477 EXPAND_MEMORY_USE_BAD);
6478
6479 case REAL_CST:
6480 /* If optimized, generate immediate CONST_DOUBLE
6481 which will be turned into memory by reload if necessary.
6482
6483 We used to force a register so that loop.c could see it. But
6484 this does not allow gen_* patterns to perform optimizations with
6485 the constants. It also produces two insns in cases like "x = 1.0;".
6486 On most machines, floating-point constants are not permitted in
6487 many insns, so we'd end up copying it to a register in any case.
6488
6489 Now, we do the copying in expand_binop, if appropriate. */
6490 return immed_real_const (exp);
6491
6492 case COMPLEX_CST:
6493 case STRING_CST:
6494 if (! TREE_CST_RTL (exp))
6495 output_constant_def (exp, 1);
6496
6497 /* TREE_CST_RTL probably contains a constant address.
6498 On RISC machines where a constant address isn't valid,
6499 make some insns to get that address into a register. */
6500 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6501 && modifier != EXPAND_CONST_ADDRESS
6502 && modifier != EXPAND_INITIALIZER
6503 && modifier != EXPAND_SUM
6504 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6505 || (flag_force_addr
6506 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6507 return replace_equiv_address (TREE_CST_RTL (exp),
6508 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6509 return TREE_CST_RTL (exp);
6510
6511 case EXPR_WITH_FILE_LOCATION:
6512 {
6513 rtx to_return;
6514 const char *saved_input_filename = input_filename;
6515 int saved_lineno = lineno;
6516 input_filename = EXPR_WFL_FILENAME (exp);
6517 lineno = EXPR_WFL_LINENO (exp);
6518 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6519 emit_line_note (input_filename, lineno);
6520 /* Possibly avoid switching back and forth here. */
6521 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6522 input_filename = saved_input_filename;
6523 lineno = saved_lineno;
6524 return to_return;
6525 }
6526
6527 case SAVE_EXPR:
6528 context = decl_function_context (exp);
6529
6530 /* If this SAVE_EXPR was at global context, assume we are an
6531 initialization function and move it into our context. */
6532 if (context == 0)
6533 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6534
6535 /* We treat inline_function_decl as an alias for the current function
6536 because that is the inline function whose vars, types, etc.
6537 are being merged into the current function.
6538 See expand_inline_function. */
6539 if (context == current_function_decl || context == inline_function_decl)
6540 context = 0;
6541
6542 /* If this is non-local, handle it. */
6543 if (context)
6544 {
6545 /* The following call just exists to abort if the context is
6546 not of a containing function. */
6547 find_function_data (context);
6548
6549 temp = SAVE_EXPR_RTL (exp);
6550 if (temp && GET_CODE (temp) == REG)
6551 {
6552 put_var_into_stack (exp);
6553 temp = SAVE_EXPR_RTL (exp);
6554 }
6555 if (temp == 0 || GET_CODE (temp) != MEM)
6556 abort ();
6557 return
6558 replace_equiv_address (temp,
6559 fix_lexical_addr (XEXP (temp, 0), exp));
6560 }
6561 if (SAVE_EXPR_RTL (exp) == 0)
6562 {
6563 if (mode == VOIDmode)
6564 temp = const0_rtx;
6565 else
6566 temp = assign_temp (build_qualified_type (type,
6567 (TYPE_QUALS (type)
6568 | TYPE_QUAL_CONST)),
6569 3, 0, 0);
6570
6571 SAVE_EXPR_RTL (exp) = temp;
6572 if (!optimize && GET_CODE (temp) == REG)
6573 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6574 save_expr_regs);
6575
6576 /* If the mode of TEMP does not match that of the expression, it
6577 must be a promoted value. We pass store_expr a SUBREG of the
6578 wanted mode but mark it so that we know that it was already
6579 extended. Note that `unsignedp' was modified above in
6580 this case. */
6581
6582 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6583 {
6584 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6585 SUBREG_PROMOTED_VAR_P (temp) = 1;
6586 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6587 }
6588
6589 if (temp == const0_rtx)
6590 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6591 EXPAND_MEMORY_USE_BAD);
6592 else
6593 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6594
6595 TREE_USED (exp) = 1;
6596 }
6597
6598 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6599 must be a promoted value. We return a SUBREG of the wanted mode,
6600 but mark it so that we know that it was already extended. */
6601
6602 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6603 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6604 {
6605 /* Compute the signedness and make the proper SUBREG. */
6606 promote_mode (type, mode, &unsignedp, 0);
6607 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6608 SUBREG_PROMOTED_VAR_P (temp) = 1;
6609 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6610 return temp;
6611 }
6612
6613 return SAVE_EXPR_RTL (exp);
6614
6615 case UNSAVE_EXPR:
6616 {
6617 rtx temp;
6618 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6619 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6620 return temp;
6621 }
6622
6623 case PLACEHOLDER_EXPR:
6624 {
6625 tree old_list = placeholder_list;
6626 tree placeholder_expr = 0;
6627
6628 exp = find_placeholder (exp, &placeholder_expr);
6629 if (exp == 0)
6630 abort ();
6631
6632 placeholder_list = TREE_CHAIN (placeholder_expr);
6633 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6634 placeholder_list = old_list;
6635 return temp;
6636 }
6637
6638 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6639 abort ();
6640
6641 case WITH_RECORD_EXPR:
6642 /* Put the object on the placeholder list, expand our first operand,
6643 and pop the list. */
6644 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6645 placeholder_list);
6646 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6647 tmode, ro_modifier);
6648 placeholder_list = TREE_CHAIN (placeholder_list);
6649 return target;
6650
6651 case GOTO_EXPR:
6652 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6653 expand_goto (TREE_OPERAND (exp, 0));
6654 else
6655 expand_computed_goto (TREE_OPERAND (exp, 0));
6656 return const0_rtx;
6657
6658 case EXIT_EXPR:
6659 expand_exit_loop_if_false (NULL,
6660 invert_truthvalue (TREE_OPERAND (exp, 0)));
6661 return const0_rtx;
6662
6663 case LABELED_BLOCK_EXPR:
6664 if (LABELED_BLOCK_BODY (exp))
6665 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6666 /* Should perhaps use expand_label, but this is simpler and safer. */
6667 do_pending_stack_adjust ();
6668 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6669 return const0_rtx;
6670
6671 case EXIT_BLOCK_EXPR:
6672 if (EXIT_BLOCK_RETURN (exp))
6673 sorry ("returned value in block_exit_expr");
6674 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6675 return const0_rtx;
6676
6677 case LOOP_EXPR:
6678 push_temp_slots ();
6679 expand_start_loop (1);
6680 expand_expr_stmt (TREE_OPERAND (exp, 0));
6681 expand_end_loop ();
6682 pop_temp_slots ();
6683
6684 return const0_rtx;
6685
6686 case BIND_EXPR:
6687 {
6688 tree vars = TREE_OPERAND (exp, 0);
6689 int vars_need_expansion = 0;
6690
6691 /* Need to open a binding contour here because
6692 if there are any cleanups they must be contained here. */
6693 expand_start_bindings (2);
6694
6695 /* Mark the corresponding BLOCK for output in its proper place. */
6696 if (TREE_OPERAND (exp, 2) != 0
6697 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6698 insert_block (TREE_OPERAND (exp, 2));
6699
6700 /* If VARS have not yet been expanded, expand them now. */
6701 while (vars)
6702 {
6703 if (!DECL_RTL_SET_P (vars))
6704 {
6705 vars_need_expansion = 1;
6706 expand_decl (vars);
6707 }
6708 expand_decl_init (vars);
6709 vars = TREE_CHAIN (vars);
6710 }
6711
6712 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6713
6714 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6715
6716 return temp;
6717 }
6718
6719 case RTL_EXPR:
6720 if (RTL_EXPR_SEQUENCE (exp))
6721 {
6722 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6723 abort ();
6724 emit_insns (RTL_EXPR_SEQUENCE (exp));
6725 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6726 }
6727 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6728 free_temps_for_rtl_expr (exp);
6729 return RTL_EXPR_RTL (exp);
6730
6731 case CONSTRUCTOR:
6732 /* If we don't need the result, just ensure we evaluate any
6733 subexpressions. */
6734 if (ignore)
6735 {
6736 tree elt;
6737 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6738 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6739 EXPAND_MEMORY_USE_BAD);
6740 return const0_rtx;
6741 }
6742
6743 /* All elts simple constants => refer to a constant in memory. But
6744 if this is a non-BLKmode mode, let it store a field at a time
6745 since that should make a CONST_INT or CONST_DOUBLE when we
6746 fold. Likewise, if we have a target we can use, it is best to
6747 store directly into the target unless the type is large enough
6748 that memcpy will be used. If we are making an initializer and
6749 all operands are constant, put it in memory as well. */
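/* For example (illustrative only): a TREE_STATIC constructor such as
   `{1, 2, 3}' for an `int[3]' is normally emitted as a constant in
   memory, whereas a small constructor whose type has a scalar mode is
   built up field by field so the result can fold to a CONST_INT.  */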
6750 else if ((TREE_STATIC (exp)
6751 && ((mode == BLKmode
6752 && ! (target != 0 && safe_from_p (target, exp, 1)))
6753 || TREE_ADDRESSABLE (exp)
6754 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6755 && (! MOVE_BY_PIECES_P
6756 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6757 TYPE_ALIGN (type)))
6758 && ! mostly_zeros_p (exp))))
6759 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6760 {
6761 rtx constructor = output_constant_def (exp, 1);
6762
6763 if (modifier != EXPAND_CONST_ADDRESS
6764 && modifier != EXPAND_INITIALIZER
6765 && modifier != EXPAND_SUM)
6766 constructor = validize_mem (constructor);
6767
6768 return constructor;
6769 }
6770 else
6771 {
6772 /* Handle calls that pass values in multiple non-contiguous
6773 locations. The Irix 6 ABI has examples of this. */
6774 if (target == 0 || ! safe_from_p (target, exp, 1)
6775 || GET_CODE (target) == PARALLEL)
6776 target
6777 = assign_temp (build_qualified_type (type,
6778 (TYPE_QUALS (type)
6779 | (TREE_READONLY (exp)
6780 * TYPE_QUAL_CONST))),
6781 0, TREE_ADDRESSABLE (exp), 1);
6782
6783 store_constructor (exp, target, 0,
6784 int_size_in_bytes (TREE_TYPE (exp)));
6785 return target;
6786 }
6787
6788 case INDIRECT_REF:
6789 {
6790 tree exp1 = TREE_OPERAND (exp, 0);
6791 tree index;
6792 tree string = string_constant (exp1, &index);
6793
6794 /* Try to optimize reads from const strings. */
6795 if (string
6796 && TREE_CODE (string) == STRING_CST
6797 && TREE_CODE (index) == INTEGER_CST
6798 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6799 && GET_MODE_CLASS (mode) == MODE_INT
6800 && GET_MODE_SIZE (mode) == 1
6801 && modifier != EXPAND_MEMORY_USE_WO)
6802 return
6803 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6804
6805 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6806 op0 = memory_address (mode, op0);
6807
6808 if (cfun && current_function_check_memory_usage
6809 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6810 {
6811 enum memory_use_mode memory_usage;
6812 memory_usage = get_memory_usage_from_modifier (modifier);
6813
6814 if (memory_usage != MEMORY_USE_DONT)
6815 {
6816 in_check_memory_usage = 1;
6817 emit_library_call (chkr_check_addr_libfunc,
6818 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6819 Pmode, GEN_INT (int_size_in_bytes (type)),
6820 TYPE_MODE (sizetype),
6821 GEN_INT (memory_usage),
6822 TYPE_MODE (integer_type_node));
6823 in_check_memory_usage = 0;
6824 }
6825 }
6826
6827 temp = gen_rtx_MEM (mode, op0);
6828 set_mem_attributes (temp, exp, 0);
6829
6830 /* If we are writing to this object and its type is a record with
6831 readonly fields, we must mark it as readonly so it will
6832 conflict with readonly references to those fields. */
6833 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6834 RTX_UNCHANGING_P (temp) = 1;
6835
6836 return temp;
6837 }
6838
6839 case ARRAY_REF:
6840 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6841 abort ();
6842
6843 {
6844 tree array = TREE_OPERAND (exp, 0);
6845 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6846 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6847 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6848 HOST_WIDE_INT i;
6849
6850 /* Optimize the special-case of a zero lower bound.
6851
6852 We convert the low_bound to sizetype to avoid some problems
6853 with constant folding. (E.g. suppose the lower bound is 1,
6854 and its mode is QI. Without the conversion, (ARRAY
6855 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6856 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6857
6858 if (! integer_zerop (low_bound))
6859 index = size_diffop (index, convert (sizetype, low_bound));
6860
6861 /* Fold an expression like: "foo"[2].
6862 This is not done in fold so it won't happen inside &.
6863 Don't fold if this is for wide characters since it's too
6864 difficult to do correctly and this is a very rare case. */
6865
6866 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6867 && TREE_CODE (array) == STRING_CST
6868 && TREE_CODE (index) == INTEGER_CST
6869 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6870 && GET_MODE_CLASS (mode) == MODE_INT
6871 && GET_MODE_SIZE (mode) == 1)
6872 return
6873 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6874
6875 /* If this is a constant index into a constant array,
6876 just get the value from the array. Handle both the cases when
6877 we have an explicit constructor and when our operand is a variable
6878 that was declared const. */
6879
6880 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6881 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6882 && TREE_CODE (index) == INTEGER_CST
6883 && 0 > compare_tree_int (index,
6884 list_length (CONSTRUCTOR_ELTS
6885 (TREE_OPERAND (exp, 0)))))
6886 {
6887 tree elem;
6888
6889 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6890 i = TREE_INT_CST_LOW (index);
6891 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6892 ;
6893
6894 if (elem)
6895 return expand_expr (fold (TREE_VALUE (elem)), target,
6896 tmode, ro_modifier);
6897 }
6898
6899 else if (optimize >= 1
6900 && modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_INITIALIZER
6902 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6903 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6904 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6905 {
6906 if (TREE_CODE (index) == INTEGER_CST)
6907 {
6908 tree init = DECL_INITIAL (array);
6909
6910 if (TREE_CODE (init) == CONSTRUCTOR)
6911 {
6912 tree elem;
6913
6914 for (elem = CONSTRUCTOR_ELTS (init);
6915 (elem
6916 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6917 elem = TREE_CHAIN (elem))
6918 ;
6919
6920 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6921 return expand_expr (fold (TREE_VALUE (elem)), target,
6922 tmode, ro_modifier);
6923 }
6924 else if (TREE_CODE (init) == STRING_CST
6925 && 0 > compare_tree_int (index,
6926 TREE_STRING_LENGTH (init)))
6927 {
6928 tree type = TREE_TYPE (TREE_TYPE (init));
6929 enum machine_mode mode = TYPE_MODE (type);
6930
6931 if (GET_MODE_CLASS (mode) == MODE_INT
6932 && GET_MODE_SIZE (mode) == 1)
6933 return (GEN_INT
6934 (TREE_STRING_POINTER
6935 (init)[TREE_INT_CST_LOW (index)]));
6936 }
6937 }
6938 }
6939 }
6940 /* Fall through. */
6941
6942 case COMPONENT_REF:
6943 case BIT_FIELD_REF:
6944 case ARRAY_RANGE_REF:
6945 /* If the operand is a CONSTRUCTOR, we can just extract the
6946 appropriate field if it is present. Don't do this if we have
6947 already written the data since we want to refer to that copy
6948 and varasm.c assumes that's what we'll do. */
6949 if (code == COMPONENT_REF
6950 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6951 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6952 {
6953 tree elt;
6954
6955 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6956 elt = TREE_CHAIN (elt))
6957 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6958 /* We can normally use the value of the field in the
6959 CONSTRUCTOR. However, if this is a bitfield in
6960 an integral mode that we can fit in a HOST_WIDE_INT,
6961 we must mask only the number of bits in the bitfield,
6962 since this is done implicitly by the constructor. If
6963 the bitfield does not meet either of those conditions,
6964 we can't do this optimization. */
6965 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6966 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6967 == MODE_INT)
6968 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6969 <= HOST_BITS_PER_WIDE_INT))))
6970 {
6971 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6972 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6973 {
6974 HOST_WIDE_INT bitsize
6975 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6976
6977 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6978 {
6979 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6980 op0 = expand_and (op0, op1, target);
6981 }
6982 else
6983 {
6984 enum machine_mode imode
6985 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6986 tree count
6987 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6988 0);
6989
6990 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6991 target, 0);
6992 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6993 target, 0);
6994 }
6995 }
6996
6997 return op0;
6998 }
6999 }
7000
7001 {
7002 enum machine_mode mode1;
7003 HOST_WIDE_INT bitsize, bitpos;
7004 tree offset;
7005 int volatilep = 0;
7006 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7007 &mode1, &unsignedp, &volatilep);
7008 rtx orig_op0;
7009
7010 /* If we got back the original object, something is wrong. Perhaps
7011 we are evaluating an expression too early. In any event, don't
7012 infinitely recurse. */
7013 if (tem == exp)
7014 abort ();
7015
7016 /* If TEM's type is a union of variable size, pass TARGET to the inner
7017 computation, since it will need a temporary and TARGET is known
7018 to be one that will do. This occurs in unchecked conversion in Ada. */
7019
7020 orig_op0 = op0
7021 = expand_expr (tem,
7022 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7023 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7024 != INTEGER_CST)
7025 ? target : NULL_RTX),
7026 VOIDmode,
7027 (modifier == EXPAND_INITIALIZER
7028 || modifier == EXPAND_CONST_ADDRESS)
7029 ? modifier : EXPAND_NORMAL);
7030
7031 /* If this is a constant, put it into a register if it is a
7032 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7033 if (CONSTANT_P (op0))
7034 {
7035 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7036 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7037 && offset == 0)
7038 op0 = force_reg (mode, op0);
7039 else
7040 op0 = validize_mem (force_const_mem (mode, op0));
7041 }
7042
7043 if (offset != 0)
7044 {
7045 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7046
7047 /* If this object is in a register, put it into memory.
7048 This case can't occur in C, but can in Ada if we have
7049 unchecked conversion of an expression from a scalar type to
7050 an array or record type. */
7051 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7052 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7053 {
7054 /* If the operand is a SAVE_EXPR, we can deal with this by
7055 forcing the SAVE_EXPR into memory. */
7056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7057 {
7058 put_var_into_stack (TREE_OPERAND (exp, 0));
7059 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7060 }
7061 else
7062 {
7063 tree nt
7064 = build_qualified_type (TREE_TYPE (tem),
7065 (TYPE_QUALS (TREE_TYPE (tem))
7066 | TYPE_QUAL_CONST));
7067 rtx memloc = assign_temp (nt, 1, 1, 1);
7068
7069 emit_move_insn (memloc, op0);
7070 op0 = memloc;
7071 }
7072 }
7073
7074 if (GET_CODE (op0) != MEM)
7075 abort ();
7076
7077 if (GET_MODE (offset_rtx) != ptr_mode)
7078 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7079
7080 #ifdef POINTERS_EXTEND_UNSIGNED
7081 if (GET_MODE (offset_rtx) != Pmode)
7082 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7083 #endif
7084
7085 /* A constant address in OP0 can have VOIDmode; we must not try
7086 to call force_reg in that case, so avoid it.  */
7087 if (GET_CODE (op0) == MEM
7088 && GET_MODE (op0) == BLKmode
7089 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7090 && bitsize != 0
7091 && (bitpos % bitsize) == 0
7092 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7093 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7094 {
7095 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7096
7097 if (GET_CODE (XEXP (temp, 0)) == REG)
7098 op0 = temp;
7099 else
7100 op0 = (replace_equiv_address
7101 (op0,
7102 force_reg (GET_MODE (XEXP (temp, 0)),
7103 XEXP (temp, 0))));
7104 bitpos = 0;
7105 }
7106
7107 op0 = offset_address (op0, offset_rtx,
7108 highest_pow2_factor (offset));
7109 }
7110
7111 /* Don't forget about volatility even if this is a bitfield. */
7112 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7113 {
7114 if (op0 == orig_op0)
7115 op0 = copy_rtx (op0);
7116
7117 MEM_VOLATILE_P (op0) = 1;
7118 }
7119
7120 /* Check the access. */
7121 if (cfun != 0 && current_function_check_memory_usage
7122 && GET_CODE (op0) == MEM)
7123 {
7124 enum memory_use_mode memory_usage;
7125 memory_usage = get_memory_usage_from_modifier (modifier);
7126
7127 if (memory_usage != MEMORY_USE_DONT)
7128 {
7129 rtx to;
7130 int size;
7131
7132 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7133 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7134
7135 /* Check the access right of the pointer. */
7136 in_check_memory_usage = 1;
7137 if (size > BITS_PER_UNIT)
7138 emit_library_call (chkr_check_addr_libfunc,
7139 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7140 Pmode, GEN_INT (size / BITS_PER_UNIT),
7141 TYPE_MODE (sizetype),
7142 GEN_INT (memory_usage),
7143 TYPE_MODE (integer_type_node));
7144 in_check_memory_usage = 0;
7145 }
7146 }
7147
7148 /* In cases where an aligned union has an unaligned object
7149 as a field, we might be extracting a BLKmode value from
7150 an integer-mode (e.g., SImode) object. Handle this case
7151 by doing the extract into an object as wide as the field
7152 (which we know to be the width of a basic mode), then
7153 storing into memory, and changing the mode to BLKmode. */
7154 if (mode1 == VOIDmode
7155 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7156 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7157 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7158 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7159 && modifier != EXPAND_CONST_ADDRESS
7160 && modifier != EXPAND_INITIALIZER)
7161 /* If the field isn't aligned enough to fetch as a memref,
7162 fetch it as a bit field. */
7163 || (mode1 != BLKmode
7164 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7165 && ((TYPE_ALIGN (TREE_TYPE (tem))
7166 < GET_MODE_ALIGNMENT (mode))
7167 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7168 /* If the type and the field are a constant size and the
7169 size of the type isn't the same size as the bitfield,
7170 we must use bitfield operations. */
7171 || (bitsize >= 0
7172 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7173 == INTEGER_CST)
7174 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7175 bitsize)))
7176 {
7177 enum machine_mode ext_mode = mode;
7178
7179 if (ext_mode == BLKmode
7180 && ! (target != 0 && GET_CODE (op0) == MEM
7181 && GET_CODE (target) == MEM
7182 && bitpos % BITS_PER_UNIT == 0))
7183 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7184
7185 if (ext_mode == BLKmode)
7186 {
7187 /* In this case, BITPOS must start at a byte boundary and
7188 TARGET, if specified, must be a MEM. */
7189 if (GET_CODE (op0) != MEM
7190 || (target != 0 && GET_CODE (target) != MEM)
7191 || bitpos % BITS_PER_UNIT != 0)
7192 abort ();
7193
7194 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7195 if (target == 0)
7196 target = assign_temp (type, 0, 1, 1);
7197
7198 emit_block_move (target, op0,
7199 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7200 / BITS_PER_UNIT));
7201
7202 return target;
7203 }
7204
7205 op0 = validize_mem (op0);
7206
7207 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7208 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7209
7210 op0 = extract_bit_field (op0, bitsize, bitpos,
7211 unsignedp, target, ext_mode, ext_mode,
7212 int_size_in_bytes (TREE_TYPE (tem)));
7213
7214 /* If the result is a record type and BITSIZE is narrower than
7215 the mode of OP0, an integral mode, and this is a big endian
7216 machine, we must put the field into the high-order bits. */
7217 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7218 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7219 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7220 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7221 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7222 - bitsize),
7223 op0, 1);
7224
7225 if (mode == BLKmode)
7226 {
7227 rtx new = assign_temp (build_qualified_type
7228 (type_for_mode (ext_mode, 0),
7229 TYPE_QUAL_CONST), 0, 1, 1);
7230
7231 emit_move_insn (new, op0);
7232 op0 = copy_rtx (new);
7233 PUT_MODE (op0, BLKmode);
7234 set_mem_attributes (op0, exp, 1);
7235 }
7236
7237 return op0;
7238 }
7239
7240 /* If the result is BLKmode, use that to access the object
7241 now as well. */
7242 if (mode == BLKmode)
7243 mode1 = BLKmode;
7244
7245 /* Get a reference to just this component. */
7246 if (modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7248 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7249 else
7250 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7251
7252 if (op0 == orig_op0)
7253 op0 = copy_rtx (op0);
7254
7255 set_mem_attributes (op0, exp, 0);
7256 if (GET_CODE (XEXP (op0, 0)) == REG)
7257 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7258
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7263 return op0;
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7266
7267 convert_move (target, op0, unsignedp);
7268 return target;
7269 }
7270
7271 case VTABLE_REF:
7272 {
7273 rtx insn, before = get_last_insn (), vtbl_ref;
7274
7275 /* Evaluate the interior expression. */
7276 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7277 tmode, modifier);
7278
7279 /* Get or create an instruction off which to hang a note. */
7280 if (REG_P (subtarget))
7281 {
7282 target = subtarget;
7283 insn = get_last_insn ();
7284 if (insn == before)
7285 abort ();
7286 if (! INSN_P (insn))
7287 insn = prev_nonnote_insn (insn);
7288 }
7289 else
7290 {
7291 target = gen_reg_rtx (GET_MODE (subtarget));
7292 insn = emit_move_insn (target, subtarget);
7293 }
7294
7295 /* Collect the data for the note. */
7296 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7297 vtbl_ref = plus_constant (vtbl_ref,
7298 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7299 /* Discard the initial CONST that was added. */
7300 vtbl_ref = XEXP (vtbl_ref, 0);
7301
7302 REG_NOTES (insn)
7303 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7304
7305 return target;
7306 }
7307
7308 /* Intended for a reference to a buffer of a file-object in Pascal.
7309 But it's not certain that a special tree code will really be
7310 necessary for these. INDIRECT_REF might work for them. */
7311 case BUFFER_REF:
7312 abort ();
7313
7314 case IN_EXPR:
7315 {
7316 /* Pascal set IN expression.
7317
7318 Algorithm:
7319 rlo = set_low - (set_low%bits_per_word);
7320 the_word = set [ (index - rlo)/bits_per_word ];
7321 bit_index = index % bits_per_word;
7322 bitmask = 1 << bit_index;
7323 return !!(the_word & bitmask); */
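/* Worked example (editor's note, hypothetical values): with
   set_low = 3, index = 10 and bits_per_word = 8, the algorithm
   above gives rlo = 0, the_word = set[1], bit_index = 2 and
   bitmask = 1 << 2, i.e. we test bit 2 of the second byte of
   the set.  */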
7324
7325 tree set = TREE_OPERAND (exp, 0);
7326 tree index = TREE_OPERAND (exp, 1);
7327 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7328 tree set_type = TREE_TYPE (set);
7329 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7330 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7331 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7332 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7333 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7334 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7335 rtx setaddr = XEXP (setval, 0);
7336 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7337 rtx rlow;
7338 rtx diff, quo, rem, addr, bit, result;
7339
7340 /* If domain is empty, answer is no. Likewise if index is constant
7341 and out of bounds. */
7342 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7343 && TREE_CODE (set_low_bound) == INTEGER_CST
7344 && tree_int_cst_lt (set_high_bound, set_low_bound))
7345 || (TREE_CODE (index) == INTEGER_CST
7346 && TREE_CODE (set_low_bound) == INTEGER_CST
7347 && tree_int_cst_lt (index, set_low_bound))
7348 || (TREE_CODE (set_high_bound) == INTEGER_CST
7349 && TREE_CODE (index) == INTEGER_CST
7350 && tree_int_cst_lt (set_high_bound, index))))
7351 return const0_rtx;
7352
7353 if (target == 0)
7354 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7355
7356 /* If we get here, we have to generate the code for both cases
7357 (in range and out of range). */
7358
7359 op0 = gen_label_rtx ();
7360 op1 = gen_label_rtx ();
7361
7362 if (! (GET_CODE (index_val) == CONST_INT
7363 && GET_CODE (lo_r) == CONST_INT))
7364 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7365 GET_MODE (index_val), iunsignedp, op1);
7366
7367 if (! (GET_CODE (index_val) == CONST_INT
7368 && GET_CODE (hi_r) == CONST_INT))
7369 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7370 GET_MODE (index_val), iunsignedp, op1);
7371
7372 /* Calculate the element number of bit zero in the first word
7373 of the set. */
7374 if (GET_CODE (lo_r) == CONST_INT)
7375 rlow = GEN_INT (INTVAL (lo_r)
7376 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7377 else
7378 rlow = expand_binop (index_mode, and_optab, lo_r,
7379 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7380 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7381
7382 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7383 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7384
7385 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7386 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7387 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7388 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7389
7390 addr = memory_address (byte_mode,
7391 expand_binop (index_mode, add_optab, diff,
7392 setaddr, NULL_RTX, iunsignedp,
7393 OPTAB_LIB_WIDEN));
7394
7395 /* Extract the bit we want to examine. */
7396 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7397 gen_rtx_MEM (byte_mode, addr),
7398 make_tree (TREE_TYPE (index), rem),
7399 NULL_RTX, 1);
7400 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7401 GET_MODE (target) == byte_mode ? target : 0,
7402 1, OPTAB_LIB_WIDEN);
7403
7404 if (result != target)
7405 convert_move (target, result, 1);
7406
7407 /* Output the code to handle the out-of-range case. */
7408 emit_jump (op0);
7409 emit_label (op1);
7410 emit_move_insn (target, const0_rtx);
7411 emit_label (op0);
7412 return target;
7413 }
7414
7415 case WITH_CLEANUP_EXPR:
7416 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7417 {
7418 WITH_CLEANUP_EXPR_RTL (exp)
7419 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7420 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7421
7422 /* That's it for this cleanup. */
7423 TREE_OPERAND (exp, 1) = 0;
7424 }
7425 return WITH_CLEANUP_EXPR_RTL (exp);
7426
7427 case CLEANUP_POINT_EXPR:
7428 {
7429 /* Start a new binding layer that will keep track of all cleanup
7430 actions to be performed. */
7431 expand_start_bindings (2);
7432
7433 target_temp_slot_level = temp_slot_level;
7434
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7436 /* If we're going to use this value, load it up now. */
7437 if (! ignore)
7438 op0 = force_not_mem (op0);
7439 preserve_temp_slots (op0);
7440 expand_end_bindings (NULL_TREE, 0, 0);
7441 }
7442 return op0;
7443
7444 case CALL_EXPR:
7445 /* Check for a built-in function. */
7446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7447 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7448 == FUNCTION_DECL)
7449 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7450 {
7451 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7452 == BUILT_IN_FRONTEND)
7453 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7454 else
7455 return expand_builtin (exp, target, subtarget, tmode, ignore);
7456 }
7457
7458 return expand_call (exp, target, ignore);
7459
7460 case NON_LVALUE_EXPR:
7461 case NOP_EXPR:
7462 case CONVERT_EXPR:
7463 case REFERENCE_EXPR:
7464 if (TREE_OPERAND (exp, 0) == error_mark_node)
7465 return const0_rtx;
7466
7467 if (TREE_CODE (type) == UNION_TYPE)
7468 {
7469 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7470
7471 /* If both input and output are BLKmode, this conversion isn't doing
7472 anything except possibly changing memory attribute. */
7473 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7474 {
7475 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7476 modifier);
7477
7478 result = copy_rtx (result);
7479 set_mem_attributes (result, exp, 0);
7480 return result;
7481 }
7482
7483 if (target == 0)
7484 target = assign_temp (type, 0, 1, 1);
7485
7486 if (GET_CODE (target) == MEM)
7487 /* Store data into beginning of memory target. */
7488 store_expr (TREE_OPERAND (exp, 0),
7489 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7490
7491 else if (GET_CODE (target) == REG)
7492 /* Store this field into a union of the proper type. */
7493 store_field (target,
7494 MIN ((int_size_in_bytes (TREE_TYPE
7495 (TREE_OPERAND (exp, 0)))
7496 * BITS_PER_UNIT),
7497 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7498 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7499 VOIDmode, 0, type, 0);
7500 else
7501 abort ();
7502
7503 /* Return the entire union. */
7504 return target;
7505 }
7506
7507 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7508 {
7509 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7510 ro_modifier);
7511
7512 /* If the signedness of the conversion differs and OP0 is
7513 a promoted SUBREG, clear that indication since we now
7514 have to do the proper extension. */
7515 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7516 && GET_CODE (op0) == SUBREG)
7517 SUBREG_PROMOTED_VAR_P (op0) = 0;
7518
7519 return op0;
7520 }
7521
7522 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7523 if (GET_MODE (op0) == mode)
7524 return op0;
7525
7526 /* If OP0 is a constant, just convert it into the proper mode. */
7527 if (CONSTANT_P (op0))
7528 return
7529 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7530 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7531
7532 if (modifier == EXPAND_INITIALIZER)
7533 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7534
7535 if (target == 0)
7536 return
7537 convert_to_mode (mode, op0,
7538 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7539 else
7540 convert_move (target, op0,
7541 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7542 return target;
7543
7544 case VIEW_CONVERT_EXPR:
7545 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, ro_modifier);
7546
7547 /* If the input and output modes are both the same, we are done.
7548 Otherwise, if neither mode is BLKmode and both are within a word, we
7549 can use gen_lowpart. If neither is true, make sure the operand is
7550 in memory and convert the MEM to the new mode. */
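/* Editor's note: a typical instance is an Ada unchecked conversion
   viewing a 32-bit float as a 32-bit integer; in that hypothetical
   case both modes fit in a word, so gen_lowpart below suffices.
   Otherwise the value is spilled to memory and re-read in the new
   mode.  */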
7551 if (TYPE_MODE (type) == GET_MODE (op0))
7552 ;
7553 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7554 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7555 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7556 op0 = gen_lowpart (TYPE_MODE (type), op0);
7557 else if (GET_CODE (op0) != MEM)
7558 {
7559 /* If the operand is not a MEM, force it into memory. Since we
7560 are going to be changing the mode of the MEM, don't call
7561 force_const_mem for constants because we don't allow pool
7562 constants to change mode. */
7563 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7564
7565 if (TREE_ADDRESSABLE (exp))
7566 abort ();
7567
7568 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7569 target
7570 = assign_stack_temp_for_type
7571 (TYPE_MODE (inner_type),
7572 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7573
7574 emit_move_insn (target, op0);
7575 op0 = target;
7576 }
7577
7578 /* At this point, OP0 is in the correct mode. If the output type is such
7579 that the operand is known to be aligned, indicate that it is.
7580 Otherwise, we need only be concerned about alignment for non-BLKmode
7581 results. */
7582 if (GET_CODE (op0) == MEM)
7583 {
7584 op0 = copy_rtx (op0);
7585
7586 if (TYPE_ALIGN_OK (type))
7587 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7588 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7589 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7590 {
7591 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7592 HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type),
7593 GET_MODE_SIZE (TYPE_MODE (type)));
7594 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7595 temp_size, 0, type);
7596 rtx new_with_op0_mode = copy_rtx (new);
7597
7598 if (TREE_ADDRESSABLE (exp))
7599 abort ();
7600
7601 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7602 if (GET_MODE (op0) == BLKmode)
7603 emit_block_move (new_with_op0_mode, op0,
7604 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7605 else
7606 emit_move_insn (new_with_op0_mode, op0);
7607
7608 op0 = new;
7609 }
7610
7611 PUT_MODE (op0, TYPE_MODE (type));
7612 }
7613
7614 return op0;
7615
7616 case PLUS_EXPR:
7617 /* We come here from MINUS_EXPR when the second operand is a
7618 constant. */
7619 plus_expr:
7620 this_optab = ! unsignedp && flag_trapv
7621 && (GET_MODE_CLASS(mode) == MODE_INT)
7622 ? addv_optab : add_optab;
7623
7624 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7625 something else, make sure we add the register to the constant and
7626 then to the other thing. This case can occur during strength
7627 reduction and doing it this way will produce better code if the
7628 frame pointer or argument pointer is eliminated.
7629
7630 fold-const.c will ensure that the constant is always in the inner
7631 PLUS_EXPR, so the only case we need to do anything about is if
7632 sp, ap, or fp is our second argument, in which case we must swap
7633 the innermost first argument and our second argument. */
7634
7635 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7636 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7637 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7638 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7639 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7640 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7641 {
7642 tree t = TREE_OPERAND (exp, 1);
7643
7644 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7645 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7646 }
7647
7648 /* If the result is to be ptr_mode and we are adding an integer to
7649 something, we might be forming a constant. So try to use
7650 plus_constant. If it produces a sum and we can't accept it,
7651 use force_operand. This allows P = &ARR[const] to generate
7652 efficient code on machines where a SYMBOL_REF is not a valid
7653 address.
7654
7655 If this is an EXPAND_SUM call, always return the sum. */
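/* Illustration (editor's note, hypothetical declarations): for
     static int arr[10];
     int *p = &arr[5];
   the initializer ideally becomes the single rtx
   (plus (symbol_ref "arr") (const_int 20)) (assuming 4-byte int)
   rather than an explicit add instruction, even if that sum is not
   itself a legitimate address on the target.  */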
7656 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7657 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7658 {
7659 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7660 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7661 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7662 {
7663 rtx constant_part;
7664
7665 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7666 EXPAND_SUM);
7667 /* Use immed_double_const to ensure that the constant is
7668 truncated according to the mode of OP1, then sign extended
7669 to a HOST_WIDE_INT. Using the constant directly can result
7670 in non-canonical RTL in a 64x32 cross compile. */
7671 constant_part
7672 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7673 (HOST_WIDE_INT) 0,
7674 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7675 op1 = plus_constant (op1, INTVAL (constant_part));
7676 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7677 op1 = force_operand (op1, target);
7678 return op1;
7679 }
7680
7681 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7682 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7683 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7684 {
7685 rtx constant_part;
7686
7687 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7688 EXPAND_SUM);
7689 if (! CONSTANT_P (op0))
7690 {
7691 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7692 VOIDmode, modifier);
7693 /* Don't go to both_summands if modifier
7694 says it's not right to return a PLUS. */
7695 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7696 goto binop2;
7697 goto both_summands;
7698 }
7699 /* Use immed_double_const to ensure that the constant is
7700 truncated according to the mode of OP0, then sign extended
7701 to a HOST_WIDE_INT. Using the constant directly can result
7702 in non-canonical RTL in a 64x32 cross compile. */
7703 constant_part
7704 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7705 (HOST_WIDE_INT) 0,
7706 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7707 op0 = plus_constant (op0, INTVAL (constant_part));
7708 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7709 op0 = force_operand (op0, target);
7710 return op0;
7711 }
7712 }
7713
7714 /* No sense saving up arithmetic to be done
7715 if it's all in the wrong mode to form part of an address.
7716 And force_operand won't know whether to sign-extend or
7717 zero-extend. */
7718 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7719 || mode != ptr_mode)
7720 goto binop;
7721
7722 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7723 subtarget = 0;
7724
7725 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7726 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7727
7728 both_summands:
7729 /* Make sure any term that's a sum with a constant comes last. */
7730 if (GET_CODE (op0) == PLUS
7731 && CONSTANT_P (XEXP (op0, 1)))
7732 {
7733 temp = op0;
7734 op0 = op1;
7735 op1 = temp;
7736 }
7737 /* If adding to a sum including a constant,
7738 associate it to put the constant outside. */
7739 if (GET_CODE (op1) == PLUS
7740 && CONSTANT_P (XEXP (op1, 1)))
7741 {
7742 rtx constant_term = const0_rtx;
7743
7744 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7745 if (temp != 0)
7746 op0 = temp;
7747 /* Ensure that MULT comes first if there is one. */
7748 else if (GET_CODE (op0) == MULT)
7749 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7750 else
7751 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7752
7753 /* Let's also eliminate constants from op0 if possible. */
7754 op0 = eliminate_constant_term (op0, &constant_term);
7755
7756 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7757 their sum should be a constant. Form it into OP1, since the
7758 result we want will then be OP0 + OP1. */
7759
7760 temp = simplify_binary_operation (PLUS, mode, constant_term,
7761 XEXP (op1, 1));
7762 if (temp != 0)
7763 op1 = temp;
7764 else
7765 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7766 }
7767
7768 /* Put a constant term last and put a multiplication first. */
7769 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7770 temp = op1, op1 = op0, op0 = temp;
7771
7772 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7773 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7774
7775 case MINUS_EXPR:
7776 /* For initializers, we are allowed to return a MINUS of two
7777 symbolic constants. Here we handle all cases when both operands
7778 are constant. */
7779 /* Handle difference of two symbolic constants,
7780 for the sake of an initializer. */
7781 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7782 && really_constant_p (TREE_OPERAND (exp, 0))
7783 && really_constant_p (TREE_OPERAND (exp, 1)))
7784 {
7785 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7786 VOIDmode, ro_modifier);
7787 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7788 VOIDmode, ro_modifier);
7789
7790 /* If the last operand is a CONST_INT, use plus_constant of
7791 the negated constant. Else make the MINUS. */
7792 if (GET_CODE (op1) == CONST_INT)
7793 return plus_constant (op0, - INTVAL (op1));
7794 else
7795 return gen_rtx_MINUS (mode, op0, op1);
7796 }
7797 /* Convert A - const to A + (-const). */
7798 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7799 {
7800 tree negated = fold (build1 (NEGATE_EXPR, type,
7801 TREE_OPERAND (exp, 1)));
7802
7803 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7804 /* If we can't negate the constant in TYPE, leave it alone and
7805 expand_binop will negate it for us. We used to try to do it
7806 here in the signed version of TYPE, but that doesn't work
7807 on POINTER_TYPEs. */;
7808 else
7809 {
7810 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7811 goto plus_expr;
7812 }
7813 }
7814 this_optab = ! unsignedp && flag_trapv
7815 && (GET_MODE_CLASS(mode) == MODE_INT)
7816 ? subv_optab : sub_optab;
7817 goto binop;
7818
7819 case MULT_EXPR:
7820 /* If first operand is constant, swap them.
7821 Thus the following special case checks need only
7822 check the second operand. */
7823 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7824 {
7825 tree t1 = TREE_OPERAND (exp, 0);
7826 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7827 TREE_OPERAND (exp, 1) = t1;
7828 }
7829
7830 /* Attempt to return something suitable for generating an
7831 indexed address, for machines that support that. */
7832
7833 if (modifier == EXPAND_SUM && mode == ptr_mode
7834 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7835 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7836 {
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7838 EXPAND_SUM);
7839
7840 /* Apply distributive law if OP0 is x+c. */
7841 if (GET_CODE (op0) == PLUS
7842 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7843 return
7844 gen_rtx_PLUS
7845 (mode,
7846 gen_rtx_MULT
7847 (mode, XEXP (op0, 0),
7848 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7849 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7850 * INTVAL (XEXP (op0, 1))));
7851
7852 if (GET_CODE (op0) != REG)
7853 op0 = force_operand (op0, NULL_RTX);
7854 if (GET_CODE (op0) != REG)
7855 op0 = copy_to_mode_reg (mode, op0);
7856
7857 return
7858 gen_rtx_MULT (mode, op0,
7859 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7860 }
7861
7862 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7863 subtarget = 0;
7864
7865 /* Check for multiplying things that have been extended
7866 from a narrower type. If this machine supports multiplying
7867 in that narrower type with a result in the desired type,
7868 do it that way, and avoid the explicit type-conversion. */
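/* For example (editor's note): a source expression such as
     (long long) i * (long long) j
   with 32-bit int operands can often be handled by a single widening
   multiply pattern (e.g. mulsidi3 or umulsidi3) instead of extending
   both operands to the wider mode first.  */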
7869 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7870 && TREE_CODE (type) == INTEGER_TYPE
7871 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7872 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7873 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7874 && int_fits_type_p (TREE_OPERAND (exp, 1),
7875 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7876 /* Don't use a widening multiply if a shift will do. */
7877 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7878 > HOST_BITS_PER_WIDE_INT)
7879 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7880 ||
7881 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7883 ==
7884 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7885 /* If both operands are extended, they must either both
7886 be zero-extended or both be sign-extended. */
7887 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7888 ==
7889 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7890 {
7891 enum machine_mode innermode
7892 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7893 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7894 ? smul_widen_optab : umul_widen_optab);
7895 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7896 ? umul_widen_optab : smul_widen_optab);
7897 if (mode == GET_MODE_WIDER_MODE (innermode))
7898 {
7899 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7900 {
7901 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7902 NULL_RTX, VOIDmode, 0);
7903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7905 VOIDmode, 0);
7906 else
7907 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7908 NULL_RTX, VOIDmode, 0);
7909 goto binop2;
7910 }
7911 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7912 && innermode == word_mode)
7913 {
7914 rtx htem;
7915 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7916 NULL_RTX, VOIDmode, 0);
7917 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7918 op1 = convert_modes (innermode, mode,
7919 expand_expr (TREE_OPERAND (exp, 1),
7920 NULL_RTX, VOIDmode, 0),
7921 unsignedp);
7922 else
7923 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7924 NULL_RTX, VOIDmode, 0);
7925 temp = expand_binop (mode, other_optab, op0, op1, target,
7926 unsignedp, OPTAB_LIB_WIDEN);
7927 htem = expand_mult_highpart_adjust (innermode,
7928 gen_highpart (innermode, temp),
7929 op0, op1,
7930 gen_highpart (innermode, temp),
7931 unsignedp);
7932 emit_move_insn (gen_highpart (innermode, temp), htem);
7933 return temp;
7934 }
7935 }
7936 }
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7938 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7939 return expand_mult (mode, op0, op1, target, unsignedp);
7940
7941 case TRUNC_DIV_EXPR:
7942 case FLOOR_DIV_EXPR:
7943 case CEIL_DIV_EXPR:
7944 case ROUND_DIV_EXPR:
7945 case EXACT_DIV_EXPR:
7946 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7947 subtarget = 0;
7948 /* Possible optimization: compute the dividend with EXPAND_SUM
7949 then if the divisor is constant can optimize the case
7950 where some terms of the dividend have coeffs divisible by it. */
7951 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7952 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7953 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7954
7955 case RDIV_EXPR:
7956 /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7957 saving the expensive divide.  If not, combine will rebuild the original
7958 computation. */
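/* For instance (editor's note): in "x / d + y / d" each division
   becomes a multiplication by 1/d, so CSE can share the single
   reciprocal and the two divides collapse to one divide plus two
   multiplies.  */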
7959 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7960 && !real_onep (TREE_OPERAND (exp, 0)))
7961 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7962 build (RDIV_EXPR, type,
7963 build_real (type, dconst1),
7964 TREE_OPERAND (exp, 1))),
7965 target, tmode, unsignedp);
7966 this_optab = sdiv_optab;
7967 goto binop;
7968
7969 case TRUNC_MOD_EXPR:
7970 case FLOOR_MOD_EXPR:
7971 case CEIL_MOD_EXPR:
7972 case ROUND_MOD_EXPR:
7973 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7974 subtarget = 0;
7975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7976 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7977 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7978
7979 case FIX_ROUND_EXPR:
7980 case FIX_FLOOR_EXPR:
7981 case FIX_CEIL_EXPR:
7982 abort (); /* Not used for C. */
7983
7984 case FIX_TRUNC_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7986 if (target == 0)
7987 target = gen_reg_rtx (mode);
7988 expand_fix (target, op0, unsignedp);
7989 return target;
7990
7991 case FLOAT_EXPR:
7992 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7993 if (target == 0)
7994 target = gen_reg_rtx (mode);
7995 /* expand_float can't figure out what to do if FROM has VOIDmode.
7996 So give it the correct mode. With -O, cse will optimize this. */
7997 if (GET_MODE (op0) == VOIDmode)
7998 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7999 op0);
8000 expand_float (target, op0,
8001 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8002 return target;
8003
8004 case NEGATE_EXPR:
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8006 temp = expand_unop (mode,
8007 ! unsignedp && flag_trapv
8008 && (GET_MODE_CLASS(mode) == MODE_INT)
8009 ? negv_optab : neg_optab, op0, target, 0);
8010 if (temp == 0)
8011 abort ();
8012 return temp;
8013
8014 case ABS_EXPR:
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8016
8017 /* Handle complex values specially. */
8018 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8019 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8020 return expand_complex_abs (mode, op0, target, unsignedp);
8021
8022 /* Unsigned abs is simply the operand. Testing here means we don't
8023 risk generating incorrect code below. */
8024 if (TREE_UNSIGNED (type))
8025 return op0;
8026
8027 return expand_abs (mode, op0, target, unsignedp,
8028 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8029
8030 case MAX_EXPR:
8031 case MIN_EXPR:
8032 target = original_target;
8033 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8034 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8035 || GET_MODE (target) != mode
8036 || (GET_CODE (target) == REG
8037 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8038 target = gen_reg_rtx (mode);
8039 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8040 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8041
8042 /* First try to do it with a special MIN or MAX instruction.
8043 If that does not win, use a conditional jump to select the proper
8044 value. */
8045 this_optab = (TREE_UNSIGNED (type)
8046 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8047 : (code == MIN_EXPR ? smin_optab : smax_optab));
8048
8049 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8050 OPTAB_WIDEN);
8051 if (temp != 0)
8052 return temp;
8053
8054 /* At this point, a MEM target is no longer useful; we will get better
8055 code without it. */
8056
8057 if (GET_CODE (target) == MEM)
8058 target = gen_reg_rtx (mode);
8059
8060 if (target != op0)
8061 emit_move_insn (target, op0);
8062
8063 op0 = gen_label_rtx ();
8064
8065 /* If this mode is an integer too wide to compare properly,
8066 compare word by word. Rely on cse to optimize constant cases. */
8067 if (GET_MODE_CLASS (mode) == MODE_INT
8068 && ! can_compare_p (GE, mode, ccp_jump))
8069 {
8070 if (code == MAX_EXPR)
8071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8072 target, op1, NULL_RTX, op0);
8073 else
8074 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8075 op1, target, NULL_RTX, op0);
8076 }
8077 else
8078 {
8079 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8080 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8081 unsignedp, mode, NULL_RTX, NULL_RTX,
8082 op0);
8083 }
8084 emit_move_insn (target, op1);
8085 emit_label (op0);
8086 return target;
8087
8088 case BIT_NOT_EXPR:
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8090 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8091 if (temp == 0)
8092 abort ();
8093 return temp;
8094
8095 case FFS_EXPR:
8096 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8097 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8098 if (temp == 0)
8099 abort ();
8100 return temp;
8101
8102 /* ??? Can optimize bitwise operations with one arg constant.
8103 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8104 and (a bitwise1 b) bitwise2 b (etc)
8105 but that is probably not worth while. */
8106
8107 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8108 boolean values when we want in all cases to compute both of them. In
8109 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8110 as actual zero-or-1 values and then bitwise anding. In cases where
8111 there cannot be any side effects, better code would be made by
8112 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8113 how to recognize those cases. */
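/* E.g. (editor's note) "a & b" is BIT_AND_EXPR, while "a && b" with
   side-effect-free operands may reach us folded into TRUTH_AND_EXPR;
   both are expanded here as a plain AND of zero-or-one values.  By
   contrast TRUTH_ANDIF_EXPR ("f () && g ()") must branch so that the
   second operand is not evaluated when the first is zero.  */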
8114
8115 case TRUTH_AND_EXPR:
8116 case BIT_AND_EXPR:
8117 this_optab = and_optab;
8118 goto binop;
8119
8120 case TRUTH_OR_EXPR:
8121 case BIT_IOR_EXPR:
8122 this_optab = ior_optab;
8123 goto binop;
8124
8125 case TRUTH_XOR_EXPR:
8126 case BIT_XOR_EXPR:
8127 this_optab = xor_optab;
8128 goto binop;
8129
8130 case LSHIFT_EXPR:
8131 case RSHIFT_EXPR:
8132 case LROTATE_EXPR:
8133 case RROTATE_EXPR:
8134 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8135 subtarget = 0;
8136 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8137 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8138 unsignedp);
8139
8140 /* Could determine the answer when only additive constants differ. Also,
8141 the addition of one can be handled by changing the condition. */
8142 case LT_EXPR:
8143 case LE_EXPR:
8144 case GT_EXPR:
8145 case GE_EXPR:
8146 case EQ_EXPR:
8147 case NE_EXPR:
8148 case UNORDERED_EXPR:
8149 case ORDERED_EXPR:
8150 case UNLT_EXPR:
8151 case UNLE_EXPR:
8152 case UNGT_EXPR:
8153 case UNGE_EXPR:
8154 case UNEQ_EXPR:
8155 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8156 if (temp != 0)
8157 return temp;
8158
8159 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8160 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8161 && original_target
8162 && GET_CODE (original_target) == REG
8163 && (GET_MODE (original_target)
8164 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8165 {
8166 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8167 VOIDmode, 0);
8168
8169 if (temp != original_target)
8170 temp = copy_to_reg (temp);
8171
8172 op1 = gen_label_rtx ();
8173 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8174 GET_MODE (temp), unsignedp, op1);
8175 emit_move_insn (temp, const1_rtx);
8176 emit_label (op1);
8177 return temp;
8178 }
8179
8180 /* If no set-flag instruction, must generate a conditional
8181 store into a temporary variable. Drop through
8182 and handle this like && and ||. */
8183
8184 case TRUTH_ANDIF_EXPR:
8185 case TRUTH_ORIF_EXPR:
8186 if (! ignore
8187 && (target == 0 || ! safe_from_p (target, exp, 1)
8188 /* Make sure we don't have a hard reg (such as function's return
8189 value) live across basic blocks, if not optimizing. */
8190 || (!optimize && GET_CODE (target) == REG
8191 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8192 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8193
8194 if (target)
8195 emit_clr_insn (target);
8196
8197 op1 = gen_label_rtx ();
8198 jumpifnot (exp, op1);
8199
8200 if (target)
8201 emit_0_to_1_insn (target);
8202
8203 emit_label (op1);
8204 return ignore ? const0_rtx : target;
8205
8206 case TRUTH_NOT_EXPR:
8207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8208 /* The parser is careful to generate TRUTH_NOT_EXPR
8209 only with operands that are always zero or one. */
8210 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8211 target, 1, OPTAB_LIB_WIDEN);
8212 if (temp == 0)
8213 abort ();
8214 return temp;
8215
8216 case COMPOUND_EXPR:
8217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8218 emit_queue ();
8219 return expand_expr (TREE_OPERAND (exp, 1),
8220 (ignore ? const0_rtx : target),
8221 VOIDmode, 0);
8222
8223 case COND_EXPR:
8224 /* If we would have a "singleton" (see below) were it not for a
8225 conversion in each arm, bring that conversion back out. */
8226 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8227 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8228 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8229 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8230 {
8231 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8232 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8233
8234 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8235 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8236 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8237 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8238 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8239 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8240 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8241 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8242 return expand_expr (build1 (NOP_EXPR, type,
8243 build (COND_EXPR, TREE_TYPE (iftrue),
8244 TREE_OPERAND (exp, 0),
8245 iftrue, iffalse)),
8246 target, tmode, modifier);
8247 }
8248
8249 {
8250 /* Note that COND_EXPRs whose type is a structure or union
8251 are required to be constructed to contain assignments of
8252 a temporary variable, so that we can evaluate them here
8253 for side effect only. If type is void, we must do likewise. */
8254
8255 /* If an arm of the branch requires a cleanup,
8256 only that cleanup is performed. */
8257
8258 tree singleton = 0;
8259 tree binary_op = 0, unary_op = 0;
8260
8261 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8262 convert it to our mode, if necessary. */
8263 if (integer_onep (TREE_OPERAND (exp, 1))
8264 && integer_zerop (TREE_OPERAND (exp, 2))
8265 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8266 {
8267 if (ignore)
8268 {
8269 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8270 ro_modifier);
8271 return const0_rtx;
8272 }
8273
8274 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8275 if (GET_MODE (op0) == mode)
8276 return op0;
8277
8278 if (target == 0)
8279 target = gen_reg_rtx (mode);
8280 convert_move (target, op0, unsignedp);
8281 return target;
8282 }
8283
8284 /* Check for X ? A + B : A. If we have this, we can copy A to the
8285 output and conditionally add B. Similarly for unary operations.
8286 Don't do this if X has side-effects because those side effects
8287 might affect A or B and the "?" operation is a sequence point in
8288 ANSI. (operand_equal_p tests for side effects.) */
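/* For instance (editor's note): in "cond ? x + y : x" SINGLETON
   below is the tree for "x" and BINARY_OP is the PLUS_EXPR arm;
   in "cond ? -x : x" the NEGATE_EXPR arm becomes UNARY_OP.  */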
8289
8290 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8291 && operand_equal_p (TREE_OPERAND (exp, 2),
8292 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8293 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8294 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8295 && operand_equal_p (TREE_OPERAND (exp, 1),
8296 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8297 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8298 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8299 && operand_equal_p (TREE_OPERAND (exp, 2),
8300 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8301 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8302 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8303 && operand_equal_p (TREE_OPERAND (exp, 1),
8304 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8305 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8306
8307 /* If we are not to produce a result, we have no target. Otherwise,
8308 if a target was specified use it; it will not be used as an
8309 intermediate target unless it is safe. If no target, use a
8310 temporary. */
8311
8312 if (ignore)
8313 temp = 0;
8314 else if (original_target
8315 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8316 || (singleton && GET_CODE (original_target) == REG
8317 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8318 && original_target == var_rtx (singleton)))
8319 && GET_MODE (original_target) == mode
8320 #ifdef HAVE_conditional_move
8321 && (! can_conditionally_move_p (mode)
8322 || GET_CODE (original_target) == REG
8323 || TREE_ADDRESSABLE (type))
8324 #endif
8325 && (GET_CODE (original_target) != MEM
8326 || TREE_ADDRESSABLE (type)))
8327 temp = original_target;
8328 else if (TREE_ADDRESSABLE (type))
8329 abort ();
8330 else
8331 temp = assign_temp (type, 0, 0, 1);
8332
8333 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8334 do the test of X as a store-flag operation, do this as
8335 A + ((X != 0) << log C). Similarly for other simple binary
8336 operators. Only do for C == 1 if BRANCH_COST is low. */
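/* E.g. (editor's note) "x ? a + 4 : a" can be emitted as
   "a + ((x != 0) << 2)" when the test of X can be done as a
   store-flag operation, avoiding a conditional branch.  */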
8337 if (temp && singleton && binary_op
8338 && (TREE_CODE (binary_op) == PLUS_EXPR
8339 || TREE_CODE (binary_op) == MINUS_EXPR
8340 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8341 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8342 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8343 : integer_onep (TREE_OPERAND (binary_op, 1)))
8344 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8345 {
8346 rtx result;
8347 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8348 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8349 ? addv_optab : add_optab)
8350 : TREE_CODE (binary_op) == MINUS_EXPR
8351 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8352 ? subv_optab : sub_optab)
8353 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8354 : xor_optab);
8355
8356 /* If we had X ? A : A + 1, do this as A + (X == 0).
8357
8358 We have to invert the truth value here and then put it
8359 back later if do_store_flag fails. We cannot simply copy
8360 TREE_OPERAND (exp, 0) to another variable and modify that
8361 because invert_truthvalue can modify the tree pointed to
8362 by its argument. */
8363 if (singleton == TREE_OPERAND (exp, 1))
8364 TREE_OPERAND (exp, 0)
8365 = invert_truthvalue (TREE_OPERAND (exp, 0));
8366
8367 result = do_store_flag (TREE_OPERAND (exp, 0),
8368 (safe_from_p (temp, singleton, 1)
8369 ? temp : NULL_RTX),
8370 mode, BRANCH_COST <= 1);
8371
8372 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8373 result = expand_shift (LSHIFT_EXPR, mode, result,
8374 build_int_2 (tree_log2
8375 (TREE_OPERAND
8376 (binary_op, 1)),
8377 0),
8378 (safe_from_p (temp, singleton, 1)
8379 ? temp : NULL_RTX), 0);
8380
8381 if (result)
8382 {
8383 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8384 return expand_binop (mode, boptab, op1, result, temp,
8385 unsignedp, OPTAB_LIB_WIDEN);
8386 }
8387 else if (singleton == TREE_OPERAND (exp, 1))
8388 TREE_OPERAND (exp, 0)
8389 = invert_truthvalue (TREE_OPERAND (exp, 0));
8390 }
8391
8392 do_pending_stack_adjust ();
8393 NO_DEFER_POP;
8394 op0 = gen_label_rtx ();
8395
8396 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8397 {
8398 if (temp != 0)
8399 {
8400 /* If the target conflicts with the other operand of the
8401 binary op, we can't use it. Also, we can't use the target
8402 if it is a hard register, because evaluating the condition
8403 might clobber it. */
8404 if ((binary_op
8405 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8406 || (GET_CODE (temp) == REG
8407 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8408 temp = gen_reg_rtx (mode);
8409 store_expr (singleton, temp, 0);
8410 }
8411 else
8412 expand_expr (singleton,
8413 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8414 if (singleton == TREE_OPERAND (exp, 1))
8415 jumpif (TREE_OPERAND (exp, 0), op0);
8416 else
8417 jumpifnot (TREE_OPERAND (exp, 0), op0);
8418
8419 start_cleanup_deferral ();
8420 if (binary_op && temp == 0)
8421 /* Just touch the other operand. */
8422 expand_expr (TREE_OPERAND (binary_op, 1),
8423 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8424 else if (binary_op)
8425 store_expr (build (TREE_CODE (binary_op), type,
8426 make_tree (type, temp),
8427 TREE_OPERAND (binary_op, 1)),
8428 temp, 0);
8429 else
8430 store_expr (build1 (TREE_CODE (unary_op), type,
8431 make_tree (type, temp)),
8432 temp, 0);
8433 op1 = op0;
8434 }
8435 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8436 comparison operator. If we have one of these cases, set the
8437 output to A, branch on A (cse will merge these two references),
8438 then set the output to FOO. */
8439 else if (temp
8440 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8441 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8442 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8443 TREE_OPERAND (exp, 1), 0)
8444 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8445 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8446 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8447 {
8448 if (GET_CODE (temp) == REG
8449 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8450 temp = gen_reg_rtx (mode);
8451 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8452 jumpif (TREE_OPERAND (exp, 0), op0);
8453
8454 start_cleanup_deferral ();
8455 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8456 op1 = op0;
8457 }
8458 else if (temp
8459 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8460 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8461 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8462 TREE_OPERAND (exp, 2), 0)
8463 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8464 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8465 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8466 {
8467 if (GET_CODE (temp) == REG
8468 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8469 temp = gen_reg_rtx (mode);
8470 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8471 jumpifnot (TREE_OPERAND (exp, 0), op0);
8472
8473 start_cleanup_deferral ();
8474 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8475 op1 = op0;
8476 }
8477 else
8478 {
8479 op1 = gen_label_rtx ();
8480 jumpifnot (TREE_OPERAND (exp, 0), op0);
8481
8482 start_cleanup_deferral ();
8483
8484 /* One branch of the cond can be void, if it never returns. For
8485 example A ? throw : E */
8486 if (temp != 0
8487 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8488 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8489 else
8490 expand_expr (TREE_OPERAND (exp, 1),
8491 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8492 end_cleanup_deferral ();
8493 emit_queue ();
8494 emit_jump_insn (gen_jump (op1));
8495 emit_barrier ();
8496 emit_label (op0);
8497 start_cleanup_deferral ();
8498 if (temp != 0
8499 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8500 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8501 else
8502 expand_expr (TREE_OPERAND (exp, 2),
8503 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8504 }
8505
8506 end_cleanup_deferral ();
8507
8508 emit_queue ();
8509 emit_label (op1);
8510 OK_DEFER_POP;
8511
8512 return temp;
8513 }
8514
8515 case TARGET_EXPR:
8516 {
8517 /* Something needs to be initialized, but we didn't know
8518 where that thing was when building the tree. For example,
8519 it could be the return value of a function, or a parameter
8520 to a function which is laid down on the stack, or a temporary
8521 variable which must be passed by reference.
8522
8523 We guarantee that the expression will either be constructed
8524 or copied into our original target. */
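/* E.g. (editor's note, C++-style illustration): for
     struct S s = f ();
   the call may be wrapped in a TARGET_EXPR whose slot is "s", so
   the return value is constructed directly in "s" rather than in a
   separate temporary.  */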
8525
8526 tree slot = TREE_OPERAND (exp, 0);
8527 tree cleanups = NULL_TREE;
8528 tree exp1;
8529
8530 if (TREE_CODE (slot) != VAR_DECL)
8531 abort ();
8532
8533 if (! ignore)
8534 target = original_target;
8535
8536 /* Set this here so that if we get a target that refers to a
8537 register variable that's already been used, put_reg_into_stack
8538 knows that it should fix up those uses. */
8539 TREE_USED (slot) = 1;
8540
8541 if (target == 0)
8542 {
8543 if (DECL_RTL_SET_P (slot))
8544 {
8545 target = DECL_RTL (slot);
8546 /* If we have already expanded the slot, don't do
8547 it again. (mrs) */
8548 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8549 return target;
8550 }
8551 else
8552 {
8553 target = assign_temp (type, 2, 0, 1);
8554 /* All temp slots at this level must not conflict. */
8555 preserve_temp_slots (target);
8556 SET_DECL_RTL (slot, target);
8557 if (TREE_ADDRESSABLE (slot))
8558 put_var_into_stack (slot);
8559
8560 /* Since SLOT is not known to the called function
8561 to belong to its stack frame, we must build an explicit
8562 cleanup. This case occurs when we must build up a reference
8563 to pass the reference as an argument. In this case,
8564 it is very likely that such a reference need not be
8565 built here. */
8566
8567 if (TREE_OPERAND (exp, 2) == 0)
8568 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8569 cleanups = TREE_OPERAND (exp, 2);
8570 }
8571 }
8572 else
8573 {
8574 /* This case does occur when expanding a parameter which
8575 needs to be constructed on the stack. The target
8576 is the actual stack address that we want to initialize.
8577 The function we call will perform the cleanup in this case. */
8578
8579 /* If we have already assigned it space, use that space,
8580 not target that we were passed in, as our target
8581 parameter is only a hint. */
8582 if (DECL_RTL_SET_P (slot))
8583 {
8584 target = DECL_RTL (slot);
8585 /* If we have already expanded the slot, don't do
8586 it again. (mrs) */
8587 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8588 return target;
8589 }
8590 else
8591 {
8592 SET_DECL_RTL (slot, target);
8593 /* If we must have an addressable slot, then make sure that
8594 the RTL that we just stored in slot is OK. */
8595 if (TREE_ADDRESSABLE (slot))
8596 put_var_into_stack (slot);
8597 }
8598 }
8599
8600 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8601 /* Mark it as expanded. */
8602 TREE_OPERAND (exp, 1) = NULL_TREE;
8603
8604 store_expr (exp1, target, 0);
8605
8606 expand_decl_cleanup (NULL_TREE, cleanups);
8607
8608 return target;
8609 }
8610
8611 case INIT_EXPR:
8612 {
8613 tree lhs = TREE_OPERAND (exp, 0);
8614 tree rhs = TREE_OPERAND (exp, 1);
8615
8616 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8617 return temp;
8618 }
8619
8620 case MODIFY_EXPR:
8621 {
8622 /* If lhs is complex, expand calls in rhs before computing it.
8623 That's so we don't compute a pointer and save it over a
8624 call. If lhs is simple, compute it first so we can give it
8625 as a target if the rhs is just a call. This avoids an
8626 extra temp and copy and that prevents a partial-subsumption
8627 which makes bad code. Actually we could treat
8628 component_ref's of vars like vars. */
8629
8630 tree lhs = TREE_OPERAND (exp, 0);
8631 tree rhs = TREE_OPERAND (exp, 1);
8632
8633 temp = 0;
8634
8635 /* Check for |= or &= of a bitfield of size one into another bitfield
8636 of size 1. In this case, (unless we need the result of the
8637 assignment) we can do this more efficiently with a
8638 test followed by an assignment, if necessary.
8639
8640 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8641 things change so we do, this code should be enhanced to
8642 support it. */
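/* Hypothetical source illustrating the case (editor's note):
     struct s { unsigned a : 1, b : 1; } x;
     x.a |= x.b;
   when the result is ignored it suffices to test x.b and, only if
   it is set, store 1 into x.a, instead of a load/or/store
   sequence.  */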
8643 if (ignore
8644 && TREE_CODE (lhs) == COMPONENT_REF
8645 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8646 || TREE_CODE (rhs) == BIT_AND_EXPR)
8647 && TREE_OPERAND (rhs, 0) == lhs
8648 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8649 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8650 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8651 {
8652 rtx label = gen_label_rtx ();
8653
8654 do_jump (TREE_OPERAND (rhs, 1),
8655 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8656 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8657 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8658 (TREE_CODE (rhs) == BIT_IOR_EXPR
8659 ? integer_one_node
8660 : integer_zero_node)),
8661 0, 0);
8662 do_pending_stack_adjust ();
8663 emit_label (label);
8664 return const0_rtx;
8665 }
8666
8667 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8668
8669 return temp;
8670 }
8671
8672 case RETURN_EXPR:
8673 if (!TREE_OPERAND (exp, 0))
8674 expand_null_return ();
8675 else
8676 expand_return (TREE_OPERAND (exp, 0));
8677 return const0_rtx;
8678
8679 case PREINCREMENT_EXPR:
8680 case PREDECREMENT_EXPR:
8681 return expand_increment (exp, 0, ignore);
8682
8683 case POSTINCREMENT_EXPR:
8684 case POSTDECREMENT_EXPR:
8685 /* Faster to treat as pre-increment if result is not used. */
8686 return expand_increment (exp, ! ignore, ignore);
8687
8688 case ADDR_EXPR:
8689 /* Are we taking the address of a nested function? */
8690 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8691 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8692 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8693 && ! TREE_STATIC (exp))
8694 {
8695 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8696 op0 = force_operand (op0, target);
8697 }
8698 /* If we are taking the address of something erroneous, just
8699 return a zero. */
8700 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8701 return const0_rtx;
8702 /* If we are taking the address of a constant and are at the
8703 top level, we have to use output_constant_def since we can't
8704 call force_const_mem at top level. */
8705 else if (cfun == 0
8706 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8707 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8708 == 'c')))
8709 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8710 else
8711 {
8712 /* We make sure to pass const0_rtx down if we came in with
8713 ignore set, to avoid doing the cleanups twice for the same thing.  */
8714 op0 = expand_expr (TREE_OPERAND (exp, 0),
8715 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8716 (modifier == EXPAND_INITIALIZER
8717 ? modifier : EXPAND_CONST_ADDRESS));
8718
8719 /* If we are going to ignore the result, OP0 will have been set
8720 to const0_rtx, so just return it. Don't get confused and
8721 think we are taking the address of the constant. */
8722 if (ignore)
8723 return op0;
8724
8725 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8726 clever and returns a REG when given a MEM. */
8727 op0 = protect_from_queue (op0, 1);
8728
8729 /* We would like the object in memory. If it is a constant, we can
8730 have it be statically allocated into memory. For a non-constant,
8731 we need to allocate some memory and store the value into it. */
8732
8733 if (CONSTANT_P (op0))
8734 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8735 op0);
8736 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8737 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8738 || GET_CODE (op0) == PARALLEL)
8739 {
8740 /* If this object is in a register, it can't be BLKmode.  */
8741 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8742 tree nt = build_qualified_type (inner_type,
8743 (TYPE_QUALS (inner_type)
8744 | TYPE_QUAL_CONST));
8745 rtx memloc = assign_temp (nt, 1, 1, 1);
8746
8747 if (GET_CODE (op0) == PARALLEL)
8748 /* Handle calls that pass values in multiple non-contiguous
8749 locations. The Irix 6 ABI has examples of this. */
8750 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8751 else
8752 emit_move_insn (memloc, op0);
8753
8754 op0 = memloc;
8755 }
8756
8757 if (GET_CODE (op0) != MEM)
8758 abort ();
8759
8760 mark_temp_addr_taken (op0);
8761 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8762 {
8763 op0 = XEXP (op0, 0);
8764 #ifdef POINTERS_EXTEND_UNSIGNED
8765 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8766 && mode == ptr_mode)
8767 op0 = convert_memory_address (ptr_mode, op0);
8768 #endif
8769 return op0;
8770 }
8771
8772 /* If OP0 is not aligned at least as much as the type requires, we
8773 need to make a temporary, copy OP0 to it, and take the address of
8774 the temporary. We want to use the alignment of the type, not of
8775 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8776 the test for BLKmode means that can't happen. The test for
8777 BLKmode is because we never make mis-aligned MEMs with
8778 non-BLKmode.
8779
8780 We don't need to do this at all if the machine doesn't have
8781 strict alignment. */
8782 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8783 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8784 > MEM_ALIGN (op0))
8785 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8786 {
8787 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8788 rtx new
8789 = assign_stack_temp_for_type
8790 (TYPE_MODE (inner_type),
8791 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8792 : int_size_in_bytes (inner_type),
8793 1, build_qualified_type (inner_type,
8794 (TYPE_QUALS (inner_type)
8795 | TYPE_QUAL_CONST)));
8796
8797 if (TYPE_ALIGN_OK (inner_type))
8798 abort ();
8799
8800 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8801 op0 = new;
8802 }
8803
8804 op0 = force_operand (XEXP (op0, 0), target);
8805 }
8806
8807 if (flag_force_addr && GET_CODE (op0) != REG)
8808 op0 = force_reg (Pmode, op0);
8809
8810 if (GET_CODE (op0) == REG
8811 && ! REG_USERVAR_P (op0))
8812 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8813
8814 #ifdef POINTERS_EXTEND_UNSIGNED
8815 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8816 && mode == ptr_mode)
8817 op0 = convert_memory_address (ptr_mode, op0);
8818 #endif
8819
8820 return op0;
8821
8822 case ENTRY_VALUE_EXPR:
8823 abort ();
8824
8825 /* COMPLEX type for Extended Pascal & Fortran */
8826 case COMPLEX_EXPR:
8827 {
8828 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8829 rtx insns;
8830
8831 /* Get the rtx code of the operands. */
8832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8833 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8834
8835 if (! target)
8836 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8837
8838 start_sequence ();
8839
8840 /* Move the real (op0) and imaginary (op1) parts to their respective locations.  */
8841 emit_move_insn (gen_realpart (mode, target), op0);
8842 emit_move_insn (gen_imagpart (mode, target), op1);
8843
8844 insns = get_insns ();
8845 end_sequence ();
8846
8847 /* Complex construction should appear as a single unit. */
8848 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8849 each with a separate pseudo as destination.
8850 It's not correct for flow to treat them as a unit. */
8851 if (GET_CODE (target) != CONCAT)
8852 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8853 else
8854 emit_insns (insns);
8855
8856 return target;
8857 }
8858
8859 case REALPART_EXPR:
8860 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8861 return gen_realpart (mode, op0);
8862
8863 case IMAGPART_EXPR:
8864 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8865 return gen_imagpart (mode, op0);
8866
8867 case CONJ_EXPR:
8868 {
8869 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8870 rtx imag_t;
8871 rtx insns;
8872
8873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8874
8875 if (! target)
8876 target = gen_reg_rtx (mode);
8877
8878 start_sequence ();
8879
8880 /* Store the realpart and the negated imagpart to target. */
8881 emit_move_insn (gen_realpart (partmode, target),
8882 gen_realpart (partmode, op0));
8883
8884 imag_t = gen_imagpart (partmode, target);
8885 temp = expand_unop (partmode,
8886 ! unsignedp && flag_trapv
8887 && (GET_MODE_CLASS(partmode) == MODE_INT)
8888 ? negv_optab : neg_optab,
8889 gen_imagpart (partmode, op0), imag_t, 0);
8890 if (temp != imag_t)
8891 emit_move_insn (imag_t, temp);
8892
8893 insns = get_insns ();
8894 end_sequence ();
8895
8896 /* Conjugate should appear as a single unit.
8897 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8898 each with a separate pseudo as destination.
8899 It's not correct for flow to treat them as a unit. */
8900 if (GET_CODE (target) != CONCAT)
8901 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8902 else
8903 emit_insns (insns);
8904
8905 return target;
8906 }
8907
8908 case TRY_CATCH_EXPR:
8909 {
8910 tree handler = TREE_OPERAND (exp, 1);
8911
8912 expand_eh_region_start ();
8913
8914 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8915
8916 expand_eh_region_end_cleanup (handler);
8917
8918 return op0;
8919 }
8920
8921 case TRY_FINALLY_EXPR:
8922 {
8923 tree try_block = TREE_OPERAND (exp, 0);
8924 tree finally_block = TREE_OPERAND (exp, 1);
8925 rtx finally_label = gen_label_rtx ();
8926 rtx done_label = gen_label_rtx ();
8927 rtx return_link = gen_reg_rtx (Pmode);
8928 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8929 (tree) finally_label, (tree) return_link);
8930 TREE_SIDE_EFFECTS (cleanup) = 1;
8931
8932 /* Start a new binding layer that will keep track of all cleanup
8933 actions to be performed. */
8934 expand_start_bindings (2);
8935
8936 target_temp_slot_level = temp_slot_level;
8937
8938 expand_decl_cleanup (NULL_TREE, cleanup);
8939 op0 = expand_expr (try_block, target, tmode, modifier);
8940
8941 preserve_temp_slots (op0);
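/* Closing the binding contour runs the cleanup registered above, which
   enters the finally code through FINALLY_LABEL and comes back through
   RETURN_LINK; the normal path then jumps over that out-of-line code.  */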
8942 expand_end_bindings (NULL_TREE, 0, 0);
8943 emit_jump (done_label);
8944 emit_label (finally_label);
8945 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8946 emit_indirect_jump (return_link);
8947 emit_label (done_label);
8948 return op0;
8949 }
8950
8951 case GOTO_SUBROUTINE_EXPR:
8952 {
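/* Jump to the subroutine SUBR, recording in RETURN_LINK the address
   of the label to return to, and emit that label here.  */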
8953 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8954 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8955 rtx return_address = gen_label_rtx ();
8956 emit_move_insn (return_link,
8957 gen_rtx_LABEL_REF (Pmode, return_address));
8958 emit_jump (subr);
8959 emit_label (return_address);
8960 return const0_rtx;
8961 }
8962
8963 case VA_ARG_EXPR:
8964 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8965
8966 case EXC_PTR_EXPR:
8967 return get_exception_pointer (cfun);
8968
8969 case FDESC_EXPR:
8970 /* Function descriptors are not valid except as
8971 initialization constants, and should not be expanded. */
8972 abort ();
8973
8974 default:
8975 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8976 }
8977
8978 /* Here to do an ordinary binary operator, generating an instruction
8979 from the optab already placed in `this_optab'. */
8980 binop:
8981 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8982 subtarget = 0;
8983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8984 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8985 binop2:
8986 temp = expand_binop (mode, this_optab, op0, op1, target,
8987 unsignedp, OPTAB_LIB_WIDEN);
8988 if (temp == 0)
8989 abort ();
8990 return temp;
8991 }
8992 \f
8993 /* Return the tree node if ARG corresponds to a string constant or zero
8994 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8995 in bytes within the string that ARG is accessing. The type of the
8996 offset will be `sizetype'. */
8997
8998 tree
8999 string_constant (arg, ptr_offset)
9000 tree arg;
9001 tree *ptr_offset;
9002 {
9003 STRIP_NOPS (arg);
9004
9005 if (TREE_CODE (arg) == ADDR_EXPR
9006 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9007 {
9008 *ptr_offset = size_zero_node;
9009 return TREE_OPERAND (arg, 0);
9010 }
9011 else if (TREE_CODE (arg) == PLUS_EXPR)
9012 {
9013 tree arg0 = TREE_OPERAND (arg, 0);
9014 tree arg1 = TREE_OPERAND (arg, 1);
9015
9016 STRIP_NOPS (arg0);
9017 STRIP_NOPS (arg1);
9018
9019 if (TREE_CODE (arg0) == ADDR_EXPR
9020 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9021 {
9022 *ptr_offset = convert (sizetype, arg1);
9023 return TREE_OPERAND (arg0, 0);
9024 }
9025 else if (TREE_CODE (arg1) == ADDR_EXPR
9026 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9027 {
9028 *ptr_offset = convert (sizetype, arg0);
9029 return TREE_OPERAND (arg1, 0);
9030 }
9031 }
9032
9033 return 0;
9034 }
9035 \f
9036 /* Expand code for a post- or pre- increment or decrement
9037 and return the RTX for the result.
9038 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9039
9040 static rtx
9041 expand_increment (exp, post, ignore)
9042 tree exp;
9043 int post, ignore;
9044 {
9045 rtx op0, op1;
9046 rtx temp, value;
9047 tree incremented = TREE_OPERAND (exp, 0);
9048 optab this_optab = add_optab;
9049 int icode;
9050 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9051 int op0_is_copy = 0;
9052 int single_insn = 0;
9053 /* 1 means we can't store into OP0 directly,
9054 because it is a subreg narrower than a word,
9055 and we don't dare clobber the rest of the word. */
9056 int bad_subreg = 0;
9057
9058 /* Stabilize any component ref that might need to be
9059 evaluated more than once below. */
9060 if (!post
9061 || TREE_CODE (incremented) == BIT_FIELD_REF
9062 || (TREE_CODE (incremented) == COMPONENT_REF
9063 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9064 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9065 incremented = stabilize_reference (incremented);
9066 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9067 ones into save exprs so that they don't accidentally get evaluated
9068 more than once by the code below. */
9069 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9070 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9071 incremented = save_expr (incremented);
9072
9073 /* Compute the operands as RTX.
9074 Note whether OP0 is the actual lvalue or a copy of it:
9075 I believe it is a copy iff it is a register or subreg
9076 and insns were generated in computing it. */
9077
9078 temp = get_last_insn ();
9079 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9080
9081 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9082 in place but instead must do sign- or zero-extension during assignment,
9083 so we copy it into a new register and let the code below use it as
9084 a copy.
9085
9086 Note that we can safely modify this SUBREG since it is known not to be
9087 shared (it was made by the expand_expr call above). */
9088
9089 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9090 {
9091 if (post)
9092 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9093 else
9094 bad_subreg = 1;
9095 }
9096 else if (GET_CODE (op0) == SUBREG
9097 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9098 {
9099 /* We cannot increment this SUBREG in place. If we are
9100 post-incrementing, get a copy of the old value. Otherwise,
9101 just mark that we cannot increment in place. */
9102 if (post)
9103 op0 = copy_to_reg (op0);
9104 else
9105 bad_subreg = 1;
9106 }
9107
9108 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9109 && temp != get_last_insn ());
9110 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9111 EXPAND_MEMORY_USE_BAD);
9112
9113 /* Decide whether incrementing or decrementing. */
9114 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9115 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9116 this_optab = sub_optab;
9117
9118 /* Convert decrement by a constant into a negative increment. */
9119 if (this_optab == sub_optab
9120 && GET_CODE (op1) == CONST_INT)
9121 {
9122 op1 = GEN_INT (-INTVAL (op1));
9123 this_optab = add_optab;
9124 }
9125
9126 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9127 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9128
9129 /* For a preincrement, see if we can do this with a single instruction. */
9130 if (!post)
9131 {
9132 icode = (int) this_optab->handlers[(int) mode].insn_code;
9133 if (icode != (int) CODE_FOR_nothing
9134 /* Make sure that OP0 is valid for operands 0 and 1
9135 of the insn we want to queue. */
9136 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9137 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9138 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9139 single_insn = 1;
9140 }
9141
9142 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9143 then we cannot just increment OP0. We must therefore contrive to
9144 increment the original value. Then, for postincrement, we can return
9145 OP0 since it is a copy of the old value. For preincrement, expand here
9146 unless we can do it with a single insn.
9147
9148 Likewise if storing directly into OP0 would clobber high bits
9149 we need to preserve (bad_subreg). */
9150 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9151 {
9152 /* This is the easiest way to increment the value wherever it is.
9153 Problems with multiple evaluation of INCREMENTED are prevented
9154 because either (1) it is a component_ref or preincrement,
9155 in which case it was stabilized above, or (2) it is an array_ref
9156 with constant index in an array in a register, which is
9157 safe to reevaluate. */
9158 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9159 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9160 ? MINUS_EXPR : PLUS_EXPR),
9161 TREE_TYPE (exp),
9162 incremented,
9163 TREE_OPERAND (exp, 1));
9164
9165 while (TREE_CODE (incremented) == NOP_EXPR
9166 || TREE_CODE (incremented) == CONVERT_EXPR)
9167 {
9168 newexp = convert (TREE_TYPE (incremented), newexp);
9169 incremented = TREE_OPERAND (incremented, 0);
9170 }
9171
9172 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9173 return post ? op0 : temp;
9174 }
9175
9176 if (post)
9177 {
9178 /* We have a true reference to the value in OP0.
9179 If there is an insn to add or subtract in this mode, queue it.
9180 Queueing the increment insn avoids the register shuffling
9181 that often results if we must increment now and first save
9182 the old value for subsequent use. */
9183
9184 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9185 op0 = stabilize (op0);
9186 #endif
9187
9188 icode = (int) this_optab->handlers[(int) mode].insn_code;
9189 if (icode != (int) CODE_FOR_nothing
9190 /* Make sure that OP0 is valid for operands 0 and 1
9191 of the insn we want to queue. */
9192 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9193 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9194 {
9195 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9196 op1 = force_reg (mode, op1);
9197
9198 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9199 }
9200 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9201 {
9202 rtx addr = (general_operand (XEXP (op0, 0), mode)
9203 ? force_reg (Pmode, XEXP (op0, 0))
9204 : copy_to_reg (XEXP (op0, 0)));
9205 rtx temp, result;
9206
9207 op0 = replace_equiv_address (op0, addr);
9208 temp = force_reg (GET_MODE (op0), op0);
9209 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9210 op1 = force_reg (mode, op1);
9211
9212 /* The increment queue is LIFO, so we have to `queue'
9213 the instructions in reverse order. */
9214 enqueue_insn (op0, gen_move_insn (op0, temp));
9215 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9216 return result;
9217 }
9218 }
9219
9220 /* Preincrement, or we can't increment with one simple insn. */
9221 if (post)
9222 /* Save a copy of the value before inc or dec, to return it later. */
9223 temp = value = copy_to_reg (op0);
9224 else
9225 /* Arrange to return the incremented value. */
9226 /* Copy the rtx because expand_binop will protect from the queue,
9227 and the results of that would be invalid for us to return
9228 if our caller does emit_queue before using our result. */
9229 temp = copy_rtx (value = op0);
9230
9231 /* Increment however we can. */
9232 op1 = expand_binop (mode, this_optab, value, op1,
9233 current_function_check_memory_usage ? NULL_RTX : op0,
9234 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9235 /* Make sure the value is stored into OP0. */
9236 if (op1 != op0)
9237 emit_move_insn (op0, op1);
9238
9239 return temp;
9240 }
9241 \f
9242 /* At the start of a function, record that we have no previously-pushed
9243 arguments waiting to be popped. */
9244
9245 void
9246 init_pending_stack_adjust ()
9247 {
9248 pending_stack_adjust = 0;
9249 }
9250
9251 /* When exiting from function, if safe, clear out any pending stack adjust
9252 so the adjustment won't get done.
9253
9254 Note, if the current function calls alloca, then it must have a
9255 frame pointer regardless of the value of flag_omit_frame_pointer. */
9256
9257 void
9258 clear_pending_stack_adjust ()
9259 {
9260 #ifdef EXIT_IGNORE_STACK
9261 if (optimize > 0
9262 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9263 && EXIT_IGNORE_STACK
9264 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9265 && ! flag_inline_functions)
9266 {
9267 stack_pointer_delta -= pending_stack_adjust,
9268 pending_stack_adjust = 0;
9269 }
9270 #endif
9271 }
9272
9273 /* Pop any previously-pushed arguments that have not been popped yet. */
9274
9275 void
9276 do_pending_stack_adjust ()
9277 {
9278 if (inhibit_defer_pop == 0)
9279 {
9280 if (pending_stack_adjust != 0)
9281 adjust_stack (GEN_INT (pending_stack_adjust));
9282 pending_stack_adjust = 0;
9283 }
9284 }
9285 \f
9286 /* Expand conditional expressions. */
9287
9288 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9289 LABEL is an rtx of code CODE_LABEL, in this function and all the
9290 functions here. */
9291
9292 void
9293 jumpifnot (exp, label)
9294 tree exp;
9295 rtx label;
9296 {
9297 do_jump (exp, label, NULL_RTX);
9298 }
9299
9300 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9301
9302 void
9303 jumpif (exp, label)
9304 tree exp;
9305 rtx label;
9306 {
9307 do_jump (exp, NULL_RTX, label);
9308 }
9309
9310 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9311 the result is zero, or IF_TRUE_LABEL if the result is one.
9312 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9313 meaning fall through in that case.
9314
9315 do_jump always does any pending stack adjust except when it does not
9316 actually perform a jump. An example where there is no jump
9317 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9318
9319 This function is responsible for optimizing cases such as
9320 &&, || and comparison operators in EXP. */
9321
9322 void
9323 do_jump (exp, if_false_label, if_true_label)
9324 tree exp;
9325 rtx if_false_label, if_true_label;
9326 {
9327 enum tree_code code = TREE_CODE (exp);
9328 /* Some cases need to create a label to jump to
9329 in order to properly fall through.
9330 These cases set DROP_THROUGH_LABEL nonzero. */
9331 rtx drop_through_label = 0;
9332 rtx temp;
9333 int i;
9334 tree type;
9335 enum machine_mode mode;
9336
9337 #ifdef MAX_INTEGER_COMPUTATION_MODE
9338 check_max_integer_computation_mode (exp);
9339 #endif
9340
9341 emit_queue ();
9342
9343 switch (code)
9344 {
9345 case ERROR_MARK:
9346 break;
9347
9348 case INTEGER_CST:
9349 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9350 if (temp)
9351 emit_jump (temp);
9352 break;
9353
9354 #if 0
9355 /* This is not true with #pragma weak */
9356 case ADDR_EXPR:
9357 /* The address of something can never be zero. */
9358 if (if_true_label)
9359 emit_jump (if_true_label);
9360 break;
9361 #endif
9362
9363 case NOP_EXPR:
9364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9365 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9366 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9367 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9368 goto normal;
9369 case CONVERT_EXPR:
9370 /* If we are narrowing the operand, we have to do the compare in the
9371 narrower mode. */
9372 if ((TYPE_PRECISION (TREE_TYPE (exp))
9373 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9374 goto normal;
9375 case NON_LVALUE_EXPR:
9376 case REFERENCE_EXPR:
9377 case ABS_EXPR:
9378 case NEGATE_EXPR:
9379 case LROTATE_EXPR:
9380 case RROTATE_EXPR:
9381 /* These cannot change zero->non-zero or vice versa. */
9382 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9383 break;
9384
9385 case WITH_RECORD_EXPR:
9386 /* Put the object on the placeholder list, recurse through our first
9387 operand, and pop the list. */
9388 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9389 placeholder_list);
9390 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9391 placeholder_list = TREE_CHAIN (placeholder_list);
9392 break;
9393
9394 #if 0
9395 /* This is never less insns than evaluating the PLUS_EXPR followed by
9396 a test and can be longer if the test is eliminated. */
9397 case PLUS_EXPR:
9398 /* Reduce to minus. */
9399 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9400 TREE_OPERAND (exp, 0),
9401 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9402 TREE_OPERAND (exp, 1))));
9403 /* Process as MINUS. */
9404 #endif
9405
9406 case MINUS_EXPR:
9407 /* Non-zero iff operands of minus differ. */
9408 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9409 TREE_OPERAND (exp, 0),
9410 TREE_OPERAND (exp, 1)),
9411 NE, NE, if_false_label, if_true_label);
9412 break;
9413
9414 case BIT_AND_EXPR:
9415 /* If we are AND'ing with a small constant, do this comparison in the
9416 smallest type that fits. If the machine doesn't have comparisons
9417 that small, it will be converted back to the wider comparison.
9418 This helps if we are testing the sign bit of a narrower object.
9419 combine can't do this for us because it can't know whether a
9420 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9421
9422 if (! SLOW_BYTE_ACCESS
9423 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9424 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9425 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9426 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9427 && (type = type_for_mode (mode, 1)) != 0
9428 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9429 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9430 != CODE_FOR_nothing))
9431 {
9432 do_jump (convert (type, exp), if_false_label, if_true_label);
9433 break;
9434 }
9435 goto normal;
9436
9437 case TRUTH_NOT_EXPR:
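/* Logical negation: swap the true and false labels for the operand.  */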
9438 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9439 break;
9440
9441 case TRUTH_ANDIF_EXPR:
9442 if (if_false_label == 0)
9443 if_false_label = drop_through_label = gen_label_rtx ();
9444 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9445 start_cleanup_deferral ();
9446 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9447 end_cleanup_deferral ();
9448 break;
9449
9450 case TRUTH_ORIF_EXPR:
9451 if (if_true_label == 0)
9452 if_true_label = drop_through_label = gen_label_rtx ();
9453 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9454 start_cleanup_deferral ();
9455 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9456 end_cleanup_deferral ();
9457 break;
9458
9459 case COMPOUND_EXPR:
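/* Evaluate the first operand only for its side effects, then jump
   according to the value of the second operand.  */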
9460 push_temp_slots ();
9461 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9462 preserve_temp_slots (NULL_RTX);
9463 free_temp_slots ();
9464 pop_temp_slots ();
9465 emit_queue ();
9466 do_pending_stack_adjust ();
9467 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9468 break;
9469
9470 case COMPONENT_REF:
9471 case BIT_FIELD_REF:
9472 case ARRAY_REF:
9473 case ARRAY_RANGE_REF:
9474 {
9475 HOST_WIDE_INT bitsize, bitpos;
9476 int unsignedp;
9477 enum machine_mode mode;
9478 tree type;
9479 tree offset;
9480 int volatilep = 0;
9481
9482 /* Get description of this reference. We don't actually care
9483 about the underlying object here. */
9484 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9485 &unsignedp, &volatilep);
9486
9487 type = type_for_size (bitsize, unsignedp);
9488 if (! SLOW_BYTE_ACCESS
9489 && type != 0 && bitsize >= 0
9490 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9491 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9492 != CODE_FOR_nothing))
9493 {
9494 do_jump (convert (type, exp), if_false_label, if_true_label);
9495 break;
9496 }
9497 goto normal;
9498 }
9499
9500 case COND_EXPR:
9501 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9502 if (integer_onep (TREE_OPERAND (exp, 1))
9503 && integer_zerop (TREE_OPERAND (exp, 2)))
9504 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9505
9506 else if (integer_zerop (TREE_OPERAND (exp, 1))
9507 && integer_onep (TREE_OPERAND (exp, 2)))
9508 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9509
9510 else
9511 {
9512 rtx label1 = gen_label_rtx ();
9513 drop_through_label = gen_label_rtx ();
9514
9515 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9516
9517 start_cleanup_deferral ();
9518 /* Now the THEN-expression. */
9519 do_jump (TREE_OPERAND (exp, 1),
9520 if_false_label ? if_false_label : drop_through_label,
9521 if_true_label ? if_true_label : drop_through_label);
9522 /* In case the do_jump just above never jumps. */
9523 do_pending_stack_adjust ();
9524 emit_label (label1);
9525
9526 /* Now the ELSE-expression. */
9527 do_jump (TREE_OPERAND (exp, 2),
9528 if_false_label ? if_false_label : drop_through_label,
9529 if_true_label ? if_true_label : drop_through_label);
9530 end_cleanup_deferral ();
9531 }
9532 break;
9533
9534 case EQ_EXPR:
9535 {
9536 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9537
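/* A complex equality holds only if both the real parts and the
   imaginary parts are equal, so test the two parts separately
   and AND the results.  */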
9538 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9539 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9540 {
9541 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9542 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9543 do_jump
9544 (fold
9545 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9546 fold (build (EQ_EXPR, TREE_TYPE (exp),
9547 fold (build1 (REALPART_EXPR,
9548 TREE_TYPE (inner_type),
9549 exp0)),
9550 fold (build1 (REALPART_EXPR,
9551 TREE_TYPE (inner_type),
9552 exp1)))),
9553 fold (build (EQ_EXPR, TREE_TYPE (exp),
9554 fold (build1 (IMAGPART_EXPR,
9555 TREE_TYPE (inner_type),
9556 exp0)),
9557 fold (build1 (IMAGPART_EXPR,
9558 TREE_TYPE (inner_type),
9559 exp1)))))),
9560 if_false_label, if_true_label);
9561 }
9562
9563 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9564 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9565
9566 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9567 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9568 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9569 else
9570 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9571 break;
9572 }
9573
9574 case NE_EXPR:
9575 {
9576 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9577
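/* Complex values are unequal if either the real parts or the
   imaginary parts differ, so test the two parts separately
   and OR the results.  */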
9578 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9579 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9580 {
9581 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9582 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9583 do_jump
9584 (fold
9585 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9586 fold (build (NE_EXPR, TREE_TYPE (exp),
9587 fold (build1 (REALPART_EXPR,
9588 TREE_TYPE (inner_type),
9589 exp0)),
9590 fold (build1 (REALPART_EXPR,
9591 TREE_TYPE (inner_type),
9592 exp1)))),
9593 fold (build (NE_EXPR, TREE_TYPE (exp),
9594 fold (build1 (IMAGPART_EXPR,
9595 TREE_TYPE (inner_type),
9596 exp0)),
9597 fold (build1 (IMAGPART_EXPR,
9598 TREE_TYPE (inner_type),
9599 exp1)))))),
9600 if_false_label, if_true_label);
9601 }
9602
9603 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9604 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9605
9606 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9607 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9608 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9609 else
9610 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9611 break;
9612 }
9613
9614 case LT_EXPR:
9615 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9616 if (GET_MODE_CLASS (mode) == MODE_INT
9617 && ! can_compare_p (LT, mode, ccp_jump))
9618 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9619 else
9620 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9621 break;
9622
9623 case LE_EXPR:
9624 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9625 if (GET_MODE_CLASS (mode) == MODE_INT
9626 && ! can_compare_p (LE, mode, ccp_jump))
9627 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9628 else
9629 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9630 break;
9631
9632 case GT_EXPR:
9633 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9634 if (GET_MODE_CLASS (mode) == MODE_INT
9635 && ! can_compare_p (GT, mode, ccp_jump))
9636 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9637 else
9638 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9639 break;
9640
9641 case GE_EXPR:
9642 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9643 if (GET_MODE_CLASS (mode) == MODE_INT
9644 && ! can_compare_p (GE, mode, ccp_jump))
9645 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9646 else
9647 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9648 break;
9649
9650 case UNORDERED_EXPR:
9651 case ORDERED_EXPR:
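/* An UNORDERED comparison is true iff at least one operand is a NaN;
   ORDERED is its inverse.  Branch on whichever form the target supports.  */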
9652 {
9653 enum rtx_code cmp, rcmp;
9654 int do_rev;
9655
9656 if (code == UNORDERED_EXPR)
9657 cmp = UNORDERED, rcmp = ORDERED;
9658 else
9659 cmp = ORDERED, rcmp = UNORDERED;
9660 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9661
9662 do_rev = 0;
9663 if (! can_compare_p (cmp, mode, ccp_jump)
9664 && (can_compare_p (rcmp, mode, ccp_jump)
9665 /* If the target doesn't provide either UNORDERED or ORDERED
9666 comparisons, canonicalize on UNORDERED for the library. */
9667 || rcmp == UNORDERED))
9668 do_rev = 1;
9669
9670 if (! do_rev)
9671 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9672 else
9673 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9674 }
9675 break;
9676
9677 {
9678 enum rtx_code rcode1;
9679 enum tree_code tcode2;
9680
9681 case UNLT_EXPR:
9682 rcode1 = UNLT;
9683 tcode2 = LT_EXPR;
9684 goto unordered_bcc;
9685 case UNLE_EXPR:
9686 rcode1 = UNLE;
9687 tcode2 = LE_EXPR;
9688 goto unordered_bcc;
9689 case UNGT_EXPR:
9690 rcode1 = UNGT;
9691 tcode2 = GT_EXPR;
9692 goto unordered_bcc;
9693 case UNGE_EXPR:
9694 rcode1 = UNGE;
9695 tcode2 = GE_EXPR;
9696 goto unordered_bcc;
9697 case UNEQ_EXPR:
9698 rcode1 = UNEQ;
9699 tcode2 = EQ_EXPR;
9700 goto unordered_bcc;
9701
9702 unordered_bcc:
9703 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9704 if (can_compare_p (rcode1, mode, ccp_jump))
9705 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9706 if_true_label);
9707 else
9708 {
9709 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9710 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9711 tree cmp0, cmp1;
9712
9713 /* If the target doesn't support combined unordered
9714 compares, decompose into UNORDERED + comparison. */
9715 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9716 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9717 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9718 do_jump (exp, if_false_label, if_true_label);
9719 }
9720 }
9721 break;
9722
9723 /* Special case:
9724 __builtin_expect (<test>, 0) and
9725 __builtin_expect (<test>, 1)
9726
9727 We need to do this here, so that <test> is not converted to a SCC
9728 operation on machines that use condition code registers and COMPARE
9729 like the PowerPC, and then the jump is done based on whether the SCC
9730 operation produced a 1 or 0. */
9731 case CALL_EXPR:
9732 /* Check for a built-in function. */
9733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9734 {
9735 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9736 tree arglist = TREE_OPERAND (exp, 1);
9737
9738 if (TREE_CODE (fndecl) == FUNCTION_DECL
9739 && DECL_BUILT_IN (fndecl)
9740 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9741 && arglist != NULL_TREE
9742 && TREE_CHAIN (arglist) != NULL_TREE)
9743 {
9744 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9745 if_true_label);
9746
9747 if (seq != NULL_RTX)
9748 {
9749 emit_insn (seq);
9750 return;
9751 }
9752 }
9753 }
9754 /* fall through and generate the normal code. */
9755
9756 default:
9757 normal:
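/* Evaluate EXP and jump according to whether the result is zero.  */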
9758 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9759 #if 0
9760 /* This is not needed any more and causes poor code since it causes
9761 comparisons and tests from non-SI objects to have different code
9762 sequences. */
9763 /* Copy to register to avoid generating bad insns by cse
9764 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9765 if (!cse_not_expected && GET_CODE (temp) == MEM)
9766 temp = copy_to_reg (temp);
9767 #endif
9768 do_pending_stack_adjust ();
9769 /* Do any postincrements in the expression that was tested. */
9770 emit_queue ();
9771
9772 if (GET_CODE (temp) == CONST_INT
9773 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9774 || GET_CODE (temp) == LABEL_REF)
9775 {
9776 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9777 if (target)
9778 emit_jump (target);
9779 }
9780 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9781 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9782 /* Note swapping the labels gives us not-equal. */
9783 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9784 else if (GET_MODE (temp) != VOIDmode)
9785 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9786 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9787 GET_MODE (temp), NULL_RTX,
9788 if_false_label, if_true_label);
9789 else
9790 abort ();
9791 }
9792
9793 if (drop_through_label)
9794 {
9795 /* If do_jump produces code that might be jumped around,
9796 do any stack adjusts from that code, before the place
9797 where control merges in. */
9798 do_pending_stack_adjust ();
9799 emit_label (drop_through_label);
9800 }
9801 }
9802 \f
9803 /* Given a comparison expression EXP for values too wide to be compared
9804 with one insn, test the comparison and jump to the appropriate label.
9805 The code of EXP is ignored; we always test GT if SWAP is 0,
9806 and LT if SWAP is 1. */
9807
9808 static void
9809 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9810 tree exp;
9811 int swap;
9812 rtx if_false_label, if_true_label;
9813 {
9814 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9815 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9816 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9817 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9818
9819 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9820 }
9821
9822 /* Compare OP0 with OP1, word at a time, in mode MODE.
9823 UNSIGNEDP says to do unsigned comparison.
9824 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9825
9826 void
9827 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9828 enum machine_mode mode;
9829 int unsignedp;
9830 rtx op0, op1;
9831 rtx if_false_label, if_true_label;
9832 {
9833 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9834 rtx drop_through_label = 0;
9835 int i;
9836
9837 if (! if_true_label || ! if_false_label)
9838 drop_through_label = gen_label_rtx ();
9839 if (! if_true_label)
9840 if_true_label = drop_through_label;
9841 if (! if_false_label)
9842 if_false_label = drop_through_label;
9843
9844 /* Compare a word at a time, high order first. */
9845 for (i = 0; i < nwords; i++)
9846 {
9847 rtx op0_word, op1_word;
9848
9849 if (WORDS_BIG_ENDIAN)
9850 {
9851 op0_word = operand_subword_force (op0, i, mode);
9852 op1_word = operand_subword_force (op1, i, mode);
9853 }
9854 else
9855 {
9856 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9857 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9858 }
9859
9860 /* All but the high-order word must be compared as unsigned.  */
9861 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9862 (unsignedp || i > 0), word_mode, NULL_RTX,
9863 NULL_RTX, if_true_label);
9864
9865 /* Consider lower words only if these are equal. */
9866 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9867 NULL_RTX, NULL_RTX, if_false_label);
9868 }
9869
9870 if (if_false_label)
9871 emit_jump (if_false_label);
9872 if (drop_through_label)
9873 emit_label (drop_through_label);
9874 }
9875
9876 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9877 with one insn, test the comparison and jump to the appropriate label. */
9878
9879 static void
9880 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9881 tree exp;
9882 rtx if_false_label, if_true_label;
9883 {
9884 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9885 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9887 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9888 int i;
9889 rtx drop_through_label = 0;
9890
9891 if (! if_false_label)
9892 drop_through_label = if_false_label = gen_label_rtx ();
9893
9894 for (i = 0; i < nwords; i++)
9895 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9896 operand_subword_force (op1, i, mode),
9897 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9898 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9899
9900 if (if_true_label)
9901 emit_jump (if_true_label);
9902 if (drop_through_label)
9903 emit_label (drop_through_label);
9904 }
9905 \f
9906 /* Jump according to whether OP0 is 0.
9907 We assume that OP0 has an integer mode that is too wide
9908 for the available compare insns. */
9909
9910 void
9911 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9912 rtx op0;
9913 rtx if_false_label, if_true_label;
9914 {
9915 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9916 rtx part;
9917 int i;
9918 rtx drop_through_label = 0;
9919
9920 /* The fastest way of doing this comparison on almost any machine is to
9921 "or" all the words and compare the result. If all have to be loaded
9922 from memory and this is a very wide item, it's possible this may
9923 be slower, but that's highly unlikely. */
9924
9925 part = gen_reg_rtx (word_mode);
9926 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9927 for (i = 1; i < nwords && part != 0; i++)
9928 part = expand_binop (word_mode, ior_optab, part,
9929 operand_subword_force (op0, i, GET_MODE (op0)),
9930 part, 1, OPTAB_WIDEN);
9931
9932 if (part != 0)
9933 {
9934 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9935 NULL_RTX, if_false_label, if_true_label);
9936
9937 return;
9938 }
9939
9940 /* If we couldn't do the "or" simply, do this with a series of compares. */
9941 if (! if_false_label)
9942 drop_through_label = if_false_label = gen_label_rtx ();
9943
9944 for (i = 0; i < nwords; i++)
9945 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9946 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9947 if_false_label, NULL_RTX);
9948
9949 if (if_true_label)
9950 emit_jump (if_true_label);
9951
9952 if (drop_through_label)
9953 emit_label (drop_through_label);
9954 }
9955 \f
9956 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9957 (including code to compute the values to be compared)
9958 and set (CC0) according to the result.
9959 The decision as to signed or unsigned comparison must be made by the caller.
9960
9961 We force a stack adjustment unless there are currently
9962 things pushed on the stack that aren't yet used.
9963
9964 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9965 compared. */
9966
9967 rtx
9968 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9969 rtx op0, op1;
9970 enum rtx_code code;
9971 int unsignedp;
9972 enum machine_mode mode;
9973 rtx size;
9974 {
9975 rtx tem;
9976
9977 /* If one operand is constant, make it the second one. Only do this
9978 if the other operand is not constant as well. */
9979
9980 if (swap_commutative_operands_p (op0, op1))
9981 {
9982 tem = op0;
9983 op0 = op1;
9984 op1 = tem;
9985 code = swap_condition (code);
9986 }
9987
9988 if (flag_force_mem)
9989 {
9990 op0 = force_not_mem (op0);
9991 op1 = force_not_mem (op1);
9992 }
9993
9994 do_pending_stack_adjust ();
9995
9996 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9997 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9998 return tem;
9999
10000 #if 0
10001 /* There's no need to do this now that combine.c can eliminate lots of
10002 sign extensions. This can be less efficient in certain cases on other
10003 machines. */
10004
10005 /* If this is a signed equality comparison, we can do it as an
10006 unsigned comparison since zero-extension is cheaper than sign
10007 extension and comparisons with zero are done as unsigned. This is
10008 the case even on machines that can do fast sign extension, since
10009 zero-extension is easier to combine with other operations than
10010 sign-extension is. If we are comparing against a constant, we must
10011 convert it to what it would look like unsigned. */
10012 if ((code == EQ || code == NE) && ! unsignedp
10013 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10014 {
10015 if (GET_CODE (op1) == CONST_INT
10016 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10017 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10018 unsignedp = 1;
10019 }
10020 #endif
10021
10022 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10023
10024 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10025 }
10026
10027 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10028 The decision as to signed or unsigned comparison must be made by the caller.
10029
10030 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10031 compared. */
10032
10033 void
10034 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10035 if_false_label, if_true_label)
10036 rtx op0, op1;
10037 enum rtx_code code;
10038 int unsignedp;
10039 enum machine_mode mode;
10040 rtx size;
10041 rtx if_false_label, if_true_label;
10042 {
10043 rtx tem;
10044 int dummy_true_label = 0;
10045
10046 /* Reverse the comparison if that is safe and we want to jump if it is
10047 false. */
10048 if (! if_true_label && ! FLOAT_MODE_P (mode))
10049 {
10050 if_true_label = if_false_label;
10051 if_false_label = 0;
10052 code = reverse_condition (code);
10053 }
10054
10055 /* If one operand is constant, make it the second one. Only do this
10056 if the other operand is not constant as well. */
10057
10058 if (swap_commutative_operands_p (op0, op1))
10059 {
10060 tem = op0;
10061 op0 = op1;
10062 op1 = tem;
10063 code = swap_condition (code);
10064 }
10065
10066 if (flag_force_mem)
10067 {
10068 op0 = force_not_mem (op0);
10069 op1 = force_not_mem (op1);
10070 }
10071
10072 do_pending_stack_adjust ();
10073
10074 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10075 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10076 {
10077 if (tem == const_true_rtx)
10078 {
10079 if (if_true_label)
10080 emit_jump (if_true_label);
10081 }
10082 else
10083 {
10084 if (if_false_label)
10085 emit_jump (if_false_label);
10086 }
10087 return;
10088 }
10089
10090 #if 0
10091 /* There's no need to do this now that combine.c can eliminate lots of
10092 sign extensions. This can be less efficient in certain cases on other
10093 machines. */
10094
10095 /* If this is a signed equality comparison, we can do it as an
10096 unsigned comparison since zero-extension is cheaper than sign
10097 extension and comparisons with zero are done as unsigned. This is
10098 the case even on machines that can do fast sign extension, since
10099 zero-extension is easier to combine with other operations than
10100 sign-extension is. If we are comparing against a constant, we must
10101 convert it to what it would look like unsigned. */
10102 if ((code == EQ || code == NE) && ! unsignedp
10103 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10104 {
10105 if (GET_CODE (op1) == CONST_INT
10106 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10107 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10108 unsignedp = 1;
10109 }
10110 #endif
10111
10112 if (! if_true_label)
10113 {
10114 dummy_true_label = 1;
10115 if_true_label = gen_label_rtx ();
10116 }
10117
10118 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10119 if_true_label);
10120
10121 if (if_false_label)
10122 emit_jump (if_false_label);
10123 if (dummy_true_label)
10124 emit_label (if_true_label);
10125 }
10126
10127 /* Generate code for a comparison expression EXP (including code to compute
10128 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10129 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10130 generated code will drop through.
10131 SIGNED_CODE should be the rtx operation for this comparison for
10132 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10133
10134 We force a stack adjustment unless there are currently
10135 things pushed on the stack that aren't yet used. */
10136
10137 static void
10138 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10139 if_true_label)
10140 tree exp;
10141 enum rtx_code signed_code, unsigned_code;
10142 rtx if_false_label, if_true_label;
10143 {
10144 rtx op0, op1;
10145 tree type;
10146 enum machine_mode mode;
10147 int unsignedp;
10148 enum rtx_code code;
10149
10150 /* Don't crash if the comparison was erroneous. */
10151 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10152 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10153 return;
10154
10155 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10156 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10157 return;
10158
10159 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10160 mode = TYPE_MODE (type);
10161 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10162 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10163 || (GET_MODE_BITSIZE (mode)
10164 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10165 1)))))))
10166 {
10167 /* op0 might have been replaced by a promoted constant, in which
10168 case the type of second argument should be used. */
10169 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10170 mode = TYPE_MODE (type);
10171 }
10172 unsignedp = TREE_UNSIGNED (type);
10173 code = unsignedp ? unsigned_code : signed_code;
10174
10175 #ifdef HAVE_canonicalize_funcptr_for_compare
10176 /* If function pointers need to be "canonicalized" before they can
10177 be reliably compared, then canonicalize them. */
10178 if (HAVE_canonicalize_funcptr_for_compare
10179 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10180 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10181 == FUNCTION_TYPE))
10182 {
10183 rtx new_op0 = gen_reg_rtx (mode);
10184
10185 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10186 op0 = new_op0;
10187 }
10188
10189 if (HAVE_canonicalize_funcptr_for_compare
10190 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10191 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10192 == FUNCTION_TYPE))
10193 {
10194 rtx new_op1 = gen_reg_rtx (mode);
10195
10196 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10197 op1 = new_op1;
10198 }
10199 #endif
10200
10201 /* Do any postincrements in the expression that was tested. */
10202 emit_queue ();
10203
10204 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10205 ((mode == BLKmode)
10206 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10207 if_false_label, if_true_label);
10208 }
10209 \f
10210 /* Generate code to calculate EXP using a store-flag instruction
10211 and return an rtx for the result. EXP is either a comparison
10212 or a TRUTH_NOT_EXPR whose operand is a comparison.
10213
10214 If TARGET is nonzero, store the result there if convenient.
10215
10216 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10217 cheap.
10218
10219 Return zero if there is no suitable set-flag instruction
10220 available on this machine.
10221
10222 Once expand_expr has been called on the arguments of the comparison,
10223 we are committed to doing the store flag, since it is not safe to
10224 re-evaluate the expression. We emit the store-flag insn by calling
10225 emit_store_flag, but only expand the arguments if we have a reason
10226 to believe that emit_store_flag will be successful. If we think that
10227 it will, but it isn't, we have to simulate the store-flag with a
10228 set/jump/set sequence. */
10229
10230 static rtx
10231 do_store_flag (exp, target, mode, only_cheap)
10232 tree exp;
10233 rtx target;
10234 enum machine_mode mode;
10235 int only_cheap;
10236 {
10237 enum rtx_code code;
10238 tree arg0, arg1, type;
10239 tree tem;
10240 enum machine_mode operand_mode;
10241 int invert = 0;
10242 int unsignedp;
10243 rtx op0, op1;
10244 enum insn_code icode;
10245 rtx subtarget = target;
10246 rtx result, label;
10247
10248 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10249 result at the end. We can't simply invert the test since it would
10250 have already been inverted if it were valid. This case occurs for
10251 some floating-point comparisons. */
10252
10253 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10254 invert = 1, exp = TREE_OPERAND (exp, 0);
10255
10256 arg0 = TREE_OPERAND (exp, 0);
10257 arg1 = TREE_OPERAND (exp, 1);
10258
10259 /* Don't crash if the comparison was erroneous. */
10260 if (arg0 == error_mark_node || arg1 == error_mark_node)
10261 return const0_rtx;
10262
10263 type = TREE_TYPE (arg0);
10264 operand_mode = TYPE_MODE (type);
10265 unsignedp = TREE_UNSIGNED (type);
10266
10267 /* We won't bother with BLKmode store-flag operations because it would mean
10268 passing a lot of information to emit_store_flag. */
10269 if (operand_mode == BLKmode)
10270 return 0;
10271
10272 /* We won't bother with store-flag operations involving function pointers
10273 when function pointers must be canonicalized before comparisons. */
10274 #ifdef HAVE_canonicalize_funcptr_for_compare
10275 if (HAVE_canonicalize_funcptr_for_compare
10276 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10277 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10278 == FUNCTION_TYPE))
10279 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10280 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10281 == FUNCTION_TYPE))))
10282 return 0;
10283 #endif
10284
10285 STRIP_NOPS (arg0);
10286 STRIP_NOPS (arg1);
10287
10288 /* Get the rtx comparison code to use. We know that EXP is a comparison
10289 operation of some type. Some comparisons against 1 and -1 can be
10290 converted to comparisons with zero. Do so here so that the tests
10291 below will be aware that we have a comparison with zero. These
10292 tests will not catch constants in the first operand, but constants
10293 are rarely passed as the first operand. */
10294
10295 switch (TREE_CODE (exp))
10296 {
10297 case EQ_EXPR:
10298 code = EQ;
10299 break;
10300 case NE_EXPR:
10301 code = NE;
10302 break;
10303 case LT_EXPR:
10304 if (integer_onep (arg1))
10305 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10306 else
10307 code = unsignedp ? LTU : LT;
10308 break;
10309 case LE_EXPR:
10310 if (! unsignedp && integer_all_onesp (arg1))
10311 arg1 = integer_zero_node, code = LT;
10312 else
10313 code = unsignedp ? LEU : LE;
10314 break;
10315 case GT_EXPR:
10316 if (! unsignedp && integer_all_onesp (arg1))
10317 arg1 = integer_zero_node, code = GE;
10318 else
10319 code = unsignedp ? GTU : GT;
10320 break;
10321 case GE_EXPR:
10322 if (integer_onep (arg1))
10323 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10324 else
10325 code = unsignedp ? GEU : GE;
10326 break;
10327
10328 case UNORDERED_EXPR:
10329 code = UNORDERED;
10330 break;
10331 case ORDERED_EXPR:
10332 code = ORDERED;
10333 break;
10334 case UNLT_EXPR:
10335 code = UNLT;
10336 break;
10337 case UNLE_EXPR:
10338 code = UNLE;
10339 break;
10340 case UNGT_EXPR:
10341 code = UNGT;
10342 break;
10343 case UNGE_EXPR:
10344 code = UNGE;
10345 break;
10346 case UNEQ_EXPR:
10347 code = UNEQ;
10348 break;
10349
10350 default:
10351 abort ();
10352 }
10353
10354 /* Put a constant second. */
10355 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10356 {
10357 tem = arg0; arg0 = arg1; arg1 = tem;
10358 code = swap_condition (code);
10359 }
10360
10361 /* If this is an equality or inequality test of a single bit, we can
10362 do this by shifting the bit being tested to the low-order bit and
10363 masking the result with the constant 1. If the condition was EQ,
10364 we xor it with 1. This does not require an scc insn and is faster
10365 than an scc insn even if we have it. */
10366
10367 if ((code == NE || code == EQ)
10368 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10369 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10370 {
10371 tree inner = TREE_OPERAND (arg0, 0);
10372 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10373 int ops_unsignedp;
10374
10375 /* If INNER is a right shift of a constant and it plus BITNUM does
10376 not overflow, adjust BITNUM and INNER. */
10377
10378 if (TREE_CODE (inner) == RSHIFT_EXPR
10379 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10380 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10381 && bitnum < TYPE_PRECISION (type)
10382 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10383 bitnum - TYPE_PRECISION (type)))
10384 {
10385 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10386 inner = TREE_OPERAND (inner, 0);
10387 }
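      /* For example, a test of ((x >> 2) & 1) reaches this point with
	 BITNUM 0; the adjustment turns it into a test of bit 2 of x itself.  */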
10388
10389 /* If we are going to be able to omit the AND below, we must do our
10390 operations as unsigned. If we must use the AND, we have a choice.
10391 Normally unsigned is faster, but on some machines signed is faster. */
10392 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10393 #ifdef LOAD_EXTEND_OP
10394 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10395 #else
10396 : 1
10397 #endif
10398 );
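      /* ??? Presumably the LOAD_EXTEND_OP test prefers the signedness in
	 which sub-word memory loads already arrive, so that OP0 need not
	 be extended again.  */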
10399
10400 if (! get_subtarget (subtarget)
10401 || GET_MODE (subtarget) != operand_mode
10402 || ! safe_from_p (subtarget, inner, 1))
10403 subtarget = 0;
10404
10405 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10406
10407 if (bitnum != 0)
10408 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10409 size_int (bitnum), subtarget, ops_unsignedp);
10410
10411 if (GET_MODE (op0) != mode)
10412 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10413
10414 if ((code == EQ && ! invert) || (code == NE && invert))
10415 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10416 ops_unsignedp, OPTAB_LIB_WIDEN);
10417
10418 /* Put the AND last so it can combine with more things. */
10419 if (bitnum != TYPE_PRECISION (type) - 1)
10420 op0 = expand_and (op0, const1_rtx, subtarget);
10421
10422 return op0;
10423 }
10424
10425 /* Now see if we are likely to be able to do this. Return 0 if not. */
10426 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10427 return 0;
10428
10429 icode = setcc_gen_code[(int) code];
10430 if (icode == CODE_FOR_nothing
10431 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10432 {
10433 /* We can only do this if it is one of the special cases that
10434 can be handled without an scc insn. */
10435 if ((code == LT && integer_zerop (arg1))
10436 || (! only_cheap && code == GE && integer_zerop (arg1)))
10437 ;
10438 else if (BRANCH_COST >= 0
10439 && ! only_cheap && (code == NE || code == EQ)
10440 && TREE_CODE (type) != REAL_TYPE
10441 && ((abs_optab->handlers[(int) operand_mode].insn_code
10442 != CODE_FOR_nothing)
10443 || (ffs_optab->handlers[(int) operand_mode].insn_code
10444 != CODE_FOR_nothing)))
10445 ;
10446 else
10447 return 0;
10448 }
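      /* ??? Presumably these are the comparisons emit_store_flag can
	 synthesize without an scc pattern: "x < 0" and "x >= 0" from the
	 sign bit, and "x == 0" / "x != 0" with help from abs or ffs.  */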
10449
10450 if (! get_subtarget (target)
10451 || GET_MODE (subtarget) != operand_mode
10452 || ! safe_from_p (subtarget, arg1, 1))
10453 subtarget = 0;
10454
10455 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10456 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10457
10458 if (target == 0)
10459 target = gen_reg_rtx (mode);
10460
10461 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10462 because, if emit_store_flag does anything at all, it will succeed and
10463 OP0 and OP1 will not be used subsequently. */
10464
10465 result = emit_store_flag (target, code,
10466 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10467 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10468 operand_mode, unsignedp, 1);
10469
10470 if (result)
10471 {
10472 if (invert)
10473 result = expand_binop (mode, xor_optab, result, const1_rtx,
10474 result, 0, OPTAB_LIB_WIDEN);
10475 return result;
10476 }
10477
10478 /* If this failed, we have to do it with set/compare/jump/set code. */
10479 if (GET_CODE (target) != REG
10480 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10481 target = gen_reg_rtx (GET_MODE (target));
10482
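  /* The sequence emitted below is, in effect:
	 TARGET = 1; if (OP0 <code> OP1) goto LABEL; TARGET = 0; LABEL:
     (with the two constants swapped when INVERT is set).  */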
10483 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10484 result = compare_from_rtx (op0, op1, code, unsignedp,
10485 operand_mode, NULL_RTX);
10486 if (GET_CODE (result) == CONST_INT)
10487 return (((result == const0_rtx && ! invert)
10488 || (result != const0_rtx && invert))
10489 ? const0_rtx : const1_rtx);
10490
10491 label = gen_label_rtx ();
10492 if (bcc_gen_fctn[(int) code] == 0)
10493 abort ();
10494
10495 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10496 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10497 emit_label (label);
10498
10499 return target;
10500 }
10501 \f
10502
10503 /* Stubs in case we haven't got a casesi insn. */
10504 #ifndef HAVE_casesi
10505 # define HAVE_casesi 0
10506 # define gen_casesi(a, b, c, d, e) (0)
10507 # define CODE_FOR_casesi CODE_FOR_nothing
10508 #endif
10509
10510 /* If the machine does not have a case insn that compares the bounds,
10511 this means extra overhead for dispatch tables, which raises the
10512 threshold for using them. */
10513 #ifndef CASE_VALUES_THRESHOLD
10514 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10515 #endif /* CASE_VALUES_THRESHOLD */
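/* That is, by default a switch needs at least four distinct case values
   (five when there is no casesi pattern) before a dispatch table is
   considered worthwhile.  */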
10516
10517 unsigned int
10518 case_values_threshold ()
10519 {
10520 return CASE_VALUES_THRESHOLD;
10521 }
10522
10523 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10524 0 otherwise (i.e. if there is no casesi instruction). */
10525 int
10526 try_casesi (index_type, index_expr, minval, range,
10527 table_label, default_label)
10528 tree index_type, index_expr, minval, range;
10529 rtx table_label ATTRIBUTE_UNUSED;
10530 rtx default_label;
10531 {
10532 enum machine_mode index_mode = SImode;
10533 int index_bits = GET_MODE_BITSIZE (index_mode);
10534 rtx op1, op2, index;
10535 enum machine_mode op_mode;
10536
10537 if (! HAVE_casesi)
10538 return 0;
10539
10540 /* Convert the index to SImode. */
10541 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10542 {
10543 enum machine_mode omode = TYPE_MODE (index_type);
10544 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10545
10546 /* We must handle the endpoints in the original mode. */
10547 index_expr = build (MINUS_EXPR, index_type,
10548 index_expr, minval);
10549 minval = integer_zero_node;
10550 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10551 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10552 omode, 1, default_label);
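      /* An out-of-range index is caught here, while still in the wide
	 mode, so the truncation below cannot lose significant bits.  */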
10553 /* Now we can safely truncate. */
10554 index = convert_to_mode (index_mode, index, 0);
10555 }
10556 else
10557 {
10558 if (TYPE_MODE (index_type) != index_mode)
10559 {
10560 index_expr = convert (type_for_size (index_bits, 0),
10561 index_expr);
10562 index_type = TREE_TYPE (index_expr);
10563 }
10564
10565 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10566 }
10567 emit_queue ();
10568 index = protect_from_queue (index, 0);
10569 do_pending_stack_adjust ();
10570
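  /* Each casesi operand must satisfy the predicate recorded in insn_data;
     any operand that does not is copied into a register of the mode the
     pattern expects.  */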
10571 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10572 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10573 (index, op_mode))
10574 index = copy_to_mode_reg (op_mode, index);
10575
10576 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10577
10578 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10579 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10580 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10581 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10582 (op1, op_mode))
10583 op1 = copy_to_mode_reg (op_mode, op1);
10584
10585 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10586
10587 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10588 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10589 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10590 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10591 (op2, op_mode))
10592 op2 = copy_to_mode_reg (op_mode, op2);
10593
10594 emit_jump_insn (gen_casesi (index, op1, op2,
10595 table_label, default_label));
10596 return 1;
10597 }
10598
10599 /* Attempt to generate a tablejump instruction; same concept. */
10600 #ifndef HAVE_tablejump
10601 #define HAVE_tablejump 0
10602 #define gen_tablejump(x, y) (0)
10603 #endif
10604
10605 /* Subroutine of the next function.
10606
10607 INDEX is the value being switched on, with the lowest value
10608 in the table already subtracted.
10609 MODE is its expected mode (needed if INDEX is constant).
10610 RANGE is the length of the jump table.
10611 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10612
10613 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10614 index value is out of range. */
10615
10616 static void
10617 do_tablejump (index, mode, range, table_label, default_label)
10618 rtx index, range, table_label, default_label;
10619 enum machine_mode mode;
10620 {
10621 rtx temp, vector;
10622
10623 /* Do an unsigned comparison (in the proper mode) between the index
10624 expression and the value which represents the length of the range.
10625 Since we just finished subtracting the lower bound of the range
10626 from the index expression, this comparison allows us to simultaneously
10627 check that the original index expression value is both greater than
10628 or equal to the minimum value of the range and less than or equal to
10629 the maximum value of the range. */
10630
10631 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10632 default_label);
10633
10634 /* If index is in range, it must fit in Pmode.
10635 Convert to Pmode so we can index with it. */
10636 if (mode != Pmode)
10637 index = convert_to_mode (Pmode, index, 1);
10638
10639 /* Don't let a MEM slip through, because then the INDEX that comes
10640 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10641 and break_out_memory_refs will go to work on it and mess it up. */
10642 #ifdef PIC_CASE_VECTOR_ADDRESS
10643 if (flag_pic && GET_CODE (index) != REG)
10644 index = copy_to_mode_reg (Pmode, index);
10645 #endif
10646
10647 /* If flag_force_addr were to affect this address
10648 it could interfere with the tricky assumptions made
10649 about addresses that contain label-refs,
10650 which may be valid only very near the tablejump itself. */
10651 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10652 GET_MODE_SIZE, because this indicates how large insns are. The other
10653 uses should all be Pmode, because they are addresses. This code
10654 could fail if addresses and insns are not the same size. */
10655 index = gen_rtx_PLUS (Pmode,
10656 gen_rtx_MULT (Pmode, index,
10657 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10658 gen_rtx_LABEL_REF (Pmode, table_label));
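  /* INDEX is now the byte address of the chosen entry: the table label
     plus INDEX scaled by the size of one table element.  */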
10659 #ifdef PIC_CASE_VECTOR_ADDRESS
10660 if (flag_pic)
10661 index = PIC_CASE_VECTOR_ADDRESS (index);
10662 else
10663 #endif
10664 index = memory_address_noforce (CASE_VECTOR_MODE, index);
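  /* Fetch the chosen entry into a fresh register; the table is constant,
     so the MEM is marked unchanging.  */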
10665 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10666 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10667 RTX_UNCHANGING_P (vector) = 1;
10668 convert_move (temp, vector, 0);
10669
10670 emit_jump_insn (gen_tablejump (temp, table_label));
10671
10672 /* If we are generating PIC code or if the table is PC-relative, the
10673 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10674 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10675 emit_barrier ();
10676 }
10677
10678 int
10679 try_tablejump (index_type, index_expr, minval, range,
10680 table_label, default_label)
10681 tree index_type, index_expr, minval, range;
10682 rtx table_label, default_label;
10683 {
10684 rtx index;
10685
10686 if (! HAVE_tablejump)
10687 return 0;
10688
10689 index_expr = fold (build (MINUS_EXPR, index_type,
10690 convert (index_type, index_expr),
10691 convert (index_type, minval)));
10692 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10693 emit_queue ();
10694 index = protect_from_queue (index, 0);
10695 do_pending_stack_adjust ();
10696
10697 do_tablejump (index, TYPE_MODE (index_type),
10698 convert_modes (TYPE_MODE (index_type),
10699 TYPE_MODE (TREE_TYPE (range)),
10700 expand_expr (range, NULL_RTX,
10701 VOIDmode, 0),
10702 TREE_UNSIGNED (TREE_TYPE (range))),
10703 table_label, default_label);
10704 return 1;
10705 }