1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check a memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;                  /* Destination of the copy (a MEM).  */
115 rtx to_addr;             /* Address of the destination.  */
116 int autinc_to;           /* Nonzero if TO_ADDR is an autoincrement address.  */
117 int explicit_inc_to;     /* Nonzero if explicit adds adjust TO_ADDR; sign gives direction.  */
118 int to_struct;           /* MEM_IN_STRUCT_P flag of the destination.  */
119 int to_readonly;         /* RTX_UNCHANGING_P flag of the destination.  */
120 rtx from;                /* Source of the copy (a MEM).  */
121 rtx from_addr;           /* Address of the source.  */
122 int autinc_from;         /* Nonzero if FROM_ADDR is an autoincrement address.  */
123 int explicit_inc_from;   /* Likewise, for FROM_ADDR.  */
124 int from_struct;         /* MEM_IN_STRUCT_P flag of the source.  */
125 int from_readonly;       /* RTX_UNCHANGING_P flag of the source.  */
126 int len;                 /* Number of bytes remaining to be copied.  */
127 int offset;              /* Current byte offset into the blocks.  */
128 int reverse;             /* Nonzero to copy from high addresses downward.  */
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 extern struct obstack permanent_obstack;
147
148 static rtx get_push_address PARAMS ((int));
149
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
156 enum machine_mode,
157 struct clear_by_pieces *));
158 static rtx get_subtarget PARAMS ((rtx));
159 static int is_zeros_p PARAMS ((tree));
160 static int mostly_zeros_p PARAMS ((tree));
161 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
162 HOST_WIDE_INT, enum machine_mode,
163 tree, tree, unsigned int, int));
164 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
165 HOST_WIDE_INT));
166 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
167 HOST_WIDE_INT, enum machine_mode,
168 tree, enum machine_mode, int,
169 unsigned int, HOST_WIDE_INT, int));
170 static enum memory_use_mode
171 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
172 static tree save_noncopied_parts PARAMS ((tree, tree));
173 static tree init_noncopied_parts PARAMS ((tree, tree));
174 static int safe_from_p PARAMS ((rtx, tree, int));
175 static int fixed_type_p PARAMS ((tree));
176 static rtx var_rtx PARAMS ((tree));
177 static int readonly_fields_p PARAMS ((tree));
178 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
179 static rtx expand_increment PARAMS ((tree, int, int));
180 static void preexpand_calls PARAMS ((tree));
181 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
182 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
183 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
184 rtx, rtx));
185 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
186
187 /* Record for each mode whether we can move a register directly to or
188 from an object of that mode in memory. If we can't, we won't try
189 to use that mode directly when accessing a field of that mode. */
190
191 static char direct_load[NUM_MACHINE_MODES];
192 static char direct_store[NUM_MACHINE_MODES];
193
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
196
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio.  */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
205
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
211 #endif
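
/* Illustrative note (not part of the original source): with the default
   MOVE_RATIO of 15 when not optimizing for size, a word-aligned copy of
   32 bytes on a 32-bit target takes 8 SImode moves, so MOVE_BY_PIECES_P
   is nonzero and emit_block_move expands the copy inline rather than
   emitting a movstr pattern or a library call.  */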
212
213 /* This array records the insn_code of insns to perform block moves. */
214 enum insn_code movstr_optab[NUM_MACHINE_MODES];
215
216 /* This array records the insn_code of insns to perform block clears. */
217 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
218
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
220
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
223 #endif
224 \f
225 /* This is run once per compilation to set up which modes can be used
226 directly in memory and to initialize the block move optab. */
227
228 void
229 init_expr_once ()
230 {
231 rtx insn, pat;
232 enum machine_mode mode;
233 int num_clobbers;
234 rtx mem, mem1;
235 char *free_point;
236
237 start_sequence ();
238
239 /* Since we are on the permanent obstack, we must be sure we save this
240 spot AFTER we call start_sequence, since it will reuse the rtl it
241 makes. */
242 free_point = (char *) oballoc (0);
243
244 /* Try indexing by frame ptr and try by stack ptr.
245 It is known that on the Convex the stack ptr isn't a valid index.
246 With luck, one or the other is valid on any machine. */
247 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
248 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
249
250 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
251 pat = PATTERN (insn);
252
253 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
254 mode = (enum machine_mode) ((int) mode + 1))
255 {
256 int regno;
257 rtx reg;
258
259 direct_load[(int) mode] = direct_store[(int) mode] = 0;
260 PUT_MODE (mem, mode);
261 PUT_MODE (mem1, mode);
262
263 /* See if there is some register that can be used in this mode and
264 directly loaded or stored from memory. */
265
266 if (mode != VOIDmode && mode != BLKmode)
267 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
268 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
269 regno++)
270 {
271 if (! HARD_REGNO_MODE_OK (regno, mode))
272 continue;
273
274 reg = gen_rtx_REG (mode, regno);
275
276 SET_SRC (pat) = mem;
277 SET_DEST (pat) = reg;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_load[(int) mode] = 1;
280
281 SET_SRC (pat) = mem1;
282 SET_DEST (pat) = reg;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_load[(int) mode] = 1;
285
286 SET_SRC (pat) = reg;
287 SET_DEST (pat) = mem;
288 if (recog (pat, insn, &num_clobbers) >= 0)
289 direct_store[(int) mode] = 1;
290
291 SET_SRC (pat) = reg;
292 SET_DEST (pat) = mem1;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_store[(int) mode] = 1;
295 }
296 }
297
298 end_sequence ();
299 obfree (free_point);
300 }
301
302 /* This is run at the start of compiling a function. */
303
304 void
305 init_expr ()
306 {
307 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
308
309 pending_chain = 0;
310 pending_stack_adjust = 0;
311 stack_pointer_delta = 0;
312 inhibit_defer_pop = 0;
313 saveregs_value = 0;
314 apply_args_value = 0;
315 forced_labels = 0;
316 }
317
318 void
319 mark_expr_status (p)
320 struct expr_status *p;
321 {
322 if (p == NULL)
323 return;
324
325 ggc_mark_rtx (p->x_saveregs_value);
326 ggc_mark_rtx (p->x_apply_args_value);
327 ggc_mark_rtx (p->x_forced_labels);
328 }
329
330 void
331 free_expr_status (f)
332 struct function *f;
333 {
334 free (f->expr);
335 f->expr = NULL;
336 }
337
338 /* Small sanity check that the queue is empty at the end of a function. */
339
340 void
341 finish_expr_for_function ()
342 {
343 if (pending_chain)
344 abort ();
345 }
346 \f
347 /* Manage the queue of increment instructions to be output
348 for POSTINCREMENT_EXPR expressions, etc. */
349
350 /* Queue up to increment (or change) VAR later. BODY says how:
351 BODY should be the same thing you would pass to emit_insn
352 to increment right away. It will go to emit_insn later on.
353
354 The value is a QUEUED expression to be used in place of VAR
355 where you want to guarantee the pre-incrementation value of VAR. */
356
357 static rtx
358 enqueue_insn (var, body)
359 rtx var, body;
360 {
361 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
362 body, pending_chain);
363 return pending_chain;
364 }
365
366 /* Use protect_from_queue to convert a QUEUED expression
367 into something that you can put immediately into an instruction.
368 If the queued incrementation has not happened yet,
369 protect_from_queue returns the variable itself.
370 If the incrementation has happened, protect_from_queue returns a temp
371 that contains a copy of the old value of the variable.
372
373 Any time an rtx which might possibly be a QUEUED is to be put
374 into an instruction, it must be passed through protect_from_queue first.
375 QUEUED expressions are not meaningful in instructions.
376
377 Do not pass a value through protect_from_queue and then hold
378 on to it for a while before putting it in an instruction!
379 If the queue is flushed in between, incorrect code will result. */
380
381 rtx
382 protect_from_queue (x, modify)
383 register rtx x;
384 int modify;
385 {
386 register RTX_CODE code = GET_CODE (x);
387
388 #if 0 /* A QUEUED can hang around after the queue is forced out. */
389 /* Shortcut for most common case. */
390 if (pending_chain == 0)
391 return x;
392 #endif
393
394 if (code != QUEUED)
395 {
396 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
397 use of autoincrement. Make a copy of the contents of the memory
398 location rather than a copy of the address, but not if the value is
399 of mode BLKmode. Don't modify X in place since it might be
400 shared. */
401 if (code == MEM && GET_MODE (x) != BLKmode
402 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
403 {
404 register rtx y = XEXP (x, 0);
405 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
406
407 MEM_COPY_ATTRIBUTES (new, x);
408
409 if (QUEUED_INSN (y))
410 {
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
413 QUEUED_INSN (y));
414 return temp;
415 }
416 return new;
417 }
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
420 if (code == MEM)
421 {
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
424 {
425 x = copy_rtx (x);
426 XEXP (x, 0) = tem;
427 }
428 }
429 else if (code == PLUS || code == MULT)
430 {
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
434 {
435 x = copy_rtx (x);
436 XEXP (x, 0) = new0;
437 XEXP (x, 1) = new1;
438 }
439 }
440 return x;
441 }
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
446 use that copy. */
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
453 QUEUED_INSN (x));
454 return QUEUED_COPY (x);
455 }
456
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
461
462 int
463 queued_subexp_p (x)
464 rtx x;
465 {
466 register enum rtx_code code = GET_CODE (x);
467 switch (code)
468 {
469 case QUEUED:
470 return 1;
471 case MEM:
472 return queued_subexp_p (XEXP (x, 0));
473 case MULT:
474 case PLUS:
475 case MINUS:
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
478 default:
479 return 0;
480 }
481 }
482
483 /* Perform all the pending incrementations. */
484
485 void
486 emit_queue ()
487 {
488 register rtx p;
489 while ((p = pending_chain))
490 {
491 rtx body = QUEUED_BODY (p);
492
493 if (GET_CODE (body) == SEQUENCE)
494 {
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
497 }
498 else
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
501 }
502 }
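
/* Illustrative sketch (not part of the original source) of how the queue
   is typically used when expanding a postincrement such as V++; the
   variable names here are hypothetical:

       rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));

       ... use protect_from_queue (q, 0) wherever the pre-increment
       value of V is needed, immediately before putting it into an
       insn ...

       emit_queue ();

   As the comments above warn, a value returned by protect_from_queue
   must not be held across a call to emit_queue.  */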
503 \f
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
508
509 void
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
512 int unsignedp;
513 {
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
518 enum insn_code code;
519 rtx libcall;
520
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
526
527 if (to_real != from_real)
528 abort ();
529
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
532 TO here. */
533
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
541 abort ();
542
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 {
546 emit_move_insn (to, from);
547 return;
548 }
549
550 if (to_real)
551 {
552 rtx value;
553
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
555 {
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
559 {
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
562 }
563 }
564
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708
709 libcall = (rtx) 0;
710 switch (from_mode)
711 {
712 case SFmode:
713 switch (to_mode)
714 {
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
718
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
722
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
726
727 default:
728 break;
729 }
730 break;
731
732 case DFmode:
733 switch (to_mode)
734 {
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
738
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
742
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
746
747 default:
748 break;
749 }
750 break;
751
752 case XFmode:
753 switch (to_mode)
754 {
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
758
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
762
763 default:
764 break;
765 }
766 break;
767
768 case TFmode:
769 switch (to_mode)
770 {
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
774
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
778
779 default:
780 break;
781 }
782 break;
783
784 default:
785 break;
786 }
787
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
791
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
793 1, from, from_mode);
794 emit_move_insn (to, value);
795 return;
796 }
797
798 /* Now both modes are integers. */
799
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
803 {
804 rtx insns;
805 rtx lowpart;
806 rtx fill_value;
807 rtx lowfrom;
808 int i;
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
811
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
814 != CODE_FOR_nothing)
815 {
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
823 return;
824 }
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
829 {
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
835 return;
836 }
837
838 /* No special multiword conversion insn; do it by hand. */
839 start_sequence ();
840
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
843
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
846
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
850 else
851 lowpart_mode = from_mode;
852
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
854
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
857
858 /* Compute the value to put in each remaining word. */
859 if (unsignedp)
860 fill_value = const0_rtx;
861 else
862 {
863 #ifdef HAVE_slt
864 if (HAVE_slt
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
867 {
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
869 lowpart_mode, 0, 0);
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
872 }
873 else
874 #endif
875 {
876 fill_value
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
879 NULL_RTX, 0);
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
881 }
882 }
883
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
886 {
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
889
890 if (subword == 0)
891 abort ();
892
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
895 }
896
897 insns = get_insns ();
898 end_sequence ();
899
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
902 return;
903 }
904
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
908 {
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
917 return;
918 }
919
920 /* Handle pointer conversion.  */ /* SPEE 900220 */
921 if (to_mode == PQImode)
922 {
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
925
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
928 {
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_truncqipqi2 */
933 abort ();
934 }
935
936 if (from_mode == PQImode)
937 {
938 if (to_mode != QImode)
939 {
940 from = convert_to_mode (QImode, from, unsignedp);
941 from_mode = QImode;
942 }
943 else
944 {
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
947 {
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
949 return;
950 }
951 #endif /* HAVE_extendpqiqi2 */
952 abort ();
953 }
954 }
955
956 if (to_mode == PSImode)
957 {
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
960
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
963 {
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_truncsipsi2 */
968 abort ();
969 }
970
971 if (from_mode == PSImode)
972 {
973 if (to_mode != SImode)
974 {
975 from = convert_to_mode (SImode, from, unsignedp);
976 from_mode = SImode;
977 }
978 else
979 {
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
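
/* Illustrative example (not part of the original source): widening a
   SImode register SRC into a fresh DImode register, assuming the target
   provides an extendsidi2 pattern.

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 0);

   emits a sign_extend (or, with UNSIGNEDP == 1, a zero_extend) of SRC
   into DST.  Without such a pattern, the multiword path above fills the
   high word by hand inside an emit_no_conflict_block.  */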
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 register rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong thing if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (val);
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
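
/* Worked example (not part of the original source): converting the
   constant -1 from QImode to HImode, assuming HImode is no wider than
   HOST_BITS_PER_WIDE_INT.  The CONST_INT has VOIDmode, so OLDMODE
   (QImode) supplies the width to extend from:

       convert_modes (HImode, QImode, GEN_INT (-1), 1)  ==> (const_int 255)
       convert_modes (HImode, QImode, GEN_INT (-1), 0)  ==> (const_int -1)

   i.e. the unsigned conversion zero-extends the low 8 bits while the
   signed one sign-extends them.  */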
1362 \f
1363
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1366
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1370
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1373 #endif
1374
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
1380
1381 void
1382 move_by_pieces (to, from, len, align)
1383 rtx to, from;
1384 int len;
1385 unsigned int align;
1386 {
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1392
1393 data.offset = 0;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1396 data.to = to;
1397 data.from = from;
1398 data.autinc_to
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1401 data.autinc_from
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1405
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1408 data.reverse
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1411 data.len = len;
1412
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1417
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1423 {
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1428 mode = tmode;
1429
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1431 {
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1435 }
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1437 {
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1441 }
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1445 {
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1447 data.autinc_to = 1;
1448 data.explicit_inc_to = -1;
1449 }
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1451 {
1452 data.to_addr = copy_addr_to_reg (to_addr);
1453 data.autinc_to = 1;
1454 data.explicit_inc_to = 1;
1455 }
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1458 }
1459
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1463
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1466
1467 while (max_size > 1)
1468 {
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1472 mode = tmode;
1473
1474 if (mode == VOIDmode)
1475 break;
1476
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1480
1481 max_size = GET_MODE_SIZE (mode);
1482 }
1483
1484 /* The code above should have handled everything. */
1485 if (data.len > 0)
1486 abort ();
1487 }
1488
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bits) is the maximum alignment we can assume.  */
1491
1492 static int
1493 move_by_pieces_ninsns (l, align)
1494 unsigned int l;
1495 unsigned int align;
1496 {
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1499
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1503
1504 while (max_size > 1)
1505 {
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1508
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1512 mode = tmode;
1513
1514 if (mode == VOIDmode)
1515 break;
1516
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1520
1521 max_size = GET_MODE_SIZE (mode);
1522 }
1523
1524 return n_insns;
1525 }
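
/* Worked example (not part of the original source): on a 32-bit target
   with MOVE_MAX == 4 and word-aligned operands (ALIGN == 32 bits), an
   11-byte copy costs

       11 / 4 = 2 SImode moves   (3 bytes left over)
        3 / 2 = 1 HImode move    (1 byte left over)
        1 / 1 = 1 QImode move

   so move_by_pieces_ninsns returns 4, which MOVE_BY_PIECES_P then
   compares against MOVE_RATIO.  */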
1526
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1530
1531 static void
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1536 {
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1539
1540 while (data->len >= size)
1541 {
1542 if (data->reverse) data->offset -= size;
1543
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1548 data->offset))));
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1551
1552 from1
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1557 data->offset))));
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1560
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1565
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1571
1572 if (! data->reverse) data->offset += size;
1573
1574 data->len -= size;
1575 }
1576 }
1577 \f
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1581
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1583 with mode BLKmode.
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1586
1587 Return the address of the new block, if memcpy is called and returns it,
1588 0 otherwise. */
1589
1590 rtx
1591 emit_block_move (x, y, size, align)
1592 rtx x, y;
1593 rtx size;
1594 unsigned int align;
1595 {
1596 rtx retval = 0;
1597 #ifdef TARGET_MEM_FUNCTIONS
1598 static tree fn;
1599 tree call_expr, arg_list;
1600 #endif
1601
1602 if (GET_MODE (x) != BLKmode)
1603 abort ();
1604
1605 if (GET_MODE (y) != BLKmode)
1606 abort ();
1607
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1611
1612 if (GET_CODE (x) != MEM)
1613 abort ();
1614 if (GET_CODE (y) != MEM)
1615 abort ();
1616 if (size == 0)
1617 abort ();
1618
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1621 else
1622 {
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
1626
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1629
1630 /* Since this is a move insn, we don't care about volatility. */
1631 volatile_ok = 1;
1632
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1635 {
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1638
1639 if (code != CODE_FOR_nothing
1640 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1654 {
1655 rtx op2;
1656 rtx last = get_last_insn ();
1657 rtx pat;
1658
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1663
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1665 if (pat)
1666 {
1667 emit_insn (pat);
1668 volatile_ok = 0;
1669 return 0;
1670 }
1671 else
1672 delete_insns_since (last);
1673 }
1674 }
1675
1676 volatile_ok = 0;
1677
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1679
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1683
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1687
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1691 emit_queue.
1692
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1700
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1703 #else
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1707 #endif
1708
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1712
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1715
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1718
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1722 {
1723 tree fntype;
1724
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1739 pop_obstacks ();
1740 }
1741
1742 /* We need to make an argument list for the function call.
1743
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1746 arg_list
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1755
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1761
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1763 #else
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1769 #endif
1770 }
1771
1772 return retval;
1773 }
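
/* Illustrative summary (not part of the original source): a small copy
   with a constant, well-aligned size such as

       emit_block_move (x, y, GEN_INT (16), 32);

   is normally expanded inline by move_by_pieces; larger or variable
   sizes try the movstrM patterns next, and finally fall back to a call
   to memcpy (or to bcopy when TARGET_MEM_FUNCTIONS is not defined).  */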
1774 \f
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
1777
1778 void
1779 move_block_to_reg (regno, x, nregs, mode)
1780 int regno;
1781 rtx x;
1782 int nregs;
1783 enum machine_mode mode;
1784 {
1785 int i;
1786 #ifdef HAVE_load_multiple
1787 rtx pat;
1788 rtx last;
1789 #endif
1790
1791 if (nregs == 0)
1792 return;
1793
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1796
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1800 {
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1803 GEN_INT (nregs));
1804 if (pat)
1805 {
1806 emit_insn (pat);
1807 return;
1808 }
1809 else
1810 delete_insns_since (last);
1811 }
1812 #endif
1813
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1817 }
1818
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1822
1823
1824 void
1825 move_block_from_reg (regno, x, nregs, size)
1826 int regno;
1827 rtx x;
1828 int nregs;
1829 int size;
1830 {
1831 int i;
1832 #ifdef HAVE_store_multiple
1833 rtx pat;
1834 rtx last;
1835 #endif
1836 enum machine_mode mode;
1837
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1842 {
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1845 return;
1846 }
1847
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1852 {
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1854 rtx shift;
1855
1856 if (tem == 0)
1857 abort ();
1858
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1864 return;
1865 }
1866
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1870 {
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1873 GEN_INT (nregs));
1874 if (pat)
1875 {
1876 emit_insn (pat);
1877 return;
1878 }
1879 else
1880 delete_insns_since (last);
1881 }
1882 #endif
1883
1884 for (i = 0; i < nregs; i++)
1885 {
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1892 }
1893 }
1894
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1898 SRC in bits. */
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1904 would be needed. */
1905
1906 void
1907 emit_group_load (dst, orig_src, ssize, align)
1908 rtx dst, orig_src;
1909 unsigned int align;
1910 int ssize;
1911 {
1912 rtx *tmps, src;
1913 int start, i;
1914
1915 if (GET_CODE (dst) != PARALLEL)
1916 abort ();
1917
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1921 start = 0;
1922 else
1923 start = 1;
1924
1925 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1926
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1929 src = orig_src;
1930 if (GET_CODE (src) != MEM)
1931 {
1932 if (GET_MODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1934 else
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1937 }
1938
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1941 {
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1945 int shift = 0;
1946
1947 /* Handle trailing fragments that run over the size of the struct. */
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1949 {
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1952 if (bytelen <= 0)
1953 abort ();
1954 }
1955
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1961 {
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1966 bytepos)));
1967 }
1968 else if (GET_CODE (src) == CONCAT)
1969 {
1970 if (bytepos == 0
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1976 else
1977 abort ();
1978 }
1979 else
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1983
1984 if (BYTES_BIG_ENDIAN && shift)
1985 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1987 }
1988
1989 emit_queue ();
1990
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1994 }
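
/* Illustrative sketch of a caller (hypothetical RTL, not from the
   sources): a value returned in two registers might be described as

       dst = (parallel [(expr_list (reg:DI 3) (const_int 0))
                        (expr_list (reg:DI 4) (const_int 8))])

   The loop above then loads bytes 0-7 of ORIG_SRC into (reg:DI 3) and
   bytes 8-15 into (reg:DI 4), using a plain move when the source is
   suitably aligned memory and falling back to extract_bit_field
   otherwise.  */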
1995
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1999
2000 void
2001 emit_group_store (orig_dst, src, ssize, align)
2002 rtx orig_dst, src;
2003 int ssize;
2004 unsigned int align;
2005 {
2006 rtx *tmps, dst;
2007 int start, i;
2008
2009 if (GET_CODE (src) != PARALLEL)
2010 abort ();
2011
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2015 start = 0;
2016 else
2017 start = 1;
2018
2019 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2020
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2023 {
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2027 }
2028 emit_queue ();
2029
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2032 dst = orig_dst;
2033 if (GET_CODE (dst) == PARALLEL)
2034 {
2035 rtx temp;
2036
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2041 return;
2042
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2045 the temporary. */
2046
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2050 return;
2051 }
2052 else if (GET_CODE (dst) != MEM)
2053 {
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2057 }
2058 else if (! MEM_IN_STRUCT_P (dst))
2059 {
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2062
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2065 }
2066
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2073
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2076 {
2077 if (BYTES_BIG_ENDIAN)
2078 {
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2082 }
2083 bytelen = ssize - bytepos;
2084 }
2085
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2093 bytepos)),
2094 tmps[i]);
2095 else
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2098 }
2099
2100 emit_queue ();
2101
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2105 }
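
/* Usage note (illustrative): emit_group_store is the inverse of
   emit_group_load; for the PARALLEL sketched after that function it
   would copy (reg:DI 3) into bytes 0-7 of ORIG_DST and (reg:DI 4) into
   bytes 8-15, going through store_bit_field when the destination is
   not sufficiently aligned memory.  */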
2106
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2110
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
2115
2116 rtx
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2118 rtx tgtblk;
2119 rtx srcreg;
2120 tree type;
2121 {
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2126
2127 if (tgtblk == 0)
2128 {
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2132 }
2133
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2139
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2147
2148 /* Copy the structure BITSIZE bits at a time.
2149
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2152 time. */
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2156 {
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2163
2164 /* We need a new destination operand each time bitpos is on
2165 a word boundary. */
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
2169 /* Use xbitpos for the source extraction (right justified) and
2170 bitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2177 }
2178
2179 return tgtblk;
2180 }
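
/* Worked example for the big-endian correction above (illustrative,
   assuming BITS_PER_WORD == 32 and UNITS_PER_WORD == 4): for a 6-byte
   structure, bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2 * 8 == 16.  The first extraction therefore starts 16 bits into
   the source word, skipping the unused high-order bytes, while the
   corresponding store into TGTBLK still begins at bit 0.  */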
2181
2182 /* Add a USE expression for REG to the (possibly empty) list pointed
2183 to by CALL_FUSAGE. REG must denote a hard register. */
2184
2185 void
2186 use_reg (call_fusage, reg)
2187 rtx *call_fusage, reg;
2188 {
2189 if (GET_CODE (reg) != REG
2190 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2191 abort ();
2192
2193 *call_fusage
2194 = gen_rtx_EXPR_LIST (VOIDmode,
2195 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2196 }
2197
2198 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2199 starting at REGNO. All of these registers must be hard registers. */
2200
2201 void
2202 use_regs (call_fusage, regno, nregs)
2203 rtx *call_fusage;
2204 int regno;
2205 int nregs;
2206 {
2207 int i;
2208
2209 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2210 abort ();
2211
2212 for (i = 0; i < nregs; i++)
2213 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2214 }
2215
2216 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2217 PARALLEL REGS. This is for calls that pass values in multiple
2218 non-contiguous locations. The Irix 6 ABI has examples of this. */
2219
2220 void
2221 use_group_regs (call_fusage, regs)
2222 rtx *call_fusage;
2223 rtx regs;
2224 {
2225 int i;
2226
2227 for (i = 0; i < XVECLEN (regs, 0); i++)
2228 {
2229 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2230
2231 /* A NULL entry means the parameter goes both on the stack and in
2232 registers. This can also be a MEM for targets that pass values
2233 partially on the stack and partially in registers. */
2234 if (reg != 0 && GET_CODE (reg) == REG)
2235 use_reg (call_fusage, reg);
2236 }
2237 }
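
/* Illustrative sketch (hypothetical register numbers): after

       use_regs (&call_fusage, 4, 2);

   CALL_FUSAGE points to
   (expr_list (use (reg 5)) (expr_list (use (reg 4)) <old list>)),
   one USE per hard register, each consed onto the front of the list.  */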
2238 \f
2239 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2240 rtx with BLKmode). The caller must pass TO through protect_from_queue
2241 before calling. ALIGN is maximum alignment we can assume. */
2242
2243 static void
2244 clear_by_pieces (to, len, align)
2245 rtx to;
2246 int len;
2247 unsigned int align;
2248 {
2249 struct clear_by_pieces data;
2250 rtx to_addr = XEXP (to, 0);
2251 unsigned int max_size = MOVE_MAX_PIECES + 1;
2252 enum machine_mode mode = VOIDmode, tmode;
2253 enum insn_code icode;
2254
2255 data.offset = 0;
2256 data.to_addr = to_addr;
2257 data.to = to;
2258 data.autinc_to
2259 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2260 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2261
2262 data.explicit_inc_to = 0;
2263 data.reverse
2264 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2265 if (data.reverse) data.offset = len;
2266 data.len = len;
2267
2268 data.to_struct = MEM_IN_STRUCT_P (to);
2269
2270 /* If copying requires more than two move insns,
2271 copy addresses to registers (to make displacements shorter)
2272 and use post-increment if available. */
2273 if (!data.autinc_to
2274 && move_by_pieces_ninsns (len, align) > 2)
2275 {
2276 /* Determine the main mode we'll be using. */
2277 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2278 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2279 if (GET_MODE_SIZE (tmode) < max_size)
2280 mode = tmode;
2281
2282 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = -1;
2287 }
2288 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2289 {
2290 data.to_addr = copy_addr_to_reg (to_addr);
2291 data.autinc_to = 1;
2292 data.explicit_inc_to = 1;
2293 }
2294 if (!data.autinc_to && CONSTANT_P (to_addr))
2295 data.to_addr = copy_addr_to_reg (to_addr);
2296 }
2297
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2301
2302 /* First move what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2304
2305 while (max_size > 1)
2306 {
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2310 mode = tmode;
2311
2312 if (mode == VOIDmode)
2313 break;
2314
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2317 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2318
2319 max_size = GET_MODE_SIZE (mode);
2320 }
2321
2322 /* The code above should have handled everything. */
2323 if (data.len != 0)
2324 abort ();
2325 }
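
/* Worked example (illustrative, assuming SImode is the widest mode the
   target can move and the block is word aligned): clearing LEN == 7
   bytes proceeds from wide to narrow modes, emitting one SImode store
   of zero at offset 0, one HImode store at offset 4 and one QImode
   store at offset 6, after which data.len reaches 0.  */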
2326
2327 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2328 with move instructions for mode MODE. GENFUN is the gen_... function
2329 to make a move insn for that mode. DATA has all the other info. */
2330
2331 static void
2332 clear_by_pieces_1 (genfun, mode, data)
2333 rtx (*genfun) PARAMS ((rtx, ...));
2334 enum machine_mode mode;
2335 struct clear_by_pieces *data;
2336 {
2337 register int size = GET_MODE_SIZE (mode);
2338 register rtx to1;
2339
2340 while (data->len >= size)
2341 {
2342 if (data->reverse) data->offset -= size;
2343
2344 to1 = (data->autinc_to
2345 ? gen_rtx_MEM (mode, data->to_addr)
2346 : copy_rtx (change_address (data->to, mode,
2347 plus_constant (data->to_addr,
2348 data->offset))));
2349 MEM_IN_STRUCT_P (to1) = data->to_struct;
2350
2351 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2352 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2353
2354 emit_insn ((*genfun) (to1, const0_rtx));
2355 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2356 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2357
2358 if (! data->reverse) data->offset += size;
2359
2360 data->len -= size;
2361 }
2362 }
2363 \f
2364 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2365 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2366
2367 If we call a function that returns the length of the block, return it. */
2368
2369 rtx
2370 clear_storage (object, size, align)
2371 rtx object;
2372 rtx size;
2373 unsigned int align;
2374 {
2375 #ifdef TARGET_MEM_FUNCTIONS
2376 static tree fn;
2377 tree call_expr, arg_list;
2378 #endif
2379 rtx retval = 0;
2380
2381 if (GET_MODE (object) == BLKmode)
2382 {
2383 object = protect_from_queue (object, 1);
2384 size = protect_from_queue (size, 0);
2385
2386 if (GET_CODE (size) == CONST_INT
2387 && MOVE_BY_PIECES_P (INTVAL (size), align))
2388 clear_by_pieces (object, INTVAL (size), align);
2389 else
2390 {
2391 /* Try the most limited insn first, because there's no point
2392 including more than one in the machine description unless
2393 the more limited one has some advantage. */
2394
2395 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2396 enum machine_mode mode;
2397
2398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2399 mode = GET_MODE_WIDER_MODE (mode))
2400 {
2401 enum insn_code code = clrstr_optab[(int) mode];
2402 insn_operand_predicate_fn pred;
2403
2404 if (code != CODE_FOR_nothing
2405 /* We don't need MODE to be narrower than
2406 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2407 the mode mask, as it is returned by the macro, it will
2408 definitely be less than the actual mode mask. */
2409 && ((GET_CODE (size) == CONST_INT
2410 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2411 <= (GET_MODE_MASK (mode) >> 1)))
2412 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2413 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2414 || (*pred) (object, BLKmode))
2415 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2416 || (*pred) (opalign, VOIDmode)))
2417 {
2418 rtx op1;
2419 rtx last = get_last_insn ();
2420 rtx pat;
2421
2422 op1 = convert_to_mode (mode, size, 1);
2423 pred = insn_data[(int) code].operand[1].predicate;
2424 if (pred != 0 && ! (*pred) (op1, mode))
2425 op1 = copy_to_mode_reg (mode, op1);
2426
2427 pat = GEN_FCN ((int) code) (object, op1, opalign);
2428 if (pat)
2429 {
2430 emit_insn (pat);
2431 return 0;
2432 }
2433 else
2434 delete_insns_since (last);
2435 }
2436 }
2437
2438 /* OBJECT or SIZE may have been passed through protect_from_queue.
2439
2440 It is unsafe to save the value generated by protect_from_queue
2441 and reuse it later. Consider what happens if emit_queue is
2442 called before the return value from protect_from_queue is used.
2443
2444 Expansion of the CALL_EXPR below will call emit_queue before
2445 we are finished emitting RTL for argument setup. So if we are
2446 not careful we could get the wrong value for an argument.
2447
2448 To avoid this problem we go ahead and emit code to copy OBJECT
2449 and SIZE into new pseudos. We can then place those new pseudos
2450 into an RTL_EXPR and use them later, even after a call to
2451 emit_queue.
2452
2453 Note this is not strictly needed for library calls since they
2454 do not call emit_queue before loading their arguments. However,
2455 we may need to have library calls call emit_queue in the future
2456 since failing to do so could cause problems for targets which
2457 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2458 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2459
2460 #ifdef TARGET_MEM_FUNCTIONS
2461 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2462 #else
2463 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2464 TREE_UNSIGNED (integer_type_node));
2465 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2466 #endif
2467
2468
2469 #ifdef TARGET_MEM_FUNCTIONS
2470 /* It is incorrect to use the libcall calling conventions to call
2471 memset in this context.
2472
2473 This could be a user call to memset and the user may wish to
2474 examine the return value from memset.
2475
2476 For targets where libcalls and normal calls have different
2477 conventions for returning pointers, we could end up generating
2478 incorrect code.
2479
2480 So instead of using a libcall sequence we build up a suitable
2481 CALL_EXPR and expand the call in the normal fashion. */
2482 if (fn == NULL_TREE)
2483 {
2484 tree fntype;
2485
2486 /* This was copied from except.c, I don't know if all this is
2487 necessary in this context or not. */
2488 fn = get_identifier ("memset");
2489 push_obstacks_nochange ();
2490 end_temporary_allocation ();
2491 fntype = build_pointer_type (void_type_node);
2492 fntype = build_function_type (fntype, NULL_TREE);
2493 fn = build_decl (FUNCTION_DECL, fn, fntype);
2494 ggc_add_tree_root (&fn, 1);
2495 DECL_EXTERNAL (fn) = 1;
2496 TREE_PUBLIC (fn) = 1;
2497 DECL_ARTIFICIAL (fn) = 1;
2498 make_decl_rtl (fn, NULL_PTR, 1);
2499 assemble_external (fn);
2500 pop_obstacks ();
2501 }
2502
2503 /* We need to make an argument list for the function call.
2504
2505 memset has three arguments, the first is a void * address, the
2506 second an integer with the initialization value, and the last is a
2507 size_t byte count for the copy. */
2508 arg_list
2509 = build_tree_list (NULL_TREE,
2510 make_tree (build_pointer_type (void_type_node),
2511 object));
2512 TREE_CHAIN (arg_list)
2513 = build_tree_list (NULL_TREE,
2514 make_tree (integer_type_node, const0_rtx));
2515 TREE_CHAIN (TREE_CHAIN (arg_list))
2516 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2517 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2518
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR,
2521 build_pointer_type (TREE_TYPE (fn)), fn);
2522 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2523 call_expr, arg_list, NULL_TREE);
2524 TREE_SIDE_EFFECTS (call_expr) = 1;
2525
2526 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2527 #else
2528 emit_library_call (bzero_libfunc, 0,
2529 VOIDmode, 2, object, Pmode, size,
2530 TYPE_MODE (integer_type_node));
2531 #endif
2532 }
2533 }
2534 else
2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2536
2537 return retval;
2538 }
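
/* Illustrative call (hypothetical operands): zeroing a 16-byte BLKmode
   stack temporary with word alignment might look like

       clear_storage (slot, GEN_INT (16), BITS_PER_WORD);

   which a small constant size normally routes through clear_by_pieces;
   larger or variable sizes fall through to a clrstr pattern or to the
   memset/bzero call emitted above.  */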
2539
2540 /* Generate code to copy Y into X.
2541 Both Y and X must have the same mode, except that
2542 Y can be a constant with VOIDmode.
2543 This mode cannot be BLKmode; use emit_block_move for that.
2544
2545 Return the last instruction emitted. */
2546
2547 rtx
2548 emit_move_insn (x, y)
2549 rtx x, y;
2550 {
2551 enum machine_mode mode = GET_MODE (x);
2552
2553 x = protect_from_queue (x, 1);
2554 y = protect_from_queue (y, 0);
2555
2556 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2557 abort ();
2558
2559 /* Never force constant_p_rtx to memory. */
2560 if (GET_CODE (y) == CONSTANT_P_RTX)
2561 ;
2562 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2563 y = force_const_mem (mode, y);
2564
2565 /* If X or Y are memory references, verify that their addresses are valid
2566 for the machine. */
2567 if (GET_CODE (x) == MEM
2568 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2569 && ! push_operand (x, GET_MODE (x)))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2572 x = change_address (x, VOIDmode, XEXP (x, 0));
2573
2574 if (GET_CODE (y) == MEM
2575 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2576 || (flag_force_addr
2577 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2578 y = change_address (y, VOIDmode, XEXP (y, 0));
2579
2580 if (mode == BLKmode)
2581 abort ();
2582
2583 return emit_move_insn_1 (x, y);
2584 }
2585
2586 /* Low level part of emit_move_insn.
2587 Called just like emit_move_insn, but assumes X and Y
2588 are basically valid. */
2589
2590 rtx
2591 emit_move_insn_1 (x, y)
2592 rtx x, y;
2593 {
2594 enum machine_mode mode = GET_MODE (x);
2595 enum machine_mode submode;
2596 enum mode_class class = GET_MODE_CLASS (mode);
2597 unsigned int i;
2598
2599 if (mode >= MAX_MACHINE_MODE)
2600 abort ();
2601
2602 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2603 return
2604 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2605
2606 /* Expand complex moves by moving real part and imag part, if possible. */
2607 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2608 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2609 * BITS_PER_UNIT),
2610 (class == MODE_COMPLEX_INT
2611 ? MODE_INT : MODE_FLOAT),
2612 0))
2613 && (mov_optab->handlers[(int) submode].insn_code
2614 != CODE_FOR_nothing))
2615 {
2616 /* Don't split destination if it is a stack push. */
2617 int stack = push_operand (x, GET_MODE (x));
2618
2619 /* If this is a stack, push the highpart first, so it
2620 will be in the argument order.
2621
2622 In that case, change_address is used only to convert
2623 the mode, not to change the address. */
2624 if (stack)
2625 {
2626 /* Note that the real part always precedes the imag part in memory
2627 regardless of machine's endianness. */
2628 #ifdef STACK_GROWS_DOWNWARD
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_imagpart (submode, y)));
2632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2633 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2634 gen_realpart (submode, y)));
2635 #else
2636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2637 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2638 gen_realpart (submode, y)));
2639 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2640 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2641 gen_imagpart (submode, y)));
2642 #endif
2643 }
2644 else
2645 {
2646 rtx realpart_x, realpart_y;
2647 rtx imagpart_x, imagpart_y;
2648
2649 /* If this is a complex value with each part being smaller than a
2650 word, the usual calling sequence will likely pack the pieces into
2651 a single register. Unfortunately, SUBREG of hard registers only
2652 deals in terms of words, so we have a problem converting input
2653 arguments to the CONCAT of two registers that is used elsewhere
2654 for complex values. If this is before reload, we can copy it into
2655 memory and reload. FIXME, we should see about using extract and
2656 insert on integer registers, but complex short and complex char
2657 variables should be rarely used. */
2658 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2659 && (reload_in_progress | reload_completed) == 0)
2660 {
2661 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2662 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2663
2664 if (packed_dest_p || packed_src_p)
2665 {
2666 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2667 ? MODE_FLOAT : MODE_INT);
2668
2669 enum machine_mode reg_mode =
2670 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2671
2672 if (reg_mode != BLKmode)
2673 {
2674 rtx mem = assign_stack_temp (reg_mode,
2675 GET_MODE_SIZE (mode), 0);
2676
2677 rtx cmem = change_address (mem, mode, NULL_RTX);
2678
2679 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2680
2681 if (packed_dest_p)
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2684 emit_move_insn_1 (cmem, y);
2685 return emit_move_insn_1 (sreg, mem);
2686 }
2687 else
2688 {
2689 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2690 emit_move_insn_1 (mem, sreg);
2691 return emit_move_insn_1 (x, cmem);
2692 }
2693 }
2694 }
2695 }
2696
2697 realpart_x = gen_realpart (submode, x);
2698 realpart_y = gen_realpart (submode, y);
2699 imagpart_x = gen_imagpart (submode, x);
2700 imagpart_y = gen_imagpart (submode, y);
2701
2702 /* Show the output dies here. This is necessary for SUBREGs
2703 of pseudos since we cannot track their lifetimes correctly;
2704 hard regs shouldn't appear here except as return values.
2705 We never want to emit such a clobber after reload. */
2706 if (x != y
2707 && ! (reload_in_progress || reload_completed)
2708 && (GET_CODE (realpart_x) == SUBREG
2709 || GET_CODE (imagpart_x) == SUBREG))
2710 {
2711 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2712 }
2713
2714 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2715 (realpart_x, realpart_y));
2716 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2717 (imagpart_x, imagpart_y));
2718 }
2719
2720 return get_last_insn ();
2721 }
2722
2723 /* This will handle any multi-word mode that lacks a move_insn pattern.
2724 However, you will get better code if you define such patterns,
2725 even if they must turn into multiple assembler instructions. */
2726 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2727 {
2728 rtx last_insn = 0;
2729 rtx seq, inner;
2730 int need_clobber;
2731
2732 #ifdef PUSH_ROUNDING
2733
2734 /* If X is a push on the stack, do the push now and replace
2735 X with a reference to the stack pointer. */
2736 if (push_operand (x, GET_MODE (x)))
2737 {
2738 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2739 x = change_address (x, VOIDmode, stack_pointer_rtx);
2740 }
2741 #endif
2742
2743 /* If we are in reload, see if either operand is a MEM whose address
2744 is scheduled for replacement. */
2745 if (reload_in_progress && GET_CODE (x) == MEM
2746 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2747 {
2748 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2749
2750 MEM_COPY_ATTRIBUTES (new, x);
2751 x = new;
2752 }
2753 if (reload_in_progress && GET_CODE (y) == MEM
2754 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2755 {
2756 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2757
2758 MEM_COPY_ATTRIBUTES (new, y);
2759 y = new;
2760 }
2761
2762 start_sequence ();
2763
2764 need_clobber = 0;
2765 for (i = 0;
2766 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2767 i++)
2768 {
2769 rtx xpart = operand_subword (x, i, 1, mode);
2770 rtx ypart = operand_subword (y, i, 1, mode);
2771
2772 /* If we can't get a part of Y, put Y into memory if it is a
2773 constant. Otherwise, force it into a register. If we still
2774 can't get a part of Y, abort. */
2775 if (ypart == 0 && CONSTANT_P (y))
2776 {
2777 y = force_const_mem (mode, y);
2778 ypart = operand_subword (y, i, 1, mode);
2779 }
2780 else if (ypart == 0)
2781 ypart = operand_subword_force (y, i, mode);
2782
2783 if (xpart == 0 || ypart == 0)
2784 abort ();
2785
2786 need_clobber |= (GET_CODE (xpart) == SUBREG);
2787
2788 last_insn = emit_move_insn (xpart, ypart);
2789 }
2790
2791 seq = gen_sequence ();
2792 end_sequence ();
2793
2794 /* Show the output dies here. This is necessary for SUBREGs
2795 of pseudos since we cannot track their lifetimes correctly;
2796 hard regs shouldn't appear here except as return values.
2797 We never want to emit such a clobber after reload. */
2798 if (x != y
2799 && ! (reload_in_progress || reload_completed)
2800 && need_clobber != 0)
2801 {
2802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2803 }
2804
2805 emit_insn (seq);
2806
2807 return last_insn;
2808 }
2809 else
2810 abort ();
2811 }
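
/* Illustrative sketch of the multi-word fallback above (assuming a
   32-bit target with no DImode move pattern): a DImode copy is expanded
   as two SImode word moves collected into a sequence, preceded by a
   (clobber x) when the destination words are SUBREGs of a pseudo, so
   that data-flow analysis does not see a partial definition.  */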
2812 \f
2813 /* Pushing data onto the stack. */
2814
2815 /* Push a block of length SIZE (perhaps variable)
2816 and return an rtx to address the beginning of the block.
2817 Note that it is not possible for the value returned to be a QUEUED.
2818 The value may be virtual_outgoing_args_rtx.
2819
2820 EXTRA is the number of bytes of padding to push in addition to SIZE.
2821 BELOW nonzero means this padding comes at low addresses;
2822 otherwise, the padding comes at high addresses. */
2823
2824 rtx
2825 push_block (size, extra, below)
2826 rtx size;
2827 int extra, below;
2828 {
2829 register rtx temp;
2830
2831 size = convert_modes (Pmode, ptr_mode, size, 1);
2832 if (CONSTANT_P (size))
2833 anti_adjust_stack (plus_constant (size, extra));
2834 else if (GET_CODE (size) == REG && extra == 0)
2835 anti_adjust_stack (size);
2836 else
2837 {
2838 temp = copy_to_mode_reg (Pmode, size);
2839 if (extra != 0)
2840 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2841 temp, 0, OPTAB_LIB_WIDEN);
2842 anti_adjust_stack (temp);
2843 }
2844
2845 #ifndef STACK_GROWS_DOWNWARD
2846 #ifdef ARGS_GROW_DOWNWARD
2847 if (!ACCUMULATE_OUTGOING_ARGS)
2848 #else
2849 if (0)
2850 #endif
2851 #else
2852 if (1)
2853 #endif
2854 {
2855 /* Return the lowest stack address when STACK or ARGS grow downward and
2856 we are not accumulating outgoing arguments (the c4x port uses such
2857 conventions). */
2858 temp = virtual_outgoing_args_rtx;
2859 if (extra != 0 && below)
2860 temp = plus_constant (temp, extra);
2861 }
2862 else
2863 {
2864 if (GET_CODE (size) == CONST_INT)
2865 temp = plus_constant (virtual_outgoing_args_rtx,
2866 - INTVAL (size) - (below ? 0 : extra));
2867 else if (extra != 0 && !below)
2868 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2869 negate_rtx (Pmode, plus_constant (size, extra)));
2870 else
2871 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2872 negate_rtx (Pmode, size));
2873 }
2874
2875 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2876 }
2877
2878 rtx
2879 gen_push_operand ()
2880 {
2881 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2882 }
2883
2884 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2885 block of SIZE bytes. */
2886
2887 static rtx
2888 get_push_address (size)
2889 int size;
2890 {
2891 register rtx temp;
2892
2893 if (STACK_PUSH_CODE == POST_DEC)
2894 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2895 else if (STACK_PUSH_CODE == POST_INC)
2896 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2897 else
2898 temp = stack_pointer_rtx;
2899
2900 return copy_to_reg (temp);
2901 }
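
/* Illustrative example (hypothetical target): with STACK_PUSH_CODE ==
   PRE_DEC the default case above applies, so the address of a block
   that has just been pushed is simply the current stack pointer copied
   into a fresh pseudo; only the post-update push codes need the
   PLUS/MINUS adjustment by SIZE.  */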
2902
2903 /* Generate code to push X onto the stack, assuming it has mode MODE and
2904 type TYPE.
2905 MODE is redundant except when X is a CONST_INT (since they don't
2906 carry mode info).
2907 SIZE is an rtx for the size of data to be copied (in bytes),
2908 needed only if X is BLKmode.
2909
2910 ALIGN is maximum alignment we can assume.
2911
2912 If PARTIAL and REG are both nonzero, then copy that many of the first
2913 words of X into registers starting with REG, and push the rest of X.
2914 The amount of space pushed is decreased by PARTIAL words,
2915 rounded *down* to a multiple of PARM_BOUNDARY.
2916 REG must be a hard register in this case.
2917 If REG is zero but PARTIAL is not, take all other actions for an
2918 argument partially in registers, but do not actually load any
2919 registers.
2920
2921 EXTRA is the amount in bytes of extra space to leave next to this arg.
2922 This is ignored if an argument block has already been allocated.
2923
2924 On a machine that lacks real push insns, ARGS_ADDR is the address of
2925 the bottom of the argument block for this call. We use indexing off there
2926 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2927 argument block has not been preallocated.
2928
2929 ARGS_SO_FAR is the size of args previously pushed for this call.
2930
2931 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2932 for arguments passed in registers. If nonzero, it will be the number
2933 of bytes required. */
2934
2935 void
2936 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2937 args_addr, args_so_far, reg_parm_stack_space,
2938 alignment_pad)
2939 register rtx x;
2940 enum machine_mode mode;
2941 tree type;
2942 rtx size;
2943 unsigned int align;
2944 int partial;
2945 rtx reg;
2946 int extra;
2947 rtx args_addr;
2948 rtx args_so_far;
2949 int reg_parm_stack_space;
2950 rtx alignment_pad;
2951 {
2952 rtx xinner;
2953 enum direction stack_direction
2954 #ifdef STACK_GROWS_DOWNWARD
2955 = downward;
2956 #else
2957 = upward;
2958 #endif
2959
2960 /* Decide where to pad the argument: `downward' for below,
2961 `upward' for above, or `none' for don't pad it.
2962 Default is below for small data on big-endian machines; else above. */
2963 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2964
2965 /* Invert direction if stack is post-update. */
2966 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2967 if (where_pad != none)
2968 where_pad = (where_pad == downward ? upward : downward);
2969
2970 xinner = x = protect_from_queue (x, 0);
2971
2972 if (mode == BLKmode)
2973 {
2974 /* Copy a block into the stack, entirely or partially. */
2975
2976 register rtx temp;
2977 int used = partial * UNITS_PER_WORD;
2978 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2979 int skip;
2980
2981 if (size == 0)
2982 abort ();
2983
2984 used -= offset;
2985
2986 /* USED is now the # of bytes we need not copy to the stack
2987 because registers will take care of them. */
2988
2989 if (partial != 0)
2990 xinner = change_address (xinner, BLKmode,
2991 plus_constant (XEXP (xinner, 0), used));
2992
2993 /* If the partial register-part of the arg counts in its stack size,
2994 skip the part of stack space corresponding to the registers.
2995 Otherwise, start copying to the beginning of the stack space,
2996 by setting SKIP to 0. */
2997 skip = (reg_parm_stack_space == 0) ? 0 : used;
2998
2999 #ifdef PUSH_ROUNDING
3000 /* Do it with several push insns if that doesn't take lots of insns
3001 and if there is no difficulty with push insns that skip bytes
3002 on the stack for alignment purposes. */
3003 if (args_addr == 0
3004 && PUSH_ARGS
3005 && GET_CODE (size) == CONST_INT
3006 && skip == 0
3007 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3008 /* Here we avoid the case of a structure whose weak alignment
3009 forces many pushes of a small amount of data,
3010 and such small pushes do rounding that causes trouble. */
3011 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3012 || align >= BIGGEST_ALIGNMENT
3013 || PUSH_ROUNDING (align) == align)
3014 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3015 {
3016 /* Push padding now if padding above and stack grows down,
3017 or if padding below and stack grows up.
3018 But if space already allocated, this has already been done. */
3019 if (extra && args_addr == 0
3020 && where_pad != none && where_pad != stack_direction)
3021 anti_adjust_stack (GEN_INT (extra));
3022
3023 stack_pointer_delta += INTVAL (size) - used;
3024 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3025 INTVAL (size) - used, align);
3026
3027 if (current_function_check_memory_usage && ! in_check_memory_usage)
3028 {
3029 rtx temp;
3030
3031 in_check_memory_usage = 1;
3032 temp = get_push_address (INTVAL (size) - used);
3033 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3034 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3035 temp, Pmode,
3036 XEXP (xinner, 0), Pmode,
3037 GEN_INT (INTVAL (size) - used),
3038 TYPE_MODE (sizetype));
3039 else
3040 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3041 temp, Pmode,
3042 GEN_INT (INTVAL (size) - used),
3043 TYPE_MODE (sizetype),
3044 GEN_INT (MEMORY_USE_RW),
3045 TYPE_MODE (integer_type_node));
3046 in_check_memory_usage = 0;
3047 }
3048 }
3049 else
3050 #endif /* PUSH_ROUNDING */
3051 {
3052 /* Otherwise make space on the stack and copy the data
3053 to the address of that space. */
3054
3055 /* Deduct words put into registers from the size we must copy. */
3056 if (partial != 0)
3057 {
3058 if (GET_CODE (size) == CONST_INT)
3059 size = GEN_INT (INTVAL (size) - used);
3060 else
3061 size = expand_binop (GET_MODE (size), sub_optab, size,
3062 GEN_INT (used), NULL_RTX, 0,
3063 OPTAB_LIB_WIDEN);
3064 }
3065
3066 /* Get the address of the stack space.
3067 In this case, we do not deal with EXTRA separately.
3068 A single stack adjust will do. */
3069 if (! args_addr)
3070 {
3071 temp = push_block (size, extra, where_pad == downward);
3072 extra = 0;
3073 }
3074 else if (GET_CODE (args_so_far) == CONST_INT)
3075 temp = memory_address (BLKmode,
3076 plus_constant (args_addr,
3077 skip + INTVAL (args_so_far)));
3078 else
3079 temp = memory_address (BLKmode,
3080 plus_constant (gen_rtx_PLUS (Pmode,
3081 args_addr,
3082 args_so_far),
3083 skip));
3084 if (current_function_check_memory_usage && ! in_check_memory_usage)
3085 {
3086 rtx target;
3087
3088 in_check_memory_usage = 1;
3089 target = copy_to_reg (temp);
3090 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3091 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3092 target, Pmode,
3093 XEXP (xinner, 0), Pmode,
3094 size, TYPE_MODE (sizetype));
3095 else
3096 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3097 target, Pmode,
3098 size, TYPE_MODE (sizetype),
3099 GEN_INT (MEMORY_USE_RW),
3100 TYPE_MODE (integer_type_node));
3101 in_check_memory_usage = 0;
3102 }
3103
3104 /* TEMP is the address of the block. Copy the data there. */
3105 if (GET_CODE (size) == CONST_INT
3106 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3107 {
3108 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3109 INTVAL (size), align);
3110 goto ret;
3111 }
3112 else
3113 {
3114 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3115 enum machine_mode mode;
3116 rtx target = gen_rtx_MEM (BLKmode, temp);
3117
3118 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3119 mode != VOIDmode;
3120 mode = GET_MODE_WIDER_MODE (mode))
3121 {
3122 enum insn_code code = movstr_optab[(int) mode];
3123 insn_operand_predicate_fn pred;
3124
3125 if (code != CODE_FOR_nothing
3126 && ((GET_CODE (size) == CONST_INT
3127 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3128 <= (GET_MODE_MASK (mode) >> 1)))
3129 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3130 && (!(pred = insn_data[(int) code].operand[0].predicate)
3131 || ((*pred) (target, BLKmode)))
3132 && (!(pred = insn_data[(int) code].operand[1].predicate)
3133 || ((*pred) (xinner, BLKmode)))
3134 && (!(pred = insn_data[(int) code].operand[3].predicate)
3135 || ((*pred) (opalign, VOIDmode))))
3136 {
3137 rtx op2 = convert_to_mode (mode, size, 1);
3138 rtx last = get_last_insn ();
3139 rtx pat;
3140
3141 pred = insn_data[(int) code].operand[2].predicate;
3142 if (pred != 0 && ! (*pred) (op2, mode))
3143 op2 = copy_to_mode_reg (mode, op2);
3144
3145 pat = GEN_FCN ((int) code) (target, xinner,
3146 op2, opalign);
3147 if (pat)
3148 {
3149 emit_insn (pat);
3150 goto ret;
3151 }
3152 else
3153 delete_insns_since (last);
3154 }
3155 }
3156 }
3157
3158 if (!ACCUMULATE_OUTGOING_ARGS)
3159 {
3160 /* If the source is referenced relative to the stack pointer,
3161 copy it to another register to stabilize it. We do not need
3162 to do this if we know that we won't be changing sp. */
3163
3164 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3165 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3166 temp = copy_to_reg (temp);
3167 }
3168
3169 /* Make inhibit_defer_pop nonzero around the library call
3170 to force it to pop the bcopy-arguments right away. */
3171 NO_DEFER_POP;
3172 #ifdef TARGET_MEM_FUNCTIONS
3173 emit_library_call (memcpy_libfunc, 0,
3174 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3175 convert_to_mode (TYPE_MODE (sizetype),
3176 size, TREE_UNSIGNED (sizetype)),
3177 TYPE_MODE (sizetype));
3178 #else
3179 emit_library_call (bcopy_libfunc, 0,
3180 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3181 convert_to_mode (TYPE_MODE (integer_type_node),
3182 size,
3183 TREE_UNSIGNED (integer_type_node)),
3184 TYPE_MODE (integer_type_node));
3185 #endif
3186 OK_DEFER_POP;
3187 }
3188 }
3189 else if (partial > 0)
3190 {
3191 /* Scalar partly in registers. */
3192
3193 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3194 int i;
3195 int not_stack;
3196 /* # words of start of argument
3197 that we must make space for but need not store. */
3198 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3199 int args_offset = INTVAL (args_so_far);
3200 int skip;
3201
3202 /* Push padding now if padding above and stack grows down,
3203 or if padding below and stack grows up.
3204 But if space already allocated, this has already been done. */
3205 if (extra && args_addr == 0
3206 && where_pad != none && where_pad != stack_direction)
3207 anti_adjust_stack (GEN_INT (extra));
3208
3209 /* If we make space by pushing it, we might as well push
3210 the real data. Otherwise, we can leave OFFSET nonzero
3211 and leave the space uninitialized. */
3212 if (args_addr == 0)
3213 offset = 0;
3214
3215 /* Now NOT_STACK gets the number of words that we don't need to
3216 allocate on the stack. */
3217 not_stack = partial - offset;
3218
3219 /* If the partial register-part of the arg counts in its stack size,
3220 skip the part of stack space corresponding to the registers.
3221 Otherwise, start copying to the beginning of the stack space,
3222 by setting SKIP to 0. */
3223 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3224
3225 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3226 x = validize_mem (force_const_mem (mode, x));
3227
3228 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3229 SUBREGs of such registers are not allowed. */
3230 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3231 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3232 x = copy_to_reg (x);
3233
3234 /* Loop over all the words allocated on the stack for this arg. */
3235 /* We can do it by words, because any scalar bigger than a word
3236 has a size a multiple of a word. */
3237 #ifndef PUSH_ARGS_REVERSED
3238 for (i = not_stack; i < size; i++)
3239 #else
3240 for (i = size - 1; i >= not_stack; i--)
3241 #endif
3242 if (i >= not_stack + offset)
3243 emit_push_insn (operand_subword_force (x, i, mode),
3244 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3245 0, args_addr,
3246 GEN_INT (args_offset + ((i - not_stack + skip)
3247 * UNITS_PER_WORD)),
3248 reg_parm_stack_space, alignment_pad);
3249 }
3250 else
3251 {
3252 rtx addr;
3253 rtx target = NULL_RTX;
3254
3255 /* Push padding now if padding above and stack grows down,
3256 or if padding below and stack grows up.
3257 But if space already allocated, this has already been done. */
3258 if (extra && args_addr == 0
3259 && where_pad != none && where_pad != stack_direction)
3260 anti_adjust_stack (GEN_INT (extra));
3261
3262 #ifdef PUSH_ROUNDING
3263 if (args_addr == 0 && PUSH_ARGS)
3264 {
3265 addr = gen_push_operand ();
3266 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3267 }
3268 else
3269 #endif
3270 {
3271 if (GET_CODE (args_so_far) == CONST_INT)
3272 addr
3273 = memory_address (mode,
3274 plus_constant (args_addr,
3275 INTVAL (args_so_far)));
3276 else
3277 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3278 args_so_far));
3279 target = addr;
3280 }
3281
3282 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3283
3284 if (current_function_check_memory_usage && ! in_check_memory_usage)
3285 {
3286 in_check_memory_usage = 1;
3287 if (target == 0)
3288 target = get_push_address (GET_MODE_SIZE (mode));
3289
3290 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3291 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3292 target, Pmode,
3293 XEXP (x, 0), Pmode,
3294 GEN_INT (GET_MODE_SIZE (mode)),
3295 TYPE_MODE (sizetype));
3296 else
3297 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3298 target, Pmode,
3299 GEN_INT (GET_MODE_SIZE (mode)),
3300 TYPE_MODE (sizetype),
3301 GEN_INT (MEMORY_USE_RW),
3302 TYPE_MODE (integer_type_node));
3303 in_check_memory_usage = 0;
3304 }
3305 }
3306
3307 ret:
3308 /* If part should go in registers, copy that part
3309 into the appropriate registers. Do this now, at the end,
3310 since mem-to-mem copies above may do function calls. */
3311 if (partial > 0 && reg != 0)
3312 {
3313 /* Handle calls that pass values in multiple non-contiguous locations.
3314 The Irix 6 ABI has examples of this. */
3315 if (GET_CODE (reg) == PARALLEL)
3316 emit_group_load (reg, x, -1, align); /* ??? size? */
3317 else
3318 move_block_to_reg (REGNO (reg), x, partial, mode);
3319 }
3320
3321 if (extra && args_addr == 0 && where_pad == stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3323
3324 if (alignment_pad && args_addr == 0)
3325 anti_adjust_stack (alignment_pad);
3326 }
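
/* Usage note (illustrative): for an argument passed partly in
   registers, PARTIAL gives the number of words that travel in REG, so
   only the remainder is copied to the stack above; the register part
   itself is loaded last, after the "ret:" label, through
   move_block_to_reg (or emit_group_load when REG is a PARALLEL),
   because the stack copy may involve calls that would clobber REG.  */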
3327 \f
3328 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3329 operations. */
3330
3331 static rtx
3332 get_subtarget (x)
3333 rtx x;
3334 {
3335 return ((x == 0
3336 /* Only registers can be subtargets. */
3337 || GET_CODE (x) != REG
3338 /* If the register is readonly, it can't be set more than once. */
3339 || RTX_UNCHANGING_P (x)
3340 /* Don't use hard regs to avoid extending their life. */
3341 || REGNO (x) < FIRST_PSEUDO_REGISTER
3342 /* Avoid subtargets inside loops,
3343 since they hide some invariant expressions. */
3344 || preserve_subexpressions_p ())
3345 ? 0 : x);
3346 }
3347
3348 /* Expand an assignment that stores the value of FROM into TO.
3349 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3350 (This may contain a QUEUED rtx;
3351 if the value is constant, this rtx is a constant.)
3352 Otherwise, the returned value is NULL_RTX.
3353
3354 SUGGEST_REG is no longer actually used.
3355 It used to mean, copy the value through a register
3356 and return that register, if that is possible.
3357 We now use WANT_VALUE to decide whether to do this. */
3358
3359 rtx
3360 expand_assignment (to, from, want_value, suggest_reg)
3361 tree to, from;
3362 int want_value;
3363 int suggest_reg ATTRIBUTE_UNUSED;
3364 {
3365 register rtx to_rtx = 0;
3366 rtx result;
3367
3368 /* Don't crash if the lhs of the assignment was erroneous. */
3369
3370 if (TREE_CODE (to) == ERROR_MARK)
3371 {
3372 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3373 return want_value ? result : NULL_RTX;
3374 }
3375
3376 /* Assignment of a structure component needs special treatment
3377 if the structure component's rtx is not simply a MEM.
3378 Assignment of an array element at a constant index, and assignment of
3379 an array element in an unaligned packed structure field, has the same
3380 problem. */
3381
3382 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3383 || TREE_CODE (to) == ARRAY_REF)
3384 {
3385 enum machine_mode mode1;
3386 HOST_WIDE_INT bitsize, bitpos;
3387 tree offset;
3388 int unsignedp;
3389 int volatilep = 0;
3390 tree tem;
3391 unsigned int alignment;
3392
3393 push_temp_slots ();
3394 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3395 &unsignedp, &volatilep, &alignment);
3396
3397 /* If we are going to use store_bit_field and extract_bit_field,
3398 make sure to_rtx will be safe for multiple use. */
3399
3400 if (mode1 == VOIDmode && want_value)
3401 tem = stabilize_reference (tem);
3402
3403 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3404 if (offset != 0)
3405 {
3406 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3407
3408 if (GET_CODE (to_rtx) != MEM)
3409 abort ();
3410
3411 if (GET_MODE (offset_rtx) != ptr_mode)
3412 {
3413 #ifdef POINTERS_EXTEND_UNSIGNED
3414 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3415 #else
3416 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3417 #endif
3418 }
3419
3420 /* A constant address in TO_RTX can have VOIDmode; we must not try
3421 to call force_reg for that case. Avoid that case. */
3422 if (GET_CODE (to_rtx) == MEM
3423 && GET_MODE (to_rtx) == BLKmode
3424 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3425 && bitsize
3426 && (bitpos % bitsize) == 0
3427 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3428 && alignment == GET_MODE_ALIGNMENT (mode1))
3429 {
3430 rtx temp = change_address (to_rtx, mode1,
3431 plus_constant (XEXP (to_rtx, 0),
3432 (bitpos /
3433 BITS_PER_UNIT)));
3434 if (GET_CODE (XEXP (temp, 0)) == REG)
3435 to_rtx = temp;
3436 else
3437 to_rtx = change_address (to_rtx, mode1,
3438 force_reg (GET_MODE (XEXP (temp, 0)),
3439 XEXP (temp, 0)));
3440 bitpos = 0;
3441 }
3442
3443 to_rtx = change_address (to_rtx, VOIDmode,
3444 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3445 force_reg (ptr_mode,
3446 offset_rtx)));
3447 }
3448
3449 if (volatilep)
3450 {
3451 if (GET_CODE (to_rtx) == MEM)
3452 {
3453 /* When the offset is zero, to_rtx is the address of the
3454 structure we are storing into, and hence may be shared.
3455 We must make a new MEM before setting the volatile bit. */
3456 if (offset == 0)
3457 to_rtx = copy_rtx (to_rtx);
3458
3459 MEM_VOLATILE_P (to_rtx) = 1;
3460 }
3461 #if 0 /* This was turned off because, when a field is volatile
3462 in an object which is not volatile, the object may be in a register,
3463 and then we would abort over here. */
3464 else
3465 abort ();
3466 #endif
3467 }
3468
3469 if (TREE_CODE (to) == COMPONENT_REF
3470 && TREE_READONLY (TREE_OPERAND (to, 1)))
3471 {
3472 if (offset == 0)
3473 to_rtx = copy_rtx (to_rtx);
3474
3475 RTX_UNCHANGING_P (to_rtx) = 1;
3476 }
3477
3478 /* Check the access. */
3479 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3480 {
3481 rtx to_addr;
3482 int size;
3483 int best_mode_size;
3484 enum machine_mode best_mode;
3485
3486 best_mode = get_best_mode (bitsize, bitpos,
3487 TYPE_ALIGN (TREE_TYPE (tem)),
3488 mode1, volatilep);
3489 if (best_mode == VOIDmode)
3490 best_mode = QImode;
3491
3492 best_mode_size = GET_MODE_BITSIZE (best_mode);
3493 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3494 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3495 size *= GET_MODE_SIZE (best_mode);
3496
3497 /* Check the access right of the pointer. */
3498 in_check_memory_usage = 1;
3499 if (size)
3500 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3501 to_addr, Pmode,
3502 GEN_INT (size), TYPE_MODE (sizetype),
3503 GEN_INT (MEMORY_USE_WO),
3504 TYPE_MODE (integer_type_node));
3505 in_check_memory_usage = 0;
3506 }
3507
3508 /* If this is a varying-length object, we must get the address of
3509 the source and do an explicit block move. */
3510 if (bitsize < 0)
3511 {
3512 unsigned int from_align;
3513 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3514 rtx inner_to_rtx
3515 = change_address (to_rtx, VOIDmode,
3516 plus_constant (XEXP (to_rtx, 0),
3517 bitpos / BITS_PER_UNIT));
3518
3519 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3520 MIN (alignment, from_align));
3521 free_temp_slots ();
3522 pop_temp_slots ();
3523 return to_rtx;
3524 }
3525 else
3526 {
3527 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3528 (want_value
3529 /* Spurious cast for HPUX compiler. */
3530 ? ((enum machine_mode)
3531 TYPE_MODE (TREE_TYPE (to)))
3532 : VOIDmode),
3533 unsignedp,
3534 alignment,
3535 int_size_in_bytes (TREE_TYPE (tem)),
3536 get_alias_set (to));
3537
3538 preserve_temp_slots (result);
3539 free_temp_slots ();
3540 pop_temp_slots ();
3541
3542 /* If the value is meaningful, convert RESULT to the proper mode.
3543 Otherwise, return nothing. */
3544 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3545 TYPE_MODE (TREE_TYPE (from)),
3546 result,
3547 TREE_UNSIGNED (TREE_TYPE (to)))
3548 : NULL_RTX);
3549 }
3550 }
3551
3552 /* If the rhs is a function call and its value is not an aggregate,
3553 call the function before we start to compute the lhs.
3554 This is needed for correct code for cases such as
3555 val = setjmp (buf) on machines where reference to val
3556 requires loading up part of an address in a separate insn.
3557
3558 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3559 since it might be a promoted variable where the zero- or sign- extension
3560 needs to be done. Handling this in the normal way is safe because no
3561 computation is done before the call. */
3562 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3563 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3564 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3565 && GET_CODE (DECL_RTL (to)) == REG))
3566 {
3567 rtx value;
3568
3569 push_temp_slots ();
3570 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3571 if (to_rtx == 0)
3572 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3573
3574 /* Handle calls that return values in multiple non-contiguous locations.
3575 The Irix 6 ABI has examples of this. */
3576 if (GET_CODE (to_rtx) == PARALLEL)
3577 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3578 TYPE_ALIGN (TREE_TYPE (from)));
3579 else if (GET_MODE (to_rtx) == BLKmode)
3580 emit_block_move (to_rtx, value, expr_size (from),
3581 TYPE_ALIGN (TREE_TYPE (from)));
3582 else
3583 {
3584 #ifdef POINTERS_EXTEND_UNSIGNED
3585 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3586 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3587 value = convert_memory_address (GET_MODE (to_rtx), value);
3588 #endif
3589 emit_move_insn (to_rtx, value);
3590 }
3591 preserve_temp_slots (to_rtx);
3592 free_temp_slots ();
3593 pop_temp_slots ();
3594 return want_value ? to_rtx : NULL_RTX;
3595 }
3596
3597 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3598 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3599
3600 if (to_rtx == 0)
3601 {
3602 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3603 if (GET_CODE (to_rtx) == MEM)
3604 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3605 }
3606
3607 /* Don't move directly into a return register. */
3608 if (TREE_CODE (to) == RESULT_DECL
3609 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3610 {
3611 rtx temp;
3612
3613 push_temp_slots ();
3614 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3615
3616 if (GET_CODE (to_rtx) == PARALLEL)
3617 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3618 TYPE_ALIGN (TREE_TYPE (from)));
3619 else
3620 emit_move_insn (to_rtx, temp);
3621
3622 preserve_temp_slots (to_rtx);
3623 free_temp_slots ();
3624 pop_temp_slots ();
3625 return want_value ? to_rtx : NULL_RTX;
3626 }
3627
3628 /* In case we are returning the contents of an object which overlaps
3629 the place the value is being stored, use a safe function when copying
3630 a value through a pointer into a structure value return block. */
3631 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3632 && current_function_returns_struct
3633 && !current_function_returns_pcc_struct)
3634 {
3635 rtx from_rtx, size;
3636
3637 push_temp_slots ();
3638 size = expr_size (from);
3639 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3640 EXPAND_MEMORY_USE_DONT);
3641
3642 /* Copy the rights of the bitmap. */
3643 if (current_function_check_memory_usage)
3644 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3645 XEXP (to_rtx, 0), Pmode,
3646 XEXP (from_rtx, 0), Pmode,
3647 convert_to_mode (TYPE_MODE (sizetype),
3648 size, TREE_UNSIGNED (sizetype)),
3649 TYPE_MODE (sizetype));
3650
3651 #ifdef TARGET_MEM_FUNCTIONS
3652 emit_library_call (memcpy_libfunc, 0,
3653 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3654 XEXP (from_rtx, 0), Pmode,
3655 convert_to_mode (TYPE_MODE (sizetype),
3656 size, TREE_UNSIGNED (sizetype)),
3657 TYPE_MODE (sizetype));
3658 #else
3659 emit_library_call (bcopy_libfunc, 0,
3660 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3661 XEXP (to_rtx, 0), Pmode,
3662 convert_to_mode (TYPE_MODE (integer_type_node),
3663 size, TREE_UNSIGNED (integer_type_node)),
3664 TYPE_MODE (integer_type_node));
3665 #endif
3666
3667 preserve_temp_slots (to_rtx);
3668 free_temp_slots ();
3669 pop_temp_slots ();
3670 return want_value ? to_rtx : NULL_RTX;
3671 }
3672
3673 /* Compute FROM and store the value in the rtx we got. */
3674
3675 push_temp_slots ();
3676 result = store_expr (from, to_rtx, want_value);
3677 preserve_temp_slots (result);
3678 free_temp_slots ();
3679 pop_temp_slots ();
3680 return want_value ? result : NULL_RTX;
3681 }
3682
3683 /* Generate code for computing expression EXP,
3684 and storing the value into TARGET.
3685 TARGET may contain a QUEUED rtx.
3686
3687 If WANT_VALUE is nonzero, return a copy of the value
3688 not in TARGET, so that we can be sure to use the proper
3689 value in a containing expression even if TARGET has something
3690 else stored in it. If possible, we copy the value through a pseudo
3691 and return that pseudo. Or, if the value is constant, we try to
3692 return the constant. In some cases, we return a pseudo
3693 copied *from* TARGET.
3694
3695 If the mode is BLKmode then we may return TARGET itself.
3696    It turns out that in BLKmode it doesn't cause a problem,
3697 because C has no operators that could combine two different
3698 assignments into the same BLKmode object with different values
3699 with no sequence point. Will other languages need this to
3700 be more thorough?
3701
3702 If WANT_VALUE is 0, we return NULL, to make sure
3703 to catch quickly any cases where the caller uses the value
3704 and fails to set WANT_VALUE. */
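/* Illustrative sketch, not part of the compiler: at the C source level,
   WANT_VALUE roughly ends up nonzero when the value of the assignment
   expression is itself used, as in the hypothetical fragment below.  */
#if 0
int a, b, c;

void
example (void)
{
  a = (b = c);	/* the inner assignment is expanded wanting its value,
		   which then feeds the outer assignment */
  b = c;	/* a plain statement is expanded with WANT_VALUE == 0 */
}
#endif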
3705
3706 rtx
3707 store_expr (exp, target, want_value)
3708 register tree exp;
3709 register rtx target;
3710 int want_value;
3711 {
3712 register rtx temp;
3713 int dont_return_target = 0;
3714
3715 if (TREE_CODE (exp) == COMPOUND_EXPR)
3716 {
3717 /* Perform first part of compound expression, then assign from second
3718 part. */
3719 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3720 emit_queue ();
3721 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3722 }
3723 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3724 {
3725       /* For a conditional expression, get a safe form of the target.  Then
3726 test the condition, doing the appropriate assignment on either
3727 side. This avoids the creation of unnecessary temporaries.
3728 For non-BLKmode, it is more efficient not to do this. */
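#if 0
      /* Illustrative source-level case, a sketch rather than compiler code;
	 the names below are hypothetical.  With a BLKmode target such as a
	 struct, the conditional is expanded as a jump plus a store on each
	 arm instead of first building the chosen value in a temporary:  */
      struct big { char data[64]; } s, s1, s2;
      int flag;
      void example (void) { s = flag ? s1 : s2; }
#endif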
3729
3730 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3731
3732 emit_queue ();
3733 target = protect_from_queue (target, 1);
3734
3735 do_pending_stack_adjust ();
3736 NO_DEFER_POP;
3737 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3738 start_cleanup_deferral ();
3739 store_expr (TREE_OPERAND (exp, 1), target, 0);
3740 end_cleanup_deferral ();
3741 emit_queue ();
3742 emit_jump_insn (gen_jump (lab2));
3743 emit_barrier ();
3744 emit_label (lab1);
3745 start_cleanup_deferral ();
3746 store_expr (TREE_OPERAND (exp, 2), target, 0);
3747 end_cleanup_deferral ();
3748 emit_queue ();
3749 emit_label (lab2);
3750 OK_DEFER_POP;
3751
3752 return want_value ? target : NULL_RTX;
3753 }
3754 else if (queued_subexp_p (target))
3755 /* If target contains a postincrement, let's not risk
3756 using it as the place to generate the rhs. */
3757 {
3758 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3759 {
3760 /* Expand EXP into a new pseudo. */
3761 temp = gen_reg_rtx (GET_MODE (target));
3762 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3763 }
3764 else
3765 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3766
3767 /* If target is volatile, ANSI requires accessing the value
3768 *from* the target, if it is accessed. So make that happen.
3769 In no case return the target itself. */
3770 if (! MEM_VOLATILE_P (target) && want_value)
3771 dont_return_target = 1;
3772 }
3773 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3774 && GET_MODE (target) != BLKmode)
3775 /* If target is in memory and caller wants value in a register instead,
3776 arrange that. Pass TARGET as target for expand_expr so that,
3777 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3778 We know expand_expr will not use the target in that case.
3779 Don't do this if TARGET is volatile because we are supposed
3780 to write it and then read it. */
3781 {
3782 temp = expand_expr (exp, target, GET_MODE (target), 0);
3783 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3784 temp = copy_to_reg (temp);
3785 dont_return_target = 1;
3786 }
3787 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3788     /* If this is a scalar in a register that is stored in a wider mode
3789 than the declared mode, compute the result into its declared mode
3790 and then convert to the wider mode. Our value is the computed
3791 expression. */
3792 {
3793 /* If we don't want a value, we can do the conversion inside EXP,
3794 which will often result in some optimizations. Do the conversion
3795 in two steps: first change the signedness, if needed, then
3796 the extend. But don't do this if the type of EXP is a subtype
3797 of something else since then the conversion might involve
3798 more than just converting modes. */
3799 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3800 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3801 {
3802 if (TREE_UNSIGNED (TREE_TYPE (exp))
3803 != SUBREG_PROMOTED_UNSIGNED_P (target))
3804 exp
3805 = convert
3806 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3807 TREE_TYPE (exp)),
3808 exp);
3809
3810 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3811 SUBREG_PROMOTED_UNSIGNED_P (target)),
3812 exp);
3813 }
3814
3815 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3816
3817 /* If TEMP is a volatile MEM and we want a result value, make
3818 the access now so it gets done only once. Likewise if
3819 it contains TARGET. */
3820 if (GET_CODE (temp) == MEM && want_value
3821 && (MEM_VOLATILE_P (temp)
3822 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3823 temp = copy_to_reg (temp);
3824
3825 /* If TEMP is a VOIDmode constant, use convert_modes to make
3826 sure that we properly convert it. */
3827 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3828 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3829 TYPE_MODE (TREE_TYPE (exp)), temp,
3830 SUBREG_PROMOTED_UNSIGNED_P (target));
3831
3832 convert_move (SUBREG_REG (target), temp,
3833 SUBREG_PROMOTED_UNSIGNED_P (target));
3834
3835 /* If we promoted a constant, change the mode back down to match
3836 target. Otherwise, the caller might get confused by a result whose
3837 mode is larger than expected. */
3838
3839 if (want_value && GET_MODE (temp) != GET_MODE (target)
3840 && GET_MODE (temp) != VOIDmode)
3841 {
3842 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3843 SUBREG_PROMOTED_VAR_P (temp) = 1;
3844 SUBREG_PROMOTED_UNSIGNED_P (temp)
3845 = SUBREG_PROMOTED_UNSIGNED_P (target);
3846 }
3847
3848 return want_value ? temp : NULL_RTX;
3849 }
3850 else
3851 {
3852 temp = expand_expr (exp, target, GET_MODE (target), 0);
3853 /* Return TARGET if it's a specified hardware register.
3854 If TARGET is a volatile mem ref, either return TARGET
3855 or return a reg copied *from* TARGET; ANSI requires this.
3856
3857 Otherwise, if TEMP is not TARGET, return TEMP
3858 if it is constant (for efficiency),
3859 or if we really want the correct value. */
3860 if (!(target && GET_CODE (target) == REG
3861 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3862 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3863 && ! rtx_equal_p (temp, target)
3864 && (CONSTANT_P (temp) || want_value))
3865 dont_return_target = 1;
3866 }
3867
3868 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3869 the same as that of TARGET, adjust the constant. This is needed, for
3870 example, in case it is a CONST_DOUBLE and we want only a word-sized
3871 value. */
3872 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3873 && TREE_CODE (exp) != ERROR_MARK
3874 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3875 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3876 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3877
3878 if (current_function_check_memory_usage
3879 && GET_CODE (target) == MEM
3880 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3881 {
3882 in_check_memory_usage = 1;
3883 if (GET_CODE (temp) == MEM)
3884 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3885 XEXP (target, 0), Pmode,
3886 XEXP (temp, 0), Pmode,
3887 expr_size (exp), TYPE_MODE (sizetype));
3888 else
3889 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3890 XEXP (target, 0), Pmode,
3891 expr_size (exp), TYPE_MODE (sizetype),
3892 GEN_INT (MEMORY_USE_WO),
3893 TYPE_MODE (integer_type_node));
3894 in_check_memory_usage = 0;
3895 }
3896
3897 /* If value was not generated in the target, store it there.
3898      Convert the value to TARGET's type first if necessary.  */
3899 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3900 one or both of them are volatile memory refs, we have to distinguish
3901 two cases:
3902 - expand_expr has used TARGET. In this case, we must not generate
3903 another copy. This can be detected by TARGET being equal according
3904 to == .
3905 - expand_expr has not used TARGET - that means that the source just
3906 happens to have the same RTX form. Since temp will have been created
3907 by expand_expr, it will compare unequal according to == .
3908 We must generate a copy in this case, to reach the correct number
3909 of volatile memory references. */
3910
3911 if ((! rtx_equal_p (temp, target)
3912 || (temp != target && (side_effects_p (temp)
3913 || side_effects_p (target))))
3914 && TREE_CODE (exp) != ERROR_MARK)
3915 {
3916 target = protect_from_queue (target, 1);
3917 if (GET_MODE (temp) != GET_MODE (target)
3918 && GET_MODE (temp) != VOIDmode)
3919 {
3920 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3921 if (dont_return_target)
3922 {
3923 /* In this case, we will return TEMP,
3924 so make sure it has the proper mode.
3925 But don't forget to store the value into TARGET. */
3926 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3927 emit_move_insn (target, temp);
3928 }
3929 else
3930 convert_move (target, temp, unsignedp);
3931 }
3932
3933 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3934 {
3935 /* Handle copying a string constant into an array.
3936 The string constant may be shorter than the array.
3937 So copy just the string's actual length, and clear the rest. */
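#if 0
	  /* Illustrative source-level case (a sketch, not compiler code):
	     only the string constant's bytes are copied and the rest of
	     the array is cleared.  */
	  char buf[8] = "hi";	/* copy the 3 bytes of "hi" (including the
				   terminating nul), then clear the other 5 */
#endif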
3938 rtx size;
3939 rtx addr;
3940
3941 /* Get the size of the data type of the string,
3942 which is actually the size of the target. */
3943 size = expr_size (exp);
3944 if (GET_CODE (size) == CONST_INT
3945 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3946 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3947 else
3948 {
3949 /* Compute the size of the data to copy from the string. */
3950 tree copy_size
3951 = size_binop (MIN_EXPR,
3952 make_tree (sizetype, size),
3953 size_int (TREE_STRING_LENGTH (exp)));
3954 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3955 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3956 VOIDmode, 0);
3957 rtx label = 0;
3958
3959 /* Copy that much. */
3960 emit_block_move (target, temp, copy_size_rtx,
3961 TYPE_ALIGN (TREE_TYPE (exp)));
3962
3963 /* Figure out how much is left in TARGET that we have to clear.
3964 Do all calculations in ptr_mode. */
3965
3966 addr = XEXP (target, 0);
3967 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3968
3969 if (GET_CODE (copy_size_rtx) == CONST_INT)
3970 {
3971 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3972 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3973 align = MIN (align, (BITS_PER_UNIT
3974 * (INTVAL (copy_size_rtx)
3975 & - INTVAL (copy_size_rtx))));
3976 }
3977 else
3978 {
3979 addr = force_reg (ptr_mode, addr);
3980 addr = expand_binop (ptr_mode, add_optab, addr,
3981 copy_size_rtx, NULL_RTX, 0,
3982 OPTAB_LIB_WIDEN);
3983
3984 size = expand_binop (ptr_mode, sub_optab, size,
3985 copy_size_rtx, NULL_RTX, 0,
3986 OPTAB_LIB_WIDEN);
3987
3988 align = BITS_PER_UNIT;
3989 label = gen_label_rtx ();
3990 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3991 GET_MODE (size), 0, 0, label);
3992 }
3993 align = MIN (align, expr_align (copy_size));
3994
3995 if (size != const0_rtx)
3996 {
3997 /* Be sure we can write on ADDR. */
3998 in_check_memory_usage = 1;
3999 if (current_function_check_memory_usage)
4000 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4001 addr, Pmode,
4002 size, TYPE_MODE (sizetype),
4003 GEN_INT (MEMORY_USE_WO),
4004 TYPE_MODE (integer_type_node));
4005 in_check_memory_usage = 0;
4006 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
4007 }
4008
4009 if (label)
4010 emit_label (label);
4011 }
4012 }
4013 /* Handle calls that return values in multiple non-contiguous locations.
4014 The Irix 6 ABI has examples of this. */
4015 else if (GET_CODE (target) == PARALLEL)
4016 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4017 TYPE_ALIGN (TREE_TYPE (exp)));
4018 else if (GET_MODE (temp) == BLKmode)
4019 emit_block_move (target, temp, expr_size (exp),
4020 TYPE_ALIGN (TREE_TYPE (exp)));
4021 else
4022 emit_move_insn (target, temp);
4023 }
4024
4025 /* If we don't want a value, return NULL_RTX. */
4026 if (! want_value)
4027 return NULL_RTX;
4028
4029 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4030 ??? The latter test doesn't seem to make sense. */
4031 else if (dont_return_target && GET_CODE (temp) != MEM)
4032 return temp;
4033
4034 /* Return TARGET itself if it is a hard register. */
4035 else if (want_value && GET_MODE (target) != BLKmode
4036 && ! (GET_CODE (target) == REG
4037 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4038 return copy_to_reg (target);
4039
4040 else
4041 return target;
4042 }
4043 \f
4044 /* Return 1 if EXP just contains zeros. */
4045
4046 static int
4047 is_zeros_p (exp)
4048 tree exp;
4049 {
4050 tree elt;
4051
4052 switch (TREE_CODE (exp))
4053 {
4054 case CONVERT_EXPR:
4055 case NOP_EXPR:
4056 case NON_LVALUE_EXPR:
4057 return is_zeros_p (TREE_OPERAND (exp, 0));
4058
4059 case INTEGER_CST:
4060 return integer_zerop (exp);
4061
4062 case COMPLEX_CST:
4063 return
4064 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4065
4066 case REAL_CST:
4067 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4068
4069 case CONSTRUCTOR:
4070 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4071 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4072 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4073 if (! is_zeros_p (TREE_VALUE (elt)))
4074 return 0;
4075
4076 return 1;
4077
4078 default:
4079 return 0;
4080 }
4081 }
4082
4083 /* Return 1 if EXP contains mostly (3/4) zeros. */
4084
4085 static int
4086 mostly_zeros_p (exp)
4087 tree exp;
4088 {
4089 if (TREE_CODE (exp) == CONSTRUCTOR)
4090 {
4091 int elts = 0, zeros = 0;
4092 tree elt = CONSTRUCTOR_ELTS (exp);
4093 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4094 {
4095 /* If there are no ranges of true bits, it is all zero. */
4096 return elt == NULL_TREE;
4097 }
4098 for (; elt; elt = TREE_CHAIN (elt))
4099 {
4100 /* We do not handle the case where the index is a RANGE_EXPR,
4101 so the statistic will be somewhat inaccurate.
4102 We do make a more accurate count in store_constructor itself,
4103 	     and since this function is only used for nested array elements,
4104 this should be close enough. */
4105 if (mostly_zeros_p (TREE_VALUE (elt)))
4106 zeros++;
4107 elts++;
4108 }
4109
4110 return 4 * zeros >= 3 * elts;
4111 }
4112
4113 return is_zeros_p (exp);
4114 }
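/* Worked example, a sketch rather than compiler code: for a constructor
   equivalent to the hypothetical initializer below, mostly_zeros_p counts
   4 elements of which 3 are zero, so 4 * 3 >= 3 * 4 holds and it returns 1.
   store_constructor can then clear the whole object first and store only
   the nonzero element.  */
#if 0
void
example (void)
{
  int v[4] = { 0, 0, 0, 5 };
  /* ... use v ... */
}
#endif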
4115 \f
4116 /* Helper function for store_constructor.
4117 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4118 TYPE is the type of the CONSTRUCTOR, not the element type.
4119 ALIGN and CLEARED are as for store_constructor.
4120
4121 This provides a recursive shortcut back to store_constructor when it isn't
4122 necessary to go through store_field. This is so that we can pass through
4123 the cleared field to let store_constructor know that we may not have to
4124 clear a substructure if the outer structure has already been cleared. */
4125
4126 static void
4127 store_constructor_field (target, bitsize, bitpos,
4128 mode, exp, type, align, cleared)
4129 rtx target;
4130 unsigned HOST_WIDE_INT bitsize;
4131 HOST_WIDE_INT bitpos;
4132 enum machine_mode mode;
4133 tree exp, type;
4134 unsigned int align;
4135 int cleared;
4136 {
4137 if (TREE_CODE (exp) == CONSTRUCTOR
4138 && bitpos % BITS_PER_UNIT == 0
4139 /* If we have a non-zero bitpos for a register target, then we just
4140 let store_field do the bitfield handling. This is unlikely to
4141 	 generate unnecessary clear instructions anyway.  */
4142 && (bitpos == 0 || GET_CODE (target) == MEM))
4143 {
4144 if (bitpos != 0)
4145 target
4146 = change_address (target,
4147 GET_MODE (target) == BLKmode
4148 || 0 != (bitpos
4149 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4150 ? BLKmode : VOIDmode,
4151 plus_constant (XEXP (target, 0),
4152 bitpos / BITS_PER_UNIT));
4153 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4154 }
4155 else
4156 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4157 int_size_in_bytes (type), 0);
4158 }
4159
4160 /* Store the value of constructor EXP into the rtx TARGET.
4161 TARGET is either a REG or a MEM.
4162 ALIGN is the maximum known alignment for TARGET.
4163 CLEARED is true if TARGET is known to have been zero'd.
4164 SIZE is the number of bytes of TARGET we are allowed to modify: this
4165 may not be the same as the size of EXP if we are assigning to a field
4166 which has been packed to exclude padding bits. */
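/* Illustrative source-level case, a sketch rather than compiler code: for
   the hypothetical automatic initializer below, the constructor has fewer
   elements than the record has fields, so the record handling further down
   clears the whole object first and then stores only the field `a'.  */
#if 0
void
example (void)
{
  struct s { int a, b, c, d; } x = { 1 };
  /* ... use x ... */
}
#endif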
4167
4168 static void
4169 store_constructor (exp, target, align, cleared, size)
4170 tree exp;
4171 rtx target;
4172 unsigned int align;
4173 int cleared;
4174 HOST_WIDE_INT size;
4175 {
4176 tree type = TREE_TYPE (exp);
4177 #ifdef WORD_REGISTER_OPERATIONS
4178 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4179 #endif
4180
4181 /* We know our target cannot conflict, since safe_from_p has been called. */
4182 #if 0
4183 /* Don't try copying piece by piece into a hard register
4184 since that is vulnerable to being clobbered by EXP.
4185 Instead, construct in a pseudo register and then copy it all. */
4186 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4187 {
4188 rtx temp = gen_reg_rtx (GET_MODE (target));
4189 store_constructor (exp, temp, align, cleared, size);
4190 emit_move_insn (target, temp);
4191 return;
4192 }
4193 #endif
4194
4195 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4196 || TREE_CODE (type) == QUAL_UNION_TYPE)
4197 {
4198 register tree elt;
4199
4200 /* Inform later passes that the whole union value is dead. */
4201 if ((TREE_CODE (type) == UNION_TYPE
4202 || TREE_CODE (type) == QUAL_UNION_TYPE)
4203 && ! cleared)
4204 {
4205 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4206
4207 /* If the constructor is empty, clear the union. */
4208 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4209 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4210 }
4211
4212 /* If we are building a static constructor into a register,
4213 set the initial value as zero so we can fold the value into
4214 a constant. But if more than one register is involved,
4215 this probably loses. */
4216 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4217 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4218 {
4219 if (! cleared)
4220 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4221
4222 cleared = 1;
4223 }
4224
4225 /* If the constructor has fewer fields than the structure
4226 or if we are initializing the structure to mostly zeros,
4227 clear the whole structure first. */
4228 else if (size > 0
4229 && ((list_length (CONSTRUCTOR_ELTS (exp))
4230 != fields_length (type))
4231 || mostly_zeros_p (exp)))
4232 {
4233 if (! cleared)
4234 clear_storage (target, GEN_INT (size), align);
4235
4236 cleared = 1;
4237 }
4238 else if (! cleared)
4239 /* Inform later passes that the old value is dead. */
4240 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4241
4242 /* Store each element of the constructor into
4243 the corresponding field of TARGET. */
4244
4245 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4246 {
4247 register tree field = TREE_PURPOSE (elt);
4248 #ifdef WORD_REGISTER_OPERATIONS
4249 tree value = TREE_VALUE (elt);
4250 #endif
4251 register enum machine_mode mode;
4252 HOST_WIDE_INT bitsize;
4253 HOST_WIDE_INT bitpos = 0;
4254 int unsignedp;
4255 tree offset;
4256 rtx to_rtx = target;
4257
4258 /* Just ignore missing fields.
4259 We cleared the whole structure, above,
4260 if any fields are missing. */
4261 if (field == 0)
4262 continue;
4263
4264 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4265 continue;
4266
4267 if (host_integerp (DECL_SIZE (field), 1))
4268 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4269 else
4270 bitsize = -1;
4271
4272 unsignedp = TREE_UNSIGNED (field);
4273 mode = DECL_MODE (field);
4274 if (DECL_BIT_FIELD (field))
4275 mode = VOIDmode;
4276
4277 offset = DECL_FIELD_OFFSET (field);
4278 if (host_integerp (offset, 0)
4279 && host_integerp (bit_position (field), 0))
4280 {
4281 bitpos = int_bit_position (field);
4282 offset = 0;
4283 }
4284 else
4285 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4286
4287 if (offset)
4288 {
4289 rtx offset_rtx;
4290
4291 if (contains_placeholder_p (offset))
4292 offset = build (WITH_RECORD_EXPR, sizetype,
4293 offset, make_tree (TREE_TYPE (exp), target));
4294
4295 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4296 if (GET_CODE (to_rtx) != MEM)
4297 abort ();
4298
4299 if (GET_MODE (offset_rtx) != ptr_mode)
4300 {
4301 #ifdef POINTERS_EXTEND_UNSIGNED
4302 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4303 #else
4304 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4305 #endif
4306 }
4307
4308 to_rtx
4309 = change_address (to_rtx, VOIDmode,
4310 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4311 force_reg (ptr_mode,
4312 offset_rtx)));
4313 align = DECL_OFFSET_ALIGN (field);
4314 }
4315
4316 if (TREE_READONLY (field))
4317 {
4318 if (GET_CODE (to_rtx) == MEM)
4319 to_rtx = copy_rtx (to_rtx);
4320
4321 RTX_UNCHANGING_P (to_rtx) = 1;
4322 }
4323
4324 #ifdef WORD_REGISTER_OPERATIONS
4325 /* If this initializes a field that is smaller than a word, at the
4326 start of a word, try to widen it to a full word.
4327 This special case allows us to output C++ member function
4328 initializations in a form that the optimizers can understand. */
4329 if (GET_CODE (target) == REG
4330 && bitsize < BITS_PER_WORD
4331 && bitpos % BITS_PER_WORD == 0
4332 && GET_MODE_CLASS (mode) == MODE_INT
4333 && TREE_CODE (value) == INTEGER_CST
4334 && exp_size >= 0
4335 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4336 {
4337 tree type = TREE_TYPE (value);
4338 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4339 {
4340 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4341 value = convert (type, value);
4342 }
4343 if (BYTES_BIG_ENDIAN)
4344 value
4345 = fold (build (LSHIFT_EXPR, type, value,
4346 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4347 bitsize = BITS_PER_WORD;
4348 mode = word_mode;
4349 }
4350 #endif
4351 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4352 TREE_VALUE (elt), type, align, cleared);
4353 }
4354 }
4355 else if (TREE_CODE (type) == ARRAY_TYPE)
4356 {
4357 register tree elt;
4358 register int i;
4359 int need_to_clear;
4360 tree domain = TYPE_DOMAIN (type);
4361 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4362 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4363 tree elttype = TREE_TYPE (type);
4364
4365 /* If the constructor has fewer elements than the array,
4366 clear the whole array first. Similarly if this is
4367      a static constructor of a non-BLKmode object.  */
4368 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4369 need_to_clear = 1;
4370 else
4371 {
4372 HOST_WIDE_INT count = 0, zero_count = 0;
4373 need_to_clear = 0;
4374 /* This loop is a more accurate version of the loop in
4375 mostly_zeros_p (it handles RANGE_EXPR in an index).
4376 It is also needed to check for missing elements. */
4377 for (elt = CONSTRUCTOR_ELTS (exp);
4378 elt != NULL_TREE;
4379 elt = TREE_CHAIN (elt))
4380 {
4381 tree index = TREE_PURPOSE (elt);
4382 HOST_WIDE_INT this_node_count;
4383
4384 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4385 {
4386 tree lo_index = TREE_OPERAND (index, 0);
4387 tree hi_index = TREE_OPERAND (index, 1);
4388
4389 if (! host_integerp (lo_index, 1)
4390 || ! host_integerp (hi_index, 1))
4391 {
4392 need_to_clear = 1;
4393 break;
4394 }
4395
4396 this_node_count = (tree_low_cst (hi_index, 1)
4397 - tree_low_cst (lo_index, 1) + 1);
4398 }
4399 else
4400 this_node_count = 1;
4401 count += this_node_count;
4402 if (mostly_zeros_p (TREE_VALUE (elt)))
4403 zero_count += this_node_count;
4404 }
4405 /* Clear the entire array first if there are any missing elements,
4406 or if the incidence of zero elements is >= 75%. */
4407 if (count < maxelt - minelt + 1
4408 || 4 * zero_count >= 3 * count)
4409 need_to_clear = 1;
4410 }
4411 if (need_to_clear && size > 0)
4412 {
4413 if (! cleared)
4414 clear_storage (target, GEN_INT (size), align);
4415 cleared = 1;
4416 }
4417 else
4418 /* Inform later passes that the old value is dead. */
4419 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4420
4421 /* Store each element of the constructor into
4422 the corresponding element of TARGET, determined
4423 by counting the elements. */
4424 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4425 elt;
4426 elt = TREE_CHAIN (elt), i++)
4427 {
4428 register enum machine_mode mode;
4429 HOST_WIDE_INT bitsize;
4430 HOST_WIDE_INT bitpos;
4431 int unsignedp;
4432 tree value = TREE_VALUE (elt);
4433 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4434 tree index = TREE_PURPOSE (elt);
4435 rtx xtarget = target;
4436
4437 if (cleared && is_zeros_p (value))
4438 continue;
4439
4440 unsignedp = TREE_UNSIGNED (elttype);
4441 mode = TYPE_MODE (elttype);
4442 if (mode == BLKmode)
4443 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4444 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4445 : -1);
4446 else
4447 bitsize = GET_MODE_BITSIZE (mode);
4448
4449 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4450 {
4451 tree lo_index = TREE_OPERAND (index, 0);
4452 tree hi_index = TREE_OPERAND (index, 1);
4453 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4454 struct nesting *loop;
4455 HOST_WIDE_INT lo, hi, count;
4456 tree position;
4457
4458 /* If the range is constant and "small", unroll the loop. */
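#if 0
	      /* Illustrative source-level case (a sketch, not compiler
		 code): a GNU C range designator is believed to produce a
		 RANGE_EXPR index and so reaches this point.  A small
		 constant range like this one is unrolled into separate
		 element stores; a large or non-constant range falls
		 through to the loop emitted in the else-branch below.  */
	      int v[16] = { [2 ... 5] = 7 };
#endif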
4459 if (host_integerp (lo_index, 0)
4460 && host_integerp (hi_index, 0)
4461 && (lo = tree_low_cst (lo_index, 0),
4462 hi = tree_low_cst (hi_index, 0),
4463 count = hi - lo + 1,
4464 (GET_CODE (target) != MEM
4465 || count <= 2
4466 || (host_integerp (TYPE_SIZE (elttype), 1)
4467 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4468 <= 40 * 8)))))
4469 {
4470 lo -= minelt; hi -= minelt;
4471 for (; lo <= hi; lo++)
4472 {
4473 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4474 store_constructor_field (target, bitsize, bitpos, mode,
4475 value, type, align, cleared);
4476 }
4477 }
4478 else
4479 {
4480 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4481 loop_top = gen_label_rtx ();
4482 loop_end = gen_label_rtx ();
4483
4484 unsignedp = TREE_UNSIGNED (domain);
4485
4486 index = build_decl (VAR_DECL, NULL_TREE, domain);
4487
4488 DECL_RTL (index) = index_r
4489 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4490 &unsignedp, 0));
4491
4492 if (TREE_CODE (value) == SAVE_EXPR
4493 && SAVE_EXPR_RTL (value) == 0)
4494 {
4495 /* Make sure value gets expanded once before the
4496 loop. */
4497 expand_expr (value, const0_rtx, VOIDmode, 0);
4498 emit_queue ();
4499 }
4500 store_expr (lo_index, index_r, 0);
4501 loop = expand_start_loop (0);
4502
4503 /* Assign value to element index. */
4504 position
4505 = convert (ssizetype,
4506 fold (build (MINUS_EXPR, TREE_TYPE (index),
4507 index, TYPE_MIN_VALUE (domain))));
4508 position = size_binop (MULT_EXPR, position,
4509 convert (ssizetype,
4510 TYPE_SIZE_UNIT (elttype)));
4511
4512 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4513 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4514 xtarget = change_address (target, mode, addr);
4515 if (TREE_CODE (value) == CONSTRUCTOR)
4516 store_constructor (value, xtarget, align, cleared,
4517 bitsize / BITS_PER_UNIT);
4518 else
4519 store_expr (value, xtarget, 0);
4520
4521 expand_exit_loop_if_false (loop,
4522 build (LT_EXPR, integer_type_node,
4523 index, hi_index));
4524
4525 expand_increment (build (PREINCREMENT_EXPR,
4526 TREE_TYPE (index),
4527 index, integer_one_node), 0, 0);
4528 expand_end_loop ();
4529 emit_label (loop_end);
4530 }
4531 }
4532 else if ((index != 0 && ! host_integerp (index, 0))
4533 || ! host_integerp (TYPE_SIZE (elttype), 1))
4534 {
4535 rtx pos_rtx, addr;
4536 tree position;
4537
4538 if (index == 0)
4539 index = ssize_int (1);
4540
4541 if (minelt)
4542 index = convert (ssizetype,
4543 fold (build (MINUS_EXPR, index,
4544 TYPE_MIN_VALUE (domain))));
4545
4546 position = size_binop (MULT_EXPR, index,
4547 convert (ssizetype,
4548 TYPE_SIZE_UNIT (elttype)));
4549 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4550 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4551 xtarget = change_address (target, mode, addr);
4552 store_expr (value, xtarget, 0);
4553 }
4554 else
4555 {
4556 if (index != 0)
4557 bitpos = ((tree_low_cst (index, 0) - minelt)
4558 * tree_low_cst (TYPE_SIZE (elttype), 1));
4559 else
4560 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4561
4562 store_constructor_field (target, bitsize, bitpos, mode, value,
4563 type, align, cleared);
4564 }
4565 }
4566 }
4567
4568   /* Set constructor assignments.  */
4569 else if (TREE_CODE (type) == SET_TYPE)
4570 {
4571 tree elt = CONSTRUCTOR_ELTS (exp);
4572 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4573 tree domain = TYPE_DOMAIN (type);
4574 tree domain_min, domain_max, bitlength;
4575
4576 /* The default implementation strategy is to extract the constant
4577 parts of the constructor, use that to initialize the target,
4578 and then "or" in whatever non-constant ranges we need in addition.
4579
4580 If a large set is all zero or all ones, it is
4581 probably better to set it using memset (if available) or bzero.
4582 Also, if a large set has just a single range, it may also be
4583          better to first clear the whole set (using
4584          bzero/memset), and then set the bits we want.  */
4585
4586 /* Check for all zeros. */
4587 if (elt == NULL_TREE && size > 0)
4588 {
4589 if (!cleared)
4590 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4591 return;
4592 }
4593
4594 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4595 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4596 bitlength = size_binop (PLUS_EXPR,
4597 size_diffop (domain_max, domain_min),
4598 ssize_int (1));
4599
4600 nbits = tree_low_cst (bitlength, 1);
4601
4602 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4603 are "complicated" (more than one range), initialize (the
4604 constant parts) by copying from a constant. */
4605 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4606 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4607 {
4608 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4609 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4610 char *bit_buffer = (char *) alloca (nbits);
4611 HOST_WIDE_INT word = 0;
4612 unsigned int bit_pos = 0;
4613 unsigned int ibit = 0;
4614 unsigned int offset = 0; /* In bytes from beginning of set. */
4615
4616 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4617 for (;;)
4618 {
4619 if (bit_buffer[ibit])
4620 {
4621 if (BYTES_BIG_ENDIAN)
4622 word |= (1 << (set_word_size - 1 - bit_pos));
4623 else
4624 word |= 1 << bit_pos;
4625 }
4626
4627 bit_pos++; ibit++;
4628 if (bit_pos >= set_word_size || ibit == nbits)
4629 {
4630 if (word != 0 || ! cleared)
4631 {
4632 rtx datum = GEN_INT (word);
4633 rtx to_rtx;
4634
4635 /* The assumption here is that it is safe to use
4636 XEXP if the set is multi-word, but not if
4637 it's single-word. */
4638 if (GET_CODE (target) == MEM)
4639 {
4640 to_rtx = plus_constant (XEXP (target, 0), offset);
4641 to_rtx = change_address (target, mode, to_rtx);
4642 }
4643 else if (offset == 0)
4644 to_rtx = target;
4645 else
4646 abort ();
4647 emit_move_insn (to_rtx, datum);
4648 }
4649
4650 if (ibit == nbits)
4651 break;
4652 word = 0;
4653 bit_pos = 0;
4654 offset += set_word_size / BITS_PER_UNIT;
4655 }
4656 }
4657 }
4658 else if (!cleared)
4659 /* Don't bother clearing storage if the set is all ones. */
4660 if (TREE_CHAIN (elt) != NULL_TREE
4661 || (TREE_PURPOSE (elt) == NULL_TREE
4662 ? nbits != 1
4663 : ( ! host_integerp (TREE_VALUE (elt), 0)
4664 || ! host_integerp (TREE_PURPOSE (elt), 0)
4665 || (tree_low_cst (TREE_VALUE (elt), 0)
4666 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4667 != (HOST_WIDE_INT) nbits))))
4668 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4669
4670 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4671 {
4672 /* start of range of element or NULL */
4673 tree startbit = TREE_PURPOSE (elt);
4674 /* end of range of element, or element value */
4675 tree endbit = TREE_VALUE (elt);
4676 #ifdef TARGET_MEM_FUNCTIONS
4677 HOST_WIDE_INT startb, endb;
4678 #endif
4679 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4680
4681 bitlength_rtx = expand_expr (bitlength,
4682 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4683
4684 /* handle non-range tuple element like [ expr ] */
4685 if (startbit == NULL_TREE)
4686 {
4687 startbit = save_expr (endbit);
4688 endbit = startbit;
4689 }
4690
4691 startbit = convert (sizetype, startbit);
4692 endbit = convert (sizetype, endbit);
4693 if (! integer_zerop (domain_min))
4694 {
4695 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4696 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4697 }
4698 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4699 EXPAND_CONST_ADDRESS);
4700 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4701 EXPAND_CONST_ADDRESS);
4702
4703 if (REG_P (target))
4704 {
4705 targetx = assign_stack_temp (GET_MODE (target),
4706 GET_MODE_SIZE (GET_MODE (target)),
4707 0);
4708 emit_move_insn (targetx, target);
4709 }
4710
4711 else if (GET_CODE (target) == MEM)
4712 targetx = target;
4713 else
4714 abort ();
4715
4716 #ifdef TARGET_MEM_FUNCTIONS
4717 /* Optimization: If startbit and endbit are
4718 constants divisible by BITS_PER_UNIT,
4719 call memset instead. */
4720 if (TREE_CODE (startbit) == INTEGER_CST
4721 && TREE_CODE (endbit) == INTEGER_CST
4722 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4723 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4724 {
4725 emit_library_call (memset_libfunc, 0,
4726 VOIDmode, 3,
4727 plus_constant (XEXP (targetx, 0),
4728 startb / BITS_PER_UNIT),
4729 Pmode,
4730 constm1_rtx, TYPE_MODE (integer_type_node),
4731 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4732 TYPE_MODE (sizetype));
4733 }
4734 else
4735 #endif
4736 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4737 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4738 bitlength_rtx, TYPE_MODE (sizetype),
4739 startbit_rtx, TYPE_MODE (sizetype),
4740 endbit_rtx, TYPE_MODE (sizetype));
4741
4742 if (REG_P (target))
4743 emit_move_insn (target, targetx);
4744 }
4745 }
4746
4747 else
4748 abort ();
4749 }
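/* Minimal stand-alone sketch, not compiler code, of the word-packing done
   for constant SET_TYPE constructors above: bits from a flat buffer are
   ORed into host words, with bit 0 placed at the high end of each word
   when bytes are big-endian.  All names here are hypothetical.  */
#if 0
#include <string.h>

#define SET_WORD_SIZE 32	/* plays the role of set_word_size above */

static void
pack_bits (bit_buffer, nbits, words, bytes_big_endian)
     const char *bit_buffer;
     unsigned int nbits;
     unsigned long *words;
     int bytes_big_endian;
{
  unsigned int ibit, bit_pos = 0, word_num = 0;
  unsigned long word = 0;

  memset (words, 0,
	  ((nbits + SET_WORD_SIZE - 1) / SET_WORD_SIZE) * sizeof *words);
  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
	word |= (bytes_big_endian
		 ? 1UL << (SET_WORD_SIZE - 1 - bit_pos)
		 : 1UL << bit_pos);
      if (++bit_pos == SET_WORD_SIZE || ibit + 1 == nbits)
	{
	  words[word_num++] = word;
	  word = 0;
	  bit_pos = 0;
	}
    }
}
#endif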
4750
4751 /* Store the value of EXP (an expression tree)
4752 into a subfield of TARGET which has mode MODE and occupies
4753 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4754 If MODE is VOIDmode, it means that we are storing into a bit-field.
4755
4756 If VALUE_MODE is VOIDmode, return nothing in particular.
4757 UNSIGNEDP is not used in this case.
4758
4759 Otherwise, return an rtx for the value stored. This rtx
4760 has mode VALUE_MODE if that is convenient to do.
4761 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4762
4763 ALIGN is the alignment that TARGET is known to have.
4764 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4765
4766 ALIAS_SET is the alias set for the destination. This value will
4767 (in general) be different from that for TARGET, since TARGET is a
4768 reference to the containing structure. */
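/* Illustrative source-level case, a sketch that is not part of the compiler
   and that assumes a typical field layout: for the hypothetical assignment
   below, store_field is reached with MODE == VOIDmode (a bit-field),
   BITSIZE == 5 and BITPOS == 3, so the value is stored with the bit-field
   machinery rather than through an ordinary memory reference.  */
#if 0
struct s { unsigned int a : 3, b : 5; } x;

void
example (void)
{
  x.b = 9;
}
#endif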
4769
4770 static rtx
4771 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4772 unsignedp, align, total_size, alias_set)
4773 rtx target;
4774 HOST_WIDE_INT bitsize;
4775 HOST_WIDE_INT bitpos;
4776 enum machine_mode mode;
4777 tree exp;
4778 enum machine_mode value_mode;
4779 int unsignedp;
4780 unsigned int align;
4781 HOST_WIDE_INT total_size;
4782 int alias_set;
4783 {
4784 HOST_WIDE_INT width_mask = 0;
4785
4786 if (TREE_CODE (exp) == ERROR_MARK)
4787 return const0_rtx;
4788
4789 if (bitsize < HOST_BITS_PER_WIDE_INT)
4790 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4791
4792 /* If we are storing into an unaligned field of an aligned union that is
4793 in a register, we may have the mode of TARGET being an integer mode but
4794 MODE == BLKmode. In that case, get an aligned object whose size and
4795 alignment are the same as TARGET and store TARGET into it (we can avoid
4796 the store if the field being stored is the entire width of TARGET). Then
4797 call ourselves recursively to store the field into a BLKmode version of
4798 that object. Finally, load from the object into TARGET. This is not
4799 very efficient in general, but should only be slightly more expensive
4800 than the otherwise-required unaligned accesses. Perhaps this can be
4801 cleaned up later. */
4802
4803 if (mode == BLKmode
4804 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4805 {
4806 rtx object = assign_stack_temp (GET_MODE (target),
4807 GET_MODE_SIZE (GET_MODE (target)), 0);
4808 rtx blk_object = copy_rtx (object);
4809
4810 MEM_SET_IN_STRUCT_P (object, 1);
4811 MEM_SET_IN_STRUCT_P (blk_object, 1);
4812 PUT_MODE (blk_object, BLKmode);
4813
4814 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4815 emit_move_insn (object, target);
4816
4817 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4818 align, total_size, alias_set);
4819
4820 /* Even though we aren't returning target, we need to
4821 give it the updated value. */
4822 emit_move_insn (target, object);
4823
4824 return blk_object;
4825 }
4826
4827 if (GET_CODE (target) == CONCAT)
4828 {
4829 /* We're storing into a struct containing a single __complex. */
4830
4831 if (bitpos != 0)
4832 abort ();
4833 return store_expr (exp, target, 0);
4834 }
4835
4836 /* If the structure is in a register or if the component
4837 is a bit field, we cannot use addressing to access it.
4838 Use bit-field techniques or SUBREG to store in it. */
4839
4840 if (mode == VOIDmode
4841 || (mode != BLKmode && ! direct_store[(int) mode]
4842 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4843 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4844 || GET_CODE (target) == REG
4845 || GET_CODE (target) == SUBREG
4846 /* If the field isn't aligned enough to store as an ordinary memref,
4847 store it as a bit field. */
4848 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4849 && (align < GET_MODE_ALIGNMENT (mode)
4850 || bitpos % GET_MODE_ALIGNMENT (mode)))
4851 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4852 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4853 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4854 /* If the RHS and field are a constant size and the size of the
4855 RHS isn't the same size as the bitfield, we must use bitfield
4856 operations. */
4857 || (bitsize >= 0
4858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4859 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4860 {
4861 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4862
4863 /* If BITSIZE is narrower than the size of the type of EXP
4864 we will be narrowing TEMP. Normally, what's wanted are the
4865 low-order bits. However, if EXP's type is a record and this is
4866 	 a big-endian machine, we want the upper BITSIZE bits.  */
4867 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4868 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4869 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4870 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4871 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4872 - bitsize),
4873 temp, 1);
4874
4875 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4876 MODE. */
4877 if (mode != VOIDmode && mode != BLKmode
4878 && mode != TYPE_MODE (TREE_TYPE (exp)))
4879 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4880
4881 /* If the modes of TARGET and TEMP are both BLKmode, both
4882 must be in memory and BITPOS must be aligned on a byte
4883 boundary. If so, we simply do a block copy. */
4884 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4885 {
4886 unsigned int exp_align = expr_align (exp);
4887
4888 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4889 || bitpos % BITS_PER_UNIT != 0)
4890 abort ();
4891
4892 target = change_address (target, VOIDmode,
4893 plus_constant (XEXP (target, 0),
4894 bitpos / BITS_PER_UNIT));
4895
4896 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4897 align = MIN (exp_align, align);
4898
4899 /* Find an alignment that is consistent with the bit position. */
4900 while ((bitpos % align) != 0)
4901 align >>= 1;
4902
4903 emit_block_move (target, temp,
4904 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4905 / BITS_PER_UNIT),
4906 align);
4907
4908 return value_mode == VOIDmode ? const0_rtx : target;
4909 }
4910
4911 /* Store the value in the bitfield. */
4912 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4913 if (value_mode != VOIDmode)
4914 {
4915 /* The caller wants an rtx for the value. */
4916 /* If possible, avoid refetching from the bitfield itself. */
4917 if (width_mask != 0
4918 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4919 {
4920 tree count;
4921 enum machine_mode tmode;
4922
4923 if (unsignedp)
4924 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4925 tmode = GET_MODE (temp);
4926 if (tmode == VOIDmode)
4927 tmode = value_mode;
4928 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4929 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4930 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4931 }
4932 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4933 NULL_RTX, value_mode, 0, align,
4934 total_size);
4935 }
4936 return const0_rtx;
4937 }
4938 else
4939 {
4940 rtx addr = XEXP (target, 0);
4941 rtx to_rtx;
4942
4943 /* If a value is wanted, it must be the lhs;
4944 so make the address stable for multiple use. */
4945
4946 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4947 && ! CONSTANT_ADDRESS_P (addr)
4948 /* A frame-pointer reference is already stable. */
4949 && ! (GET_CODE (addr) == PLUS
4950 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4951 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4952 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4953 addr = copy_to_reg (addr);
4954
4955 /* Now build a reference to just the desired component. */
4956
4957 to_rtx = copy_rtx (change_address (target, mode,
4958 plus_constant (addr,
4959 (bitpos
4960 / BITS_PER_UNIT))));
4961 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4962 MEM_ALIAS_SET (to_rtx) = alias_set;
4963
4964 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4965 }
4966 }
4967 \f
4968 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4969 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4970 ARRAY_REFs and find the ultimate containing object, which we return.
4971
4972 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4973 bit position, and *PUNSIGNEDP to the signedness of the field.
4974 If the position of the field is variable, we store a tree
4975 giving the variable offset (in units) in *POFFSET.
4976 This offset is in addition to the bit position.
4977 If the position is not variable, we store 0 in *POFFSET.
4978 We set *PALIGNMENT to the alignment of the address that will be
4979 computed. This is the alignment of the thing we return if *POFFSET
4980    is zero, but can be less strictly aligned if *POFFSET is nonzero.
4981
4982 If any of the extraction expressions is volatile,
4983 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4984
4985 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4986 is a mode that can be used to access the field. In that case, *PBITSIZE
4987 is redundant.
4988
4989 If the field describes a variable-sized object, *PMODE is set to
4990 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4991 this case, but the address of the object can be found. */
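/* Hypothetical usage sketch, not taken from the compiler: decomposing a
   COMPONENT_REF such as `x.b' from the bit-field sketch above.  REF is
   assumed to hold the reference being examined.  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset, inner, ref;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  unsigned int alignment;

  inner = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
			       &unsignedp, &volatilep, &alignment);
  /* For a constant-position field, INNER is the containing object,
     OFFSET is 0 and BITPOS/BITSIZE give the field's position and width;
     a variable array index would instead show up in OFFSET.  */
}
#endif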
4992
4993 tree
4994 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4995 punsignedp, pvolatilep, palignment)
4996 tree exp;
4997 HOST_WIDE_INT *pbitsize;
4998 HOST_WIDE_INT *pbitpos;
4999 tree *poffset;
5000 enum machine_mode *pmode;
5001 int *punsignedp;
5002 int *pvolatilep;
5003 unsigned int *palignment;
5004 {
5005 tree size_tree = 0;
5006 enum machine_mode mode = VOIDmode;
5007 tree offset = size_zero_node;
5008 tree bit_offset = bitsize_zero_node;
5009 unsigned int alignment = BIGGEST_ALIGNMENT;
5010 tree tem;
5011
5012 /* First get the mode, signedness, and size. We do this from just the
5013 outermost expression. */
5014 if (TREE_CODE (exp) == COMPONENT_REF)
5015 {
5016 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5017 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5018 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5019
5020 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5021 }
5022 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5023 {
5024 size_tree = TREE_OPERAND (exp, 1);
5025 *punsignedp = TREE_UNSIGNED (exp);
5026 }
5027 else
5028 {
5029 mode = TYPE_MODE (TREE_TYPE (exp));
5030 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5031
5032 if (mode == BLKmode)
5033 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5034 else
5035 *pbitsize = GET_MODE_BITSIZE (mode);
5036 }
5037
5038 if (size_tree != 0)
5039 {
5040 if (! host_integerp (size_tree, 1))
5041 mode = BLKmode, *pbitsize = -1;
5042 else
5043 *pbitsize = tree_low_cst (size_tree, 1);
5044 }
5045
5046 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5047 and find the ultimate containing object. */
5048 while (1)
5049 {
5050 if (TREE_CODE (exp) == BIT_FIELD_REF)
5051 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5052 else if (TREE_CODE (exp) == COMPONENT_REF)
5053 {
5054 tree field = TREE_OPERAND (exp, 1);
5055 tree this_offset = DECL_FIELD_OFFSET (field);
5056
5057 /* If this field hasn't been filled in yet, don't go
5058 past it. This should only happen when folding expressions
5059 made during type construction. */
5060 if (this_offset == 0)
5061 break;
5062 else if (! TREE_CONSTANT (this_offset)
5063 && contains_placeholder_p (this_offset))
5064 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5065
5066 offset = size_binop (PLUS_EXPR, offset, this_offset);
5067 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5068 DECL_FIELD_BIT_OFFSET (field));
5069
5070 if (! host_integerp (offset, 0))
5071 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5072 }
5073
5074 else if (TREE_CODE (exp) == ARRAY_REF)
5075 {
5076 tree index = TREE_OPERAND (exp, 1);
5077 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5078 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5079 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5080
5081 /* We assume all arrays have sizes that are a multiple of a byte.
5082 First subtract the lower bound, if any, in the type of the
5083 index, then convert to sizetype and multiply by the size of the
5084 array element. */
5085 if (low_bound != 0 && ! integer_zerop (low_bound))
5086 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5087 index, low_bound));
5088
5089 /* If the index has a self-referential type, pass it to a
5090 	 WITH_RECORD_EXPR; if the component size is self-referential,
5091 	 pass our containing object to one.  */
5092 if (! TREE_CONSTANT (index)
5093 && contains_placeholder_p (index))
5094 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5095 if (! TREE_CONSTANT (unit_size)
5096 && contains_placeholder_p (unit_size))
5097 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5098 TREE_OPERAND (exp, 0));
5099
5100 offset = size_binop (PLUS_EXPR, offset,
5101 size_binop (MULT_EXPR,
5102 convert (sizetype, index),
5103 unit_size));
5104 }
5105
5106 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5107 && ! ((TREE_CODE (exp) == NOP_EXPR
5108 || TREE_CODE (exp) == CONVERT_EXPR)
5109 && (TYPE_MODE (TREE_TYPE (exp))
5110 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5111 break;
5112
5113 /* If any reference in the chain is volatile, the effect is volatile. */
5114 if (TREE_THIS_VOLATILE (exp))
5115 *pvolatilep = 1;
5116
5117 /* If the offset is non-constant already, then we can't assume any
5118 alignment more than the alignment here. */
5119 if (! TREE_CONSTANT (offset))
5120 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5121
5122 exp = TREE_OPERAND (exp, 0);
5123 }
5124
5125 if (DECL_P (exp))
5126 alignment = MIN (alignment, DECL_ALIGN (exp));
5127 else if (TREE_TYPE (exp) != 0)
5128 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5129
5130 /* If OFFSET is constant, see if we can return the whole thing as a
5131 constant bit position. Otherwise, split it up. */
5132 if (host_integerp (offset, 0)
5133 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5134 bitsize_unit_node))
5135 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5136 && host_integerp (tem, 0))
5137 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5138 else
5139 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5140
5141 *pmode = mode;
5142 *palignment = alignment;
5143 return exp;
5144 }
5145
5146 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5147
5148 static enum memory_use_mode
5149 get_memory_usage_from_modifier (modifier)
5150 enum expand_modifier modifier;
5151 {
5152 switch (modifier)
5153 {
5154 case EXPAND_NORMAL:
5155 case EXPAND_SUM:
5156 return MEMORY_USE_RO;
5157 break;
5158 case EXPAND_MEMORY_USE_WO:
5159 return MEMORY_USE_WO;
5160 break;
5161 case EXPAND_MEMORY_USE_RW:
5162 return MEMORY_USE_RW;
5163 break;
5164 case EXPAND_MEMORY_USE_DONT:
5165 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5166 MEMORY_USE_DONT, because they are modifiers to a call of
5167 expand_expr in the ADDR_EXPR case of expand_expr. */
5168 case EXPAND_CONST_ADDRESS:
5169 case EXPAND_INITIALIZER:
5170 return MEMORY_USE_DONT;
5171 case EXPAND_MEMORY_USE_BAD:
5172 default:
5173 abort ();
5174 }
5175 }
5176 \f
5177 /* Given an rtx VALUE that may contain additions and multiplications,
5178 return an equivalent value that just refers to a register or memory.
5179 This is done by generating instructions to perform the arithmetic
5180 and returning a pseudo-register containing the value.
5181
5182 The returned value may be a REG, SUBREG, MEM or constant. */
5183
5184 rtx
5185 force_operand (value, target)
5186 rtx value, target;
5187 {
5188 register optab binoptab = 0;
5189 /* Use a temporary to force order of execution of calls to
5190 `force_operand'. */
5191 rtx tmp;
5192 register rtx op2;
5193 /* Use subtarget as the target for operand 0 of a binary operation. */
5194 register rtx subtarget = get_subtarget (target);
5195
5196 /* Check for a PIC address load. */
5197 if (flag_pic
5198 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5199 && XEXP (value, 0) == pic_offset_table_rtx
5200 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5201 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5202 || GET_CODE (XEXP (value, 1)) == CONST))
5203 {
5204 if (!subtarget)
5205 subtarget = gen_reg_rtx (GET_MODE (value));
5206 emit_move_insn (subtarget, value);
5207 return subtarget;
5208 }
5209
5210 if (GET_CODE (value) == PLUS)
5211 binoptab = add_optab;
5212 else if (GET_CODE (value) == MINUS)
5213 binoptab = sub_optab;
5214 else if (GET_CODE (value) == MULT)
5215 {
5216 op2 = XEXP (value, 1);
5217 if (!CONSTANT_P (op2)
5218 && !(GET_CODE (op2) == REG && op2 != subtarget))
5219 subtarget = 0;
5220 tmp = force_operand (XEXP (value, 0), subtarget);
5221 return expand_mult (GET_MODE (value), tmp,
5222 force_operand (op2, NULL_RTX),
5223 target, 0);
5224 }
5225
5226 if (binoptab)
5227 {
5228 op2 = XEXP (value, 1);
5229 if (!CONSTANT_P (op2)
5230 && !(GET_CODE (op2) == REG && op2 != subtarget))
5231 subtarget = 0;
5232 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5233 {
5234 binoptab = add_optab;
5235 op2 = negate_rtx (GET_MODE (value), op2);
5236 }
5237
5238 /* Check for an addition with OP2 a constant integer and our first
5239 operand a PLUS of a virtual register and something else. In that
5240 case, we want to emit the sum of the virtual register and the
5241 constant first and then add the other value. This allows virtual
5242 register instantiation to simply modify the constant rather than
5243 creating another one around this addition. */
5244 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5245 && GET_CODE (XEXP (value, 0)) == PLUS
5246 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5247 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5248 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5249 {
5250 rtx temp = expand_binop (GET_MODE (value), binoptab,
5251 XEXP (XEXP (value, 0), 0), op2,
5252 subtarget, 0, OPTAB_LIB_WIDEN);
5253 return expand_binop (GET_MODE (value), binoptab, temp,
5254 force_operand (XEXP (XEXP (value, 0), 1), 0),
5255 target, 0, OPTAB_LIB_WIDEN);
5256 }
5257
5258 tmp = force_operand (XEXP (value, 0), subtarget);
5259 return expand_binop (GET_MODE (value), binoptab, tmp,
5260 force_operand (op2, NULL_RTX),
5261 target, 0, OPTAB_LIB_WIDEN);
5262 /* We give UNSIGNEDP = 0 to expand_binop
5263 because the only operations we are expanding here are signed ones. */
5264 }
5265 return value;
5266 }
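/* Hypothetical usage sketch, not taken from the compiler: forcing an
   address calculation into a plain operand.  SOME_REG is assumed to name
   an existing SImode pseudo register.  */
#if 0
{
  rtx some_reg;
  rtx addr = gen_rtx_PLUS (SImode, some_reg, GEN_INT (12));
  rtx op = force_operand (addr, NULL_RTX);
  /* OP now refers to a register holding SOME_REG + 12; the addition was
     emitted as a separate insn via expand_binop.  */
}
#endif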
5267 \f
5268 /* Subroutine of expand_expr:
5269 save the non-copied parts (LIST) of an expr (LHS), and return a list
5270 which can restore these values to their previous values,
5271 should something modify their storage. */
5272
5273 static tree
5274 save_noncopied_parts (lhs, list)
5275 tree lhs;
5276 tree list;
5277 {
5278 tree tail;
5279 tree parts = 0;
5280
5281 for (tail = list; tail; tail = TREE_CHAIN (tail))
5282 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5283 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5284 else
5285 {
5286 tree part = TREE_VALUE (tail);
5287 tree part_type = TREE_TYPE (part);
5288 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5289 rtx target = assign_temp (part_type, 0, 1, 1);
5290 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5291 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5292 parts = tree_cons (to_be_saved,
5293 build (RTL_EXPR, part_type, NULL_TREE,
5294 (tree) target),
5295 parts);
5296 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5297 }
5298 return parts;
5299 }
5300
5301 /* Subroutine of expand_expr:
5302 record the non-copied parts (LIST) of an expr (LHS), and return a list
5303 which specifies the initial values of these parts. */
5304
5305 static tree
5306 init_noncopied_parts (lhs, list)
5307 tree lhs;
5308 tree list;
5309 {
5310 tree tail;
5311 tree parts = 0;
5312
5313 for (tail = list; tail; tail = TREE_CHAIN (tail))
5314 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5315 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5316 else if (TREE_PURPOSE (tail))
5317 {
5318 tree part = TREE_VALUE (tail);
5319 tree part_type = TREE_TYPE (part);
5320 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5321 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5322 }
5323 return parts;
5324 }
5325
5326 /* Subroutine of expand_expr: return nonzero iff there is no way that
5327 EXP can reference X, which is being modified. TOP_P is nonzero if this
5328 call is going to be used to determine whether we need a temporary
5329 for EXP, as opposed to a recursive call to this function.
5330
5331 It is always safe for this routine to return zero since it merely
5332 searches for optimization opportunities. */
5333
5334 static int
5335 safe_from_p (x, exp, top_p)
5336 rtx x;
5337 tree exp;
5338 int top_p;
5339 {
5340 rtx exp_rtl = 0;
5341 int i, nops;
5342 static int save_expr_count;
5343 static int save_expr_size = 0;
5344 static tree *save_expr_rewritten;
5345 static tree save_expr_trees[256];
5346
5347 if (x == 0
5348 /* If EXP has varying size, we MUST use a target since we currently
5349 have no way of allocating temporaries of variable size
5350 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5351 So we assume here that something at a higher level has prevented a
5352 clash. This is somewhat bogus, but the best we can do. Only
5353 do this when X is BLKmode and when we are at the top level. */
5354 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5355 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5356 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5357 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5358 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5359 != INTEGER_CST)
5360 && GET_MODE (x) == BLKmode))
5361 return 1;
5362
5363 if (top_p && save_expr_size == 0)
5364 {
5365 int rtn;
5366
5367 save_expr_count = 0;
5368 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5369 save_expr_rewritten = &save_expr_trees[0];
5370
5371 rtn = safe_from_p (x, exp, 1);
5372
5373 for (i = 0; i < save_expr_count; ++i)
5374 {
5375 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5376 abort ();
5377 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5378 }
5379
5380 save_expr_size = 0;
5381
5382 return rtn;
5383 }
5384
5385 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5386 find the underlying pseudo. */
5387 if (GET_CODE (x) == SUBREG)
5388 {
5389 x = SUBREG_REG (x);
5390 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5391 return 0;
5392 }
5393
5394 /* If X is a location in the outgoing argument area, it is always safe. */
5395 if (GET_CODE (x) == MEM
5396 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5397 || (GET_CODE (XEXP (x, 0)) == PLUS
5398 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5399 return 1;
5400
5401 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5402 {
5403 case 'd':
5404 exp_rtl = DECL_RTL (exp);
5405 break;
5406
5407 case 'c':
5408 return 1;
5409
5410 case 'x':
5411 if (TREE_CODE (exp) == TREE_LIST)
5412 return ((TREE_VALUE (exp) == 0
5413 || safe_from_p (x, TREE_VALUE (exp), 0))
5414 && (TREE_CHAIN (exp) == 0
5415 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5416 else if (TREE_CODE (exp) == ERROR_MARK)
5417 return 1; /* An already-visited SAVE_EXPR? */
5418 else
5419 return 0;
5420
5421 case '1':
5422 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5423
5424 case '2':
5425 case '<':
5426 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5427 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5428
5429 case 'e':
5430 case 'r':
5431 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5432 the expression. If it is set, we conflict iff we are that rtx or
5433 both are in memory. Otherwise, we check all operands of the
5434 expression recursively. */
5435
5436 switch (TREE_CODE (exp))
5437 {
5438 case ADDR_EXPR:
5439 return (staticp (TREE_OPERAND (exp, 0))
5440 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5441 || TREE_STATIC (exp));
5442
5443 case INDIRECT_REF:
5444 if (GET_CODE (x) == MEM)
5445 return 0;
5446 break;
5447
5448 case CALL_EXPR:
5449 exp_rtl = CALL_EXPR_RTL (exp);
5450 if (exp_rtl == 0)
5451 {
5452 /* Assume that the call will clobber all hard registers and
5453 all of memory. */
5454 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5455 || GET_CODE (x) == MEM)
5456 return 0;
5457 }
5458
5459 break;
5460
5461 case RTL_EXPR:
5462 /* If a sequence exists, we would have to scan every instruction
5463 in the sequence to see if it was safe. This is probably not
5464 worthwhile. */
5465 if (RTL_EXPR_SEQUENCE (exp))
5466 return 0;
5467
5468 exp_rtl = RTL_EXPR_RTL (exp);
5469 break;
5470
5471 case WITH_CLEANUP_EXPR:
5472 exp_rtl = RTL_EXPR_RTL (exp);
5473 break;
5474
5475 case CLEANUP_POINT_EXPR:
5476 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5477
5478 case SAVE_EXPR:
5479 exp_rtl = SAVE_EXPR_RTL (exp);
5480 if (exp_rtl)
5481 break;
5482
5483 /* This SAVE_EXPR might appear many times in the top-level
5484 safe_from_p() expression, and if it has a complex
5485 subexpression, examining it multiple times could result
5486 in a combinatorial explosion. E.g. on an Alpha
5487 running at least 200MHz, a Fortran test case compiled with
5488 optimization took about 28 minutes to compile -- even though
5489 it was only a few lines long, and the complicated line causing
5490 so much time to be spent in the earlier version of safe_from_p()
5491 had only 293 or so unique nodes.
5492
5493 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5494 where it is so we can turn it back in the top-level safe_from_p()
5495 when we're done. */
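/* An ERROR_MARK reached again through some other path is treated as an
   already-visited SAVE_EXPR by the 'x' case above and therefore assumed
   safe; the top-level call turns the rewritten nodes back into
   SAVE_EXPRs before returning.  */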
5496
5497 /* For now, don't bother re-sizing the array. */
5498 if (save_expr_count >= save_expr_size)
5499 return 0;
5500 save_expr_rewritten[save_expr_count++] = exp;
5501
5502 nops = tree_code_length[(int) SAVE_EXPR];
5503 for (i = 0; i < nops; i++)
5504 {
5505 tree operand = TREE_OPERAND (exp, i);
5506 if (operand == NULL_TREE)
5507 continue;
5508 TREE_SET_CODE (exp, ERROR_MARK);
5509 if (!safe_from_p (x, operand, 0))
5510 return 0;
5511 TREE_SET_CODE (exp, SAVE_EXPR);
5512 }
5513 TREE_SET_CODE (exp, ERROR_MARK);
5514 return 1;
5515
5516 case BIND_EXPR:
5517 /* The only operand we look at is operand 1. The rest aren't
5518 part of the expression. */
5519 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5520
5521 case METHOD_CALL_EXPR:
5522 /* This takes a rtx argument, but shouldn't appear here. */
5523 abort ();
5524
5525 default:
5526 break;
5527 }
5528
5529 /* If we have an rtx, we do not need to scan our operands. */
5530 if (exp_rtl)
5531 break;
5532
5533 nops = tree_code_length[(int) TREE_CODE (exp)];
5534 for (i = 0; i < nops; i++)
5535 if (TREE_OPERAND (exp, i) != 0
5536 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5537 return 0;
5538 }
5539
5540 /* If we have an rtl, find any enclosed object. Then see if we conflict
5541 with it. */
5542 if (exp_rtl)
5543 {
5544 if (GET_CODE (exp_rtl) == SUBREG)
5545 {
5546 exp_rtl = SUBREG_REG (exp_rtl);
5547 if (GET_CODE (exp_rtl) == REG
5548 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5549 return 0;
5550 }
5551
5552 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5553 are memory and EXP is not readonly. */
5554 return ! (rtx_equal_p (x, exp_rtl)
5555 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5556 && ! TREE_READONLY (exp)));
5557 }
5558
5559 /* If we reach here, it is safe. */
5560 return 1;
5561 }
5562
5563 /* Subroutine of expand_expr: return nonzero iff EXP is an
5564 expression whose type is statically determinable. */
5565
5566 static int
5567 fixed_type_p (exp)
5568 tree exp;
5569 {
5570 if (TREE_CODE (exp) == PARM_DECL
5571 || TREE_CODE (exp) == VAR_DECL
5572 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5573 || TREE_CODE (exp) == COMPONENT_REF
5574 || TREE_CODE (exp) == ARRAY_REF)
5575 return 1;
5576 return 0;
5577 }
5578
5579 /* Subroutine of expand_expr: return rtx if EXP is a
5580 variable or parameter; else return 0. */
5581
5582 static rtx
5583 var_rtx (exp)
5584 tree exp;
5585 {
5586 STRIP_NOPS (exp);
5587 switch (TREE_CODE (exp))
5588 {
5589 case PARM_DECL:
5590 case VAR_DECL:
5591 return DECL_RTL (exp);
5592 default:
5593 return 0;
5594 }
5595 }
5596
5597 #ifdef MAX_INTEGER_COMPUTATION_MODE
5598 void
5599 check_max_integer_computation_mode (exp)
5600 tree exp;
5601 {
5602 enum tree_code code;
5603 enum machine_mode mode;
5604
5605 /* Strip any NOPs that don't change the mode. */
5606 STRIP_NOPS (exp);
5607 code = TREE_CODE (exp);
5608
5609 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5610 if (code == NOP_EXPR
5611 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5612 return;
5613
5614 /* First check the type of the overall operation. We need only look at
5615 unary, binary and relational operations. */
5616 if (TREE_CODE_CLASS (code) == '1'
5617 || TREE_CODE_CLASS (code) == '2'
5618 || TREE_CODE_CLASS (code) == '<')
5619 {
5620 mode = TYPE_MODE (TREE_TYPE (exp));
5621 if (GET_MODE_CLASS (mode) == MODE_INT
5622 && mode > MAX_INTEGER_COMPUTATION_MODE)
5623 fatal ("unsupported wide integer operation");
5624 }
5625
5626 /* Check operand of a unary op. */
5627 if (TREE_CODE_CLASS (code) == '1')
5628 {
5629 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5630 if (GET_MODE_CLASS (mode) == MODE_INT
5631 && mode > MAX_INTEGER_COMPUTATION_MODE)
5632 fatal ("unsupported wide integer operation");
5633 }
5634
5635 /* Check operands of a binary/comparison op. */
5636 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5637 {
5638 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5639 if (GET_MODE_CLASS (mode) == MODE_INT
5640 && mode > MAX_INTEGER_COMPUTATION_MODE)
5641 fatal ("unsupported wide integer operation");
5642
5643 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5644 if (GET_MODE_CLASS (mode) == MODE_INT
5645 && mode > MAX_INTEGER_COMPUTATION_MODE)
5646 fatal ("unsupported wide integer operation");
5647 }
5648 }
5649 #endif
5650
5651 \f
5652 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5653 has any readonly fields. If any of the fields have types that
5654 contain readonly fields, return true as well. */
5655
5656 static int
5657 readonly_fields_p (type)
5658 tree type;
5659 {
5660 tree field;
5661
5662 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5663 if (TREE_CODE (field) == FIELD_DECL
5664 && (TREE_READONLY (field)
5665 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5666 && readonly_fields_p (TREE_TYPE (field)))))
5667 return 1;
5668
5669 return 0;
5670 }
5671 \f
5672 /* expand_expr: generate code for computing expression EXP.
5673 An rtx for the computed value is returned. The value is never null.
5674 In the case of a void EXP, const0_rtx is returned.
5675
5676 The value may be stored in TARGET if TARGET is nonzero.
5677 TARGET is just a suggestion; callers must assume that
5678 the rtx returned may not be the same as TARGET.
5679
5680 If TARGET is CONST0_RTX, it means that the value will be ignored.
5681
5682 If TMODE is not VOIDmode, it suggests generating the
5683 result in mode TMODE. But this is done only when convenient.
5684 Otherwise, TMODE is ignored and the value generated in its natural mode.
5685 TMODE is just a suggestion; callers must assume that
5686 the rtx returned may not have mode TMODE.
5687
5688 Note that TARGET may have neither TMODE nor MODE. In that case, it
5689 probably will not be used.
5690
5691 If MODIFIER is EXPAND_SUM then when EXP is an addition
5692 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5693 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5694 products as above, or REG or MEM, or constant.
5695 Ordinarily in such cases we would output mul or add instructions
5696 and then return a pseudo reg containing the sum.
5697
5698 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5699 it also marks a label as absolutely required (it can't be dead).
5700 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5701 This is used for outputting expressions used in initializers.
5702
5703 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5704 with a constant address even if that address is not normally legitimate.
5705 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
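/* For instance, under EXPAND_SUM an expression like `a + b * 4' may be
   returned as (plus (reg A) (mult (reg B) (const_int 4))) instead of
   being computed into a single pseudo with explicit mul and add
   insns.  */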
5706
5707 rtx
5708 expand_expr (exp, target, tmode, modifier)
5709 register tree exp;
5710 rtx target;
5711 enum machine_mode tmode;
5712 enum expand_modifier modifier;
5713 {
5714 register rtx op0, op1, temp;
5715 tree type = TREE_TYPE (exp);
5716 int unsignedp = TREE_UNSIGNED (type);
5717 register enum machine_mode mode;
5718 register enum tree_code code = TREE_CODE (exp);
5719 optab this_optab;
5720 rtx subtarget, original_target;
5721 int ignore;
5722 tree context;
5723 /* Used by check-memory-usage to make modifier read only. */
5724 enum expand_modifier ro_modifier;
5725
5726 /* Handle ERROR_MARK before anybody tries to access its type. */
5727 if (TREE_CODE (exp) == ERROR_MARK)
5728 {
5729 op0 = CONST0_RTX (tmode);
5730 if (op0 != 0)
5731 return op0;
5732 return const0_rtx;
5733 }
5734
5735 mode = TYPE_MODE (type);
5736 /* Use subtarget as the target for operand 0 of a binary operation. */
5737 subtarget = get_subtarget (target);
5738 original_target = target;
5739 ignore = (target == const0_rtx
5740 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5741 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5742 || code == COND_EXPR)
5743 && TREE_CODE (type) == VOID_TYPE));
5744
5745 /* Make a read-only version of the modifier. */
5746 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5747 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5748 ro_modifier = modifier;
5749 else
5750 ro_modifier = EXPAND_NORMAL;
5751
5752 /* If we are going to ignore this result, we need only do something
5753 if there is a side-effect somewhere in the expression. If there
5754 is, short-circuit the most common cases here. Note that we must
5755 not call expand_expr with anything but const0_rtx in case this
5756 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5757
5758 if (ignore)
5759 {
5760 if (! TREE_SIDE_EFFECTS (exp))
5761 return const0_rtx;
5762
5763 /* Ensure we reference a volatile object even if value is ignored, but
5764 don't do this if all we are doing is taking its address. */
5765 if (TREE_THIS_VOLATILE (exp)
5766 && TREE_CODE (exp) != FUNCTION_DECL
5767 && mode != VOIDmode && mode != BLKmode
5768 && modifier != EXPAND_CONST_ADDRESS)
5769 {
5770 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5771 if (GET_CODE (temp) == MEM)
5772 temp = copy_to_reg (temp);
5773 return const0_rtx;
5774 }
5775
5776 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5777 || code == INDIRECT_REF || code == BUFFER_REF)
5778 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5779 VOIDmode, ro_modifier);
5780 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5781 || code == ARRAY_REF)
5782 {
5783 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5784 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5785 return const0_rtx;
5786 }
5787 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5788 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5789 /* If the second operand has no side effects, just evaluate
5790 the first. */
5791 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5792 VOIDmode, ro_modifier);
5793 else if (code == BIT_FIELD_REF)
5794 {
5795 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5796 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5797 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5798 return const0_rtx;
5799 }
5800 ;
5801 target = 0;
5802 }
5803
5804 #ifdef MAX_INTEGER_COMPUTATION_MODE
5805 /* Only check stuff here if the mode we want is different from the mode
5806 of the expression; if it's the same, check_max_integer_computation_mode
5807 will handle it. Do we really need to check this stuff at all? */
5808
5809 if (target
5810 && GET_MODE (target) != mode
5811 && TREE_CODE (exp) != INTEGER_CST
5812 && TREE_CODE (exp) != PARM_DECL
5813 && TREE_CODE (exp) != ARRAY_REF
5814 && TREE_CODE (exp) != COMPONENT_REF
5815 && TREE_CODE (exp) != BIT_FIELD_REF
5816 && TREE_CODE (exp) != INDIRECT_REF
5817 && TREE_CODE (exp) != CALL_EXPR
5818 && TREE_CODE (exp) != VAR_DECL
5819 && TREE_CODE (exp) != RTL_EXPR)
5820 {
5821 enum machine_mode mode = GET_MODE (target);
5822
5823 if (GET_MODE_CLASS (mode) == MODE_INT
5824 && mode > MAX_INTEGER_COMPUTATION_MODE)
5825 fatal ("unsupported wide integer operation");
5826 }
5827
5828 if (tmode != mode
5829 && TREE_CODE (exp) != INTEGER_CST
5830 && TREE_CODE (exp) != PARM_DECL
5831 && TREE_CODE (exp) != ARRAY_REF
5832 && TREE_CODE (exp) != COMPONENT_REF
5833 && TREE_CODE (exp) != BIT_FIELD_REF
5834 && TREE_CODE (exp) != INDIRECT_REF
5835 && TREE_CODE (exp) != VAR_DECL
5836 && TREE_CODE (exp) != CALL_EXPR
5837 && TREE_CODE (exp) != RTL_EXPR
5838 && GET_MODE_CLASS (tmode) == MODE_INT
5839 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5840 fatal ("unsupported wide integer operation");
5841
5842 check_max_integer_computation_mode (exp);
5843 #endif
5844
5845 /* If will do cse, generate all results into pseudo registers
5846 since 1) that allows cse to find more things
5847 and 2) otherwise cse could produce an insn the machine
5848 cannot support. */
5849
5850 if (! cse_not_expected && mode != BLKmode && target
5851 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5852 target = subtarget;
5853
5854 switch (code)
5855 {
5856 case LABEL_DECL:
5857 {
5858 tree function = decl_function_context (exp);
5859 /* Handle using a label in a containing function. */
5860 if (function != current_function_decl
5861 && function != inline_function_decl && function != 0)
5862 {
5863 struct function *p = find_function_data (function);
5864 /* Allocate in the memory associated with the function
5865 that the label is in. */
5866 push_obstacks (p->function_obstack,
5867 p->function_maybepermanent_obstack);
5868
5869 p->expr->x_forced_labels
5870 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5871 p->expr->x_forced_labels);
5872 pop_obstacks ();
5873 }
5874 else
5875 {
5876 if (modifier == EXPAND_INITIALIZER)
5877 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5878 label_rtx (exp),
5879 forced_labels);
5880 }
5881
5882 temp = gen_rtx_MEM (FUNCTION_MODE,
5883 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5884 if (function != current_function_decl
5885 && function != inline_function_decl && function != 0)
5886 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5887 return temp;
5888 }
5889
5890 case PARM_DECL:
5891 if (DECL_RTL (exp) == 0)
5892 {
5893 error_with_decl (exp, "prior parameter's size depends on `%s'");
5894 return CONST0_RTX (mode);
5895 }
5896
5897 /* ... fall through ... */
5898
5899 case VAR_DECL:
5900 /* If a static var's type was incomplete when the decl was written,
5901 but the type is complete now, lay out the decl now. */
5902 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5903 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5904 {
5905 push_obstacks_nochange ();
5906 end_temporary_allocation ();
5907 layout_decl (exp, 0);
5908 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5909 pop_obstacks ();
5910 }
5911
5912 /* Although static-storage variables start off initialized, according to
5913 ANSI C, a memcpy could overwrite them with uninitialized values. So
5914 we check them too. This also lets us check for read-only variables
5915 accessed via a non-const declaration, in case it won't be detected
5916 any other way (e.g., in an embedded system or OS kernel without
5917 memory protection).
5918
5919 Aggregates are not checked here; they're handled elsewhere. */
5920 if (cfun && current_function_check_memory_usage
5921 && code == VAR_DECL
5922 && GET_CODE (DECL_RTL (exp)) == MEM
5923 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5924 {
5925 enum memory_use_mode memory_usage;
5926 memory_usage = get_memory_usage_from_modifier (modifier);
5927
5928 in_check_memory_usage = 1;
5929 if (memory_usage != MEMORY_USE_DONT)
5930 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5931 XEXP (DECL_RTL (exp), 0), Pmode,
5932 GEN_INT (int_size_in_bytes (type)),
5933 TYPE_MODE (sizetype),
5934 GEN_INT (memory_usage),
5935 TYPE_MODE (integer_type_node));
5936 in_check_memory_usage = 0;
5937 }
5938
5939 /* ... fall through ... */
5940
5941 case FUNCTION_DECL:
5942 case RESULT_DECL:
5943 if (DECL_RTL (exp) == 0)
5944 abort ();
5945
5946 /* Ensure the variable is marked as used even if it doesn't go through
5947 a parser. If it hasn't been used yet, write out an external
5948 definition. */
5949 if (! TREE_USED (exp))
5950 {
5951 assemble_external (exp);
5952 TREE_USED (exp) = 1;
5953 }
5954
5955 /* Show we haven't gotten RTL for this yet. */
5956 temp = 0;
5957
5958 /* Handle variables inherited from containing functions. */
5959 context = decl_function_context (exp);
5960
5961 /* We treat inline_function_decl as an alias for the current function
5962 because that is the inline function whose vars, types, etc.
5963 are being merged into the current function.
5964 See expand_inline_function. */
5965
5966 if (context != 0 && context != current_function_decl
5967 && context != inline_function_decl
5968 /* If var is static, we don't need a static chain to access it. */
5969 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5970 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5971 {
5972 rtx addr;
5973
5974 /* Mark as non-local and addressable. */
5975 DECL_NONLOCAL (exp) = 1;
5976 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5977 abort ();
5978 mark_addressable (exp);
5979 if (GET_CODE (DECL_RTL (exp)) != MEM)
5980 abort ();
5981 addr = XEXP (DECL_RTL (exp), 0);
5982 if (GET_CODE (addr) == MEM)
5983 addr = gen_rtx_MEM (Pmode,
5984 fix_lexical_addr (XEXP (addr, 0), exp));
5985 else
5986 addr = fix_lexical_addr (addr, exp);
5987 temp = change_address (DECL_RTL (exp), mode, addr);
5988 }
5989
5990 /* This is the case of an array whose size is to be determined
5991 from its initializer, while the initializer is still being parsed.
5992 See expand_decl. */
5993
5994 else if (GET_CODE (DECL_RTL (exp)) == MEM
5995 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5996 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5997 XEXP (DECL_RTL (exp), 0));
5998
5999 /* If DECL_RTL is memory, we are in the normal case; if either
6000 the address is not valid, or it is not a register and -fforce-addr
6001 is specified, get the address into a register. */
6002
6003 else if (GET_CODE (DECL_RTL (exp)) == MEM
6004 && modifier != EXPAND_CONST_ADDRESS
6005 && modifier != EXPAND_SUM
6006 && modifier != EXPAND_INITIALIZER
6007 && (! memory_address_p (DECL_MODE (exp),
6008 XEXP (DECL_RTL (exp), 0))
6009 || (flag_force_addr
6010 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6011 temp = change_address (DECL_RTL (exp), VOIDmode,
6012 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6013
6014 /* If we got something, return it. But first, set the alignment
6015 if the address is a register. */
6016 if (temp != 0)
6017 {
6018 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6019 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6020
6021 return temp;
6022 }
6023
6024 /* If the mode of DECL_RTL does not match that of the decl, it
6025 must be a promoted value. We return a SUBREG of the wanted mode,
6026 but mark it so that we know that it was already extended. */
6027
6028 if (GET_CODE (DECL_RTL (exp)) == REG
6029 && GET_MODE (DECL_RTL (exp)) != mode)
6030 {
6031 /* Get the signedness used for this variable. Ensure we get the
6032 same mode we got when the variable was declared. */
6033 if (GET_MODE (DECL_RTL (exp))
6034 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6035 abort ();
6036
6037 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6038 SUBREG_PROMOTED_VAR_P (temp) = 1;
6039 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6040 return temp;
6041 }
6042
6043 return DECL_RTL (exp);
6044
6045 case INTEGER_CST:
6046 return immed_double_const (TREE_INT_CST_LOW (exp),
6047 TREE_INT_CST_HIGH (exp), mode);
6048
6049 case CONST_DECL:
6050 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6051 EXPAND_MEMORY_USE_BAD);
6052
6053 case REAL_CST:
6054 /* If optimized, generate immediate CONST_DOUBLE
6055 which will be turned into memory by reload if necessary.
6056
6057 We used to force a register so that loop.c could see it. But
6058 this does not allow gen_* patterns to perform optimizations with
6059 the constants. It also produces two insns in cases like "x = 1.0;".
6060 On most machines, floating-point constants are not permitted in
6061 many insns, so we'd end up copying it to a register in any case.
6062
6063 Now, we do the copying in expand_binop, if appropriate. */
6064 return immed_real_const (exp);
6065
6066 case COMPLEX_CST:
6067 case STRING_CST:
6068 if (! TREE_CST_RTL (exp))
6069 output_constant_def (exp);
6070
6071 /* TREE_CST_RTL probably contains a constant address.
6072 On RISC machines where a constant address isn't valid,
6073 make some insns to get that address into a register. */
6074 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6075 && modifier != EXPAND_CONST_ADDRESS
6076 && modifier != EXPAND_INITIALIZER
6077 && modifier != EXPAND_SUM
6078 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6079 || (flag_force_addr
6080 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6081 return change_address (TREE_CST_RTL (exp), VOIDmode,
6082 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6083 return TREE_CST_RTL (exp);
6084
6085 case EXPR_WITH_FILE_LOCATION:
6086 {
6087 rtx to_return;
6088 const char *saved_input_filename = input_filename;
6089 int saved_lineno = lineno;
6090 input_filename = EXPR_WFL_FILENAME (exp);
6091 lineno = EXPR_WFL_LINENO (exp);
6092 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6093 emit_line_note (input_filename, lineno);
6094 /* Possibly avoid switching back and forth here. */
6095 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6096 input_filename = saved_input_filename;
6097 lineno = saved_lineno;
6098 return to_return;
6099 }
6100
6101 case SAVE_EXPR:
6102 context = decl_function_context (exp);
6103
6104 /* If this SAVE_EXPR was at global context, assume we are an
6105 initialization function and move it into our context. */
6106 if (context == 0)
6107 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6108
6109 /* We treat inline_function_decl as an alias for the current function
6110 because that is the inline function whose vars, types, etc.
6111 are being merged into the current function.
6112 See expand_inline_function. */
6113 if (context == current_function_decl || context == inline_function_decl)
6114 context = 0;
6115
6116 /* If this is non-local, handle it. */
6117 if (context)
6118 {
6119 /* The following call just exists to abort if the context is
6120 not of a containing function. */
6121 find_function_data (context);
6122
6123 temp = SAVE_EXPR_RTL (exp);
6124 if (temp && GET_CODE (temp) == REG)
6125 {
6126 put_var_into_stack (exp);
6127 temp = SAVE_EXPR_RTL (exp);
6128 }
6129 if (temp == 0 || GET_CODE (temp) != MEM)
6130 abort ();
6131 return change_address (temp, mode,
6132 fix_lexical_addr (XEXP (temp, 0), exp));
6133 }
6134 if (SAVE_EXPR_RTL (exp) == 0)
6135 {
6136 if (mode == VOIDmode)
6137 temp = const0_rtx;
6138 else
6139 temp = assign_temp (type, 3, 0, 0);
6140
6141 SAVE_EXPR_RTL (exp) = temp;
6142 if (!optimize && GET_CODE (temp) == REG)
6143 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6144 save_expr_regs);
6145
6146 /* If the mode of TEMP does not match that of the expression, it
6147 must be a promoted value. We pass store_expr a SUBREG of the
6148 wanted mode but mark it so that we know that it was already
6149 extended. Note that `unsignedp' was modified above in
6150 this case. */
6151
6152 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6153 {
6154 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6155 SUBREG_PROMOTED_VAR_P (temp) = 1;
6156 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6157 }
6158
6159 if (temp == const0_rtx)
6160 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6161 EXPAND_MEMORY_USE_BAD);
6162 else
6163 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6164
6165 TREE_USED (exp) = 1;
6166 }
6167
6168 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6169 must be a promoted value. We return a SUBREG of the wanted mode,
6170 but mark it so that we know that it was already extended. */
6171
6172 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6173 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6174 {
6175 /* Compute the signedness and make the proper SUBREG. */
6176 promote_mode (type, mode, &unsignedp, 0);
6177 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6178 SUBREG_PROMOTED_VAR_P (temp) = 1;
6179 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6180 return temp;
6181 }
6182
6183 return SAVE_EXPR_RTL (exp);
6184
6185 case UNSAVE_EXPR:
6186 {
6187 rtx temp;
6188 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6189 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6190 return temp;
6191 }
6192
6193 case PLACEHOLDER_EXPR:
6194 {
6195 tree placeholder_expr;
6196
6197 /* If there is an object on the head of the placeholder list,
6198 see if some object in it is of type TYPE or a pointer to it. For
6199 further information, see tree.def. */
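/* The entries on placeholder_list are pushed by the WITH_RECORD_EXPR
   case further below; each entry's TREE_PURPOSE is the object that a
   WITH_RECORD_EXPR supplied for its contained PLACEHOLDER_EXPRs.  */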
6200 for (placeholder_expr = placeholder_list;
6201 placeholder_expr != 0;
6202 placeholder_expr = TREE_CHAIN (placeholder_expr))
6203 {
6204 tree need_type = TYPE_MAIN_VARIANT (type);
6205 tree object = 0;
6206 tree old_list = placeholder_list;
6207 tree elt;
6208
6209 /* Find the outermost reference that is of the type we want.
6210 If none, see if any object has a type that is a pointer to
6211 the type we want. */
6212 for (elt = TREE_PURPOSE (placeholder_expr);
6213 elt != 0 && object == 0;
6214 elt
6215 = ((TREE_CODE (elt) == COMPOUND_EXPR
6216 || TREE_CODE (elt) == COND_EXPR)
6217 ? TREE_OPERAND (elt, 1)
6218 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6219 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6220 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6221 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6222 ? TREE_OPERAND (elt, 0) : 0))
6223 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6224 object = elt;
6225
6226 for (elt = TREE_PURPOSE (placeholder_expr);
6227 elt != 0 && object == 0;
6228 elt
6229 = ((TREE_CODE (elt) == COMPOUND_EXPR
6230 || TREE_CODE (elt) == COND_EXPR)
6231 ? TREE_OPERAND (elt, 1)
6232 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6233 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6234 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6235 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6236 ? TREE_OPERAND (elt, 0) : 0))
6237 if (POINTER_TYPE_P (TREE_TYPE (elt))
6238 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6239 == need_type))
6240 object = build1 (INDIRECT_REF, need_type, elt);
6241
6242 if (object != 0)
6243 {
6244 /* Expand this object skipping the list entries before
6245 it was found in case it is also a PLACEHOLDER_EXPR.
6246 In that case, we want to translate it using subsequent
6247 entries. */
6248 placeholder_list = TREE_CHAIN (placeholder_expr);
6249 temp = expand_expr (object, original_target, tmode,
6250 ro_modifier);
6251 placeholder_list = old_list;
6252 return temp;
6253 }
6254 }
6255 }
6256
6257 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6258 abort ();
6259
6260 case WITH_RECORD_EXPR:
6261 /* Put the object on the placeholder list, expand our first operand,
6262 and pop the list. */
6263 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6264 placeholder_list);
6265 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6266 tmode, ro_modifier);
6267 placeholder_list = TREE_CHAIN (placeholder_list);
6268 return target;
6269
6270 case GOTO_EXPR:
6271 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6272 expand_goto (TREE_OPERAND (exp, 0));
6273 else
6274 expand_computed_goto (TREE_OPERAND (exp, 0));
6275 return const0_rtx;
6276
6277 case EXIT_EXPR:
6278 expand_exit_loop_if_false (NULL_PTR,
6279 invert_truthvalue (TREE_OPERAND (exp, 0)));
6280 return const0_rtx;
6281
6282 case LABELED_BLOCK_EXPR:
6283 if (LABELED_BLOCK_BODY (exp))
6284 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6285 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6286 return const0_rtx;
6287
6288 case EXIT_BLOCK_EXPR:
6289 if (EXIT_BLOCK_RETURN (exp))
6290 sorry ("returned value in block_exit_expr");
6291 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6292 return const0_rtx;
6293
6294 case LOOP_EXPR:
6295 push_temp_slots ();
6296 expand_start_loop (1);
6297 expand_expr_stmt (TREE_OPERAND (exp, 0));
6298 expand_end_loop ();
6299 pop_temp_slots ();
6300
6301 return const0_rtx;
6302
6303 case BIND_EXPR:
6304 {
6305 tree vars = TREE_OPERAND (exp, 0);
6306 int vars_need_expansion = 0;
6307
6308 /* Need to open a binding contour here because
6309 if there are any cleanups they must be contained here. */
6310 expand_start_bindings (2);
6311
6312 /* Mark the corresponding BLOCK for output in its proper place. */
6313 if (TREE_OPERAND (exp, 2) != 0
6314 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6315 insert_block (TREE_OPERAND (exp, 2));
6316
6317 /* If VARS have not yet been expanded, expand them now. */
6318 while (vars)
6319 {
6320 if (DECL_RTL (vars) == 0)
6321 {
6322 vars_need_expansion = 1;
6323 expand_decl (vars);
6324 }
6325 expand_decl_init (vars);
6326 vars = TREE_CHAIN (vars);
6327 }
6328
6329 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6330
6331 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6332
6333 return temp;
6334 }
6335
6336 case RTL_EXPR:
6337 if (RTL_EXPR_SEQUENCE (exp))
6338 {
6339 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6340 abort ();
6341 emit_insns (RTL_EXPR_SEQUENCE (exp));
6342 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6343 }
6344 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6345 free_temps_for_rtl_expr (exp);
6346 return RTL_EXPR_RTL (exp);
6347
6348 case CONSTRUCTOR:
6349 /* If we don't need the result, just ensure we evaluate any
6350 subexpressions. */
6351 if (ignore)
6352 {
6353 tree elt;
6354 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6355 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6356 EXPAND_MEMORY_USE_BAD);
6357 return const0_rtx;
6358 }
6359
6360 /* All elts simple constants => refer to a constant in memory. But
6361 if this is a non-BLKmode mode, let it store a field at a time
6362 since that should make a CONST_INT or CONST_DOUBLE when we
6363 fold. Likewise, if we have a target we can use, it is best to
6364 store directly into the target unless the type is large enough
6365 that memcpy will be used. If we are making an initializer and
6366 all operands are constant, put it in memory as well. */
6367 else if ((TREE_STATIC (exp)
6368 && ((mode == BLKmode
6369 && ! (target != 0 && safe_from_p (target, exp, 1)))
6370 || TREE_ADDRESSABLE (exp)
6371 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6372 && (! MOVE_BY_PIECES_P
6373 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6374 TYPE_ALIGN (type)))
6375 && ! mostly_zeros_p (exp))))
6376 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6377 {
6378 rtx constructor = output_constant_def (exp);
6379
6380 if (modifier != EXPAND_CONST_ADDRESS
6381 && modifier != EXPAND_INITIALIZER
6382 && modifier != EXPAND_SUM
6383 && (! memory_address_p (GET_MODE (constructor),
6384 XEXP (constructor, 0))
6385 || (flag_force_addr
6386 && GET_CODE (XEXP (constructor, 0)) != REG)))
6387 constructor = change_address (constructor, VOIDmode,
6388 XEXP (constructor, 0));
6389 return constructor;
6390 }
6391
6392 else
6393 {
6394 /* Handle calls that pass values in multiple non-contiguous
6395 locations. The Irix 6 ABI has examples of this. */
6396 if (target == 0 || ! safe_from_p (target, exp, 1)
6397 || GET_CODE (target) == PARALLEL)
6398 {
6399 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6400 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6401 else
6402 target = assign_temp (type, 0, 1, 1);
6403 }
6404
6405 if (TREE_READONLY (exp))
6406 {
6407 if (GET_CODE (target) == MEM)
6408 target = copy_rtx (target);
6409
6410 RTX_UNCHANGING_P (target) = 1;
6411 }
6412
6413 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6414 int_size_in_bytes (TREE_TYPE (exp)));
6415 return target;
6416 }
6417
6418 case INDIRECT_REF:
6419 {
6420 tree exp1 = TREE_OPERAND (exp, 0);
6421 tree exp2;
6422 tree index;
6423 tree string = string_constant (exp1, &index);
6424
6425 /* Try to optimize reads from const strings. */
6426 if (string
6427 && TREE_CODE (string) == STRING_CST
6428 && TREE_CODE (index) == INTEGER_CST
6429 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6430 && GET_MODE_CLASS (mode) == MODE_INT
6431 && GET_MODE_SIZE (mode) == 1
6432 && modifier != EXPAND_MEMORY_USE_WO)
6433 return
6434 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6435
6436 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6437 op0 = memory_address (mode, op0);
6438
6439 if (cfun && current_function_check_memory_usage
6440 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6441 {
6442 enum memory_use_mode memory_usage;
6443 memory_usage = get_memory_usage_from_modifier (modifier);
6444
6445 if (memory_usage != MEMORY_USE_DONT)
6446 {
6447 in_check_memory_usage = 1;
6448 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6449 op0, Pmode,
6450 GEN_INT (int_size_in_bytes (type)),
6451 TYPE_MODE (sizetype),
6452 GEN_INT (memory_usage),
6453 TYPE_MODE (integer_type_node));
6454 in_check_memory_usage = 0;
6455 }
6456 }
6457
6458 temp = gen_rtx_MEM (mode, op0);
6459 /* If the address was computed by addition,
6460 mark this as an element of an aggregate. */
6461 if (TREE_CODE (exp1) == PLUS_EXPR
6462 || (TREE_CODE (exp1) == SAVE_EXPR
6463 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6464 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6465 || (TREE_CODE (exp1) == ADDR_EXPR
6466 && (exp2 = TREE_OPERAND (exp1, 0))
6467 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6468 MEM_SET_IN_STRUCT_P (temp, 1);
6469
6470 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6471 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6472
6473 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6474 here, because, in C and C++, the fact that a location is accessed
6475 through a pointer to const does not mean that the value there can
6476 never change. Languages where it can never change should
6477 also set TREE_STATIC. */
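/* E.g. in C, `* (const int *) p' may well alias a writable object,
   so the MEM below is marked unchanging only when both TREE_READONLY
   and TREE_STATIC are set on the reference.  */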
6478 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6479
6480 /* If we are writing to this object and its type is a record with
6481 readonly fields, we must mark it as readonly so it will
6482 conflict with readonly references to those fields. */
6483 if (modifier == EXPAND_MEMORY_USE_WO
6484 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6485 RTX_UNCHANGING_P (temp) = 1;
6486
6487 return temp;
6488 }
6489
6490 case ARRAY_REF:
6491 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6492 abort ();
6493
6494 {
6495 tree array = TREE_OPERAND (exp, 0);
6496 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6497 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6498 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6499 HOST_WIDE_INT i;
6500
6501 /* Optimize the special-case of a zero lower bound.
6502
6503 We convert the low_bound to sizetype to avoid some problems
6504 with constant folding. (E.g. suppose the lower bound is 1,
6505 and its mode is QI. Without the conversion, (ARRAY
6506 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6507 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6508
6509 if (! integer_zerop (low_bound))
6510 index = size_diffop (index, convert (sizetype, low_bound));
6511
6512 /* Fold an expression like: "foo"[2].
6513 This is not done in fold so it won't happen inside &.
6514 Don't fold if this is for wide characters since it's too
6515 difficult to do correctly and this is a very rare case. */
6516
6517 if (TREE_CODE (array) == STRING_CST
6518 && TREE_CODE (index) == INTEGER_CST
6519 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6520 && GET_MODE_CLASS (mode) == MODE_INT
6521 && GET_MODE_SIZE (mode) == 1)
6522 return
6523 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6524
6525 /* If this is a constant index into a constant array,
6526 just get the value from the array. Handle both the cases when
6527 we have an explicit constructor and when our operand is a variable
6528 that was declared const. */
6529
6530 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6531 && TREE_CODE (index) == INTEGER_CST
6532 && 0 > compare_tree_int (index,
6533 list_length (CONSTRUCTOR_ELTS
6534 (TREE_OPERAND (exp, 0)))))
6535 {
6536 tree elem;
6537
6538 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6539 i = TREE_INT_CST_LOW (index);
6540 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6541 ;
6542
6543 if (elem)
6544 return expand_expr (fold (TREE_VALUE (elem)), target,
6545 tmode, ro_modifier);
6546 }
6547
6548 else if (optimize >= 1
6549 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6550 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6551 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6552 {
6553 if (TREE_CODE (index) == INTEGER_CST)
6554 {
6555 tree init = DECL_INITIAL (array);
6556
6557 if (TREE_CODE (init) == CONSTRUCTOR)
6558 {
6559 tree elem;
6560
6561 for (elem = CONSTRUCTOR_ELTS (init);
6562 (elem
6563 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6564 elem = TREE_CHAIN (elem))
6565 ;
6566
6567 if (elem)
6568 return expand_expr (fold (TREE_VALUE (elem)), target,
6569 tmode, ro_modifier);
6570 }
6571 else if (TREE_CODE (init) == STRING_CST
6572 && 0 > compare_tree_int (index,
6573 TREE_STRING_LENGTH (init)))
6574 return (GEN_INT
6575 (TREE_STRING_POINTER
6576 (init)[TREE_INT_CST_LOW (index)]));
6577 }
6578 }
6579 }
6580
6581 /* ... fall through ... */
6582
6583 case COMPONENT_REF:
6584 case BIT_FIELD_REF:
6585 /* If the operand is a CONSTRUCTOR, we can just extract the
6586 appropriate field if it is present. Don't do this if we have
6587 already written the data since we want to refer to that copy
6588 and varasm.c assumes that's what we'll do. */
6589 if (code != ARRAY_REF
6590 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6591 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6592 {
6593 tree elt;
6594
6595 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6596 elt = TREE_CHAIN (elt))
6597 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6598 /* We can normally use the value of the field in the
6599 CONSTRUCTOR. However, if this is a bitfield in
6600 an integral mode that we can fit in a HOST_WIDE_INT,
6601 we must mask only the number of bits in the bitfield,
6602 since this is done implicitly by the constructor. If
6603 the bitfield does not meet either of those conditions,
6604 we can't do this optimization. */
6605 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6606 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6607 == MODE_INT)
6608 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6609 <= HOST_BITS_PER_WIDE_INT))))
6610 {
6611 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6612 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6613 {
6614 HOST_WIDE_INT bitsize
6615 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6616
6617 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6618 {
6619 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6620 op0 = expand_and (op0, op1, target);
6621 }
6622 else
6623 {
6624 enum machine_mode imode
6625 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6626 tree count
6627 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6628 0);
6629
6630 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6631 target, 0);
6632 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6633 target, 0);
6634 }
6635 }
6636
6637 return op0;
6638 }
6639 }
6640
6641 {
6642 enum machine_mode mode1;
6643 HOST_WIDE_INT bitsize, bitpos;
6644 tree offset;
6645 int volatilep = 0;
6646 unsigned int alignment;
6647 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6648 &mode1, &unsignedp, &volatilep,
6649 &alignment);
6650
6651 /* If we got back the original object, something is wrong. Perhaps
6652 we are evaluating an expression too early. In any event, don't
6653 infinitely recurse. */
6654 if (tem == exp)
6655 abort ();
6656
6657 /* If TEM's type is a union of variable size, pass TARGET to the inner
6658 computation, since it will need a temporary and TARGET is known
6659 to suffice. This occurs in unchecked conversion in Ada. */
6660
6661 op0 = expand_expr (tem,
6662 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6663 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6664 != INTEGER_CST)
6665 ? target : NULL_RTX),
6666 VOIDmode,
6667 (modifier == EXPAND_INITIALIZER
6668 || modifier == EXPAND_CONST_ADDRESS)
6669 ? modifier : EXPAND_NORMAL);
6670
6671 /* If this is a constant, put it into a register if it is a
6672 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6673 if (CONSTANT_P (op0))
6674 {
6675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6676 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6677 && offset == 0)
6678 op0 = force_reg (mode, op0);
6679 else
6680 op0 = validize_mem (force_const_mem (mode, op0));
6681 }
6682
6683 if (offset != 0)
6684 {
6685 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6686
6687 /* If this object is in memory, put it into a register.
6688 This case can't occur in C, but can in Ada if we have
6689 unchecked conversion of an expression from a scalar type to
6690 an array or record type. */
6691 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6692 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6693 {
6694 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6695
6696 mark_temp_addr_taken (memloc);
6697 emit_move_insn (memloc, op0);
6698 op0 = memloc;
6699 }
6700
6701 if (GET_CODE (op0) != MEM)
6702 abort ();
6703
6704 if (GET_MODE (offset_rtx) != ptr_mode)
6705 {
6706 #ifdef POINTERS_EXTEND_UNSIGNED
6707 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6708 #else
6709 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6710 #endif
6711 }
6712
6713 /* A constant address in OP0 can have VOIDmode; we must not try
6714 to call force_reg in that case, so avoid it here. */
6715 if (GET_CODE (op0) == MEM
6716 && GET_MODE (op0) == BLKmode
6717 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6718 && bitsize != 0
6719 && (bitpos % bitsize) == 0
6720 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6721 && alignment == GET_MODE_ALIGNMENT (mode1))
6722 {
6723 rtx temp = change_address (op0, mode1,
6724 plus_constant (XEXP (op0, 0),
6725 (bitpos /
6726 BITS_PER_UNIT)));
6727 if (GET_CODE (XEXP (temp, 0)) == REG)
6728 op0 = temp;
6729 else
6730 op0 = change_address (op0, mode1,
6731 force_reg (GET_MODE (XEXP (temp, 0)),
6732 XEXP (temp, 0)));
6733 bitpos = 0;
6734 }
6735
6736
6737 op0 = change_address (op0, VOIDmode,
6738 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6739 force_reg (ptr_mode,
6740 offset_rtx)));
6741 }
6742
6743 /* Don't forget about volatility even if this is a bitfield. */
6744 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6745 {
6746 op0 = copy_rtx (op0);
6747 MEM_VOLATILE_P (op0) = 1;
6748 }
6749
6750 /* Check the access. */
6751 if (cfun != 0 && current_function_check_memory_usage
6752 && GET_CODE (op0) == MEM)
6753 {
6754 enum memory_use_mode memory_usage;
6755 memory_usage = get_memory_usage_from_modifier (modifier);
6756
6757 if (memory_usage != MEMORY_USE_DONT)
6758 {
6759 rtx to;
6760 int size;
6761
6762 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6763 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6764
6765 /* Check the access right of the pointer. */
6766 in_check_memory_usage = 1;
6767 if (size > BITS_PER_UNIT)
6768 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6769 to, Pmode,
6770 GEN_INT (size / BITS_PER_UNIT),
6771 TYPE_MODE (sizetype),
6772 GEN_INT (memory_usage),
6773 TYPE_MODE (integer_type_node));
6774 in_check_memory_usage = 0;
6775 }
6776 }
6777
6778 /* In cases where an aligned union has an unaligned object
6779 as a field, we might be extracting a BLKmode value from
6780 an integer-mode (e.g., SImode) object. Handle this case
6781 by doing the extract into an object as wide as the field
6782 (which we know to be the width of a basic mode), then
6783 storing into memory, and changing the mode to BLKmode.
6784 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6785 EXPAND_INITIALIZER), then we must not copy to a temporary. */
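/* As a concrete illustration: a BLKmode field living inside an SImode
   union member can be extracted below in an integer mode, spilled to a
   stack temporary, and that temporary handed back with its mode changed
   to BLKmode.  */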
6786 if (mode1 == VOIDmode
6787 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6788 || (modifier != EXPAND_CONST_ADDRESS
6789 && modifier != EXPAND_INITIALIZER
6790 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6791 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6792 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6793 /* If the field isn't aligned enough to fetch as a memref,
6794 fetch it as a bit field. */
6795 || (mode1 != BLKmode
6796 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6797 && ((TYPE_ALIGN (TREE_TYPE (tem))
6798 < GET_MODE_ALIGNMENT (mode))
6799 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6800 /* If the type and the field are a constant size and the
6801 size of the type isn't the same size as the bitfield,
6802 we must use bitfield operations. */
6803 || ((bitsize >= 0
6804 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6805 == INTEGER_CST)
6806 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6807 bitsize)))))
6808 || (modifier != EXPAND_CONST_ADDRESS
6809 && modifier != EXPAND_INITIALIZER
6810 && mode == BLKmode
6811 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6812 && (TYPE_ALIGN (type) > alignment
6813 || bitpos % TYPE_ALIGN (type) != 0)))
6814 {
6815 enum machine_mode ext_mode = mode;
6816
6817 if (ext_mode == BLKmode
6818 && ! (target != 0 && GET_CODE (op0) == MEM
6819 && GET_CODE (target) == MEM
6820 && bitpos % BITS_PER_UNIT == 0))
6821 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6822
6823 if (ext_mode == BLKmode)
6824 {
6825 /* In this case, BITPOS must start at a byte boundary and
6826 TARGET, if specified, must be a MEM. */
6827 if (GET_CODE (op0) != MEM
6828 || (target != 0 && GET_CODE (target) != MEM)
6829 || bitpos % BITS_PER_UNIT != 0)
6830 abort ();
6831
6832 op0 = change_address (op0, VOIDmode,
6833 plus_constant (XEXP (op0, 0),
6834 bitpos / BITS_PER_UNIT));
6835 if (target == 0)
6836 target = assign_temp (type, 0, 1, 1);
6837
6838 emit_block_move (target, op0,
6839 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6840 / BITS_PER_UNIT),
6841 BITS_PER_UNIT);
6842
6843 return target;
6844 }
6845
6846 op0 = validize_mem (op0);
6847
6848 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6849 mark_reg_pointer (XEXP (op0, 0), alignment);
6850
6851 op0 = extract_bit_field (op0, bitsize, bitpos,
6852 unsignedp, target, ext_mode, ext_mode,
6853 alignment,
6854 int_size_in_bytes (TREE_TYPE (tem)));
6855
6856 /* If the result is a record type and BITSIZE is narrower than
6857 the mode of OP0, an integral mode, and this is a big endian
6858 machine, we must put the field into the high-order bits. */
6859 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6860 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6861 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6862 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6863 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6864 - bitsize),
6865 op0, 1);
6866
6867 if (mode == BLKmode)
6868 {
6869 rtx new = assign_stack_temp (ext_mode,
6870 bitsize / BITS_PER_UNIT, 0);
6871
6872 emit_move_insn (new, op0);
6873 op0 = copy_rtx (new);
6874 PUT_MODE (op0, BLKmode);
6875 MEM_SET_IN_STRUCT_P (op0, 1);
6876 }
6877
6878 return op0;
6879 }
6880
6881 /* If the result is BLKmode, use that to access the object
6882 now as well. */
6883 if (mode == BLKmode)
6884 mode1 = BLKmode;
6885
6886 /* Get a reference to just this component. */
6887 if (modifier == EXPAND_CONST_ADDRESS
6888 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6889 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6890 (bitpos / BITS_PER_UNIT)));
6891 else
6892 op0 = change_address (op0, mode1,
6893 plus_constant (XEXP (op0, 0),
6894 (bitpos / BITS_PER_UNIT)));
6895
6896 if (GET_CODE (op0) == MEM)
6897 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6898
6899 if (GET_CODE (XEXP (op0, 0)) == REG)
6900 mark_reg_pointer (XEXP (op0, 0), alignment);
6901
6902 MEM_SET_IN_STRUCT_P (op0, 1);
6903 MEM_VOLATILE_P (op0) |= volatilep;
6904 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6905 || modifier == EXPAND_CONST_ADDRESS
6906 || modifier == EXPAND_INITIALIZER)
6907 return op0;
6908 else if (target == 0)
6909 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6910
6911 convert_move (target, op0, unsignedp);
6912 return target;
6913 }
6914
6915 /* Intended for a reference to a buffer of a file-object in Pascal.
6916 But it's not certain that a special tree code will really be
6917 necessary for these. INDIRECT_REF might work for them. */
6918 case BUFFER_REF:
6919 abort ();
6920
6921 case IN_EXPR:
6922 {
6923 /* Pascal set IN expression.
6924
6925 Algorithm:
6926 rlo = set_low - (set_low%bits_per_word);
6927 the_word = set [ (index - rlo)/bits_per_word ];
6928 bit_index = index % bits_per_word;
6929 bitmask = 1 << bit_index;
6930 return !!(the_word & bitmask); */
6931
6932 tree set = TREE_OPERAND (exp, 0);
6933 tree index = TREE_OPERAND (exp, 1);
6934 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6935 tree set_type = TREE_TYPE (set);
6936 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6937 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6938 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6939 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6940 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6941 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6942 rtx setaddr = XEXP (setval, 0);
6943 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6944 rtx rlow;
6945 rtx diff, quo, rem, addr, bit, result;
6946
6947 preexpand_calls (exp);
6948
6949 /* If domain is empty, answer is no. Likewise if index is constant
6950 and out of bounds. */
6951 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6952 && TREE_CODE (set_low_bound) == INTEGER_CST
6953 && tree_int_cst_lt (set_high_bound, set_low_bound))
6954 || (TREE_CODE (index) == INTEGER_CST
6955 && TREE_CODE (set_low_bound) == INTEGER_CST
6956 && tree_int_cst_lt (index, set_low_bound))
6957 || (TREE_CODE (set_high_bound) == INTEGER_CST
6958 && TREE_CODE (index) == INTEGER_CST
6959 && tree_int_cst_lt (set_high_bound, index))))
6960 return const0_rtx;
6961
6962 if (target == 0)
6963 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6964
6965 /* If we get here, we have to generate the code for both cases
6966 (in range and out of range). */
6967
6968 op0 = gen_label_rtx ();
6969 op1 = gen_label_rtx ();
6970
6971 if (! (GET_CODE (index_val) == CONST_INT
6972 && GET_CODE (lo_r) == CONST_INT))
6973 {
6974 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6975 GET_MODE (index_val), iunsignedp, 0, op1);
6976 }
6977
6978 if (! (GET_CODE (index_val) == CONST_INT
6979 && GET_CODE (hi_r) == CONST_INT))
6980 {
6981 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6982 GET_MODE (index_val), iunsignedp, 0, op1);
6983 }
6984
6985 /* Calculate the element number of bit zero in the first word
6986 of the set. */
6987 if (GET_CODE (lo_r) == CONST_INT)
6988 rlow = GEN_INT (INTVAL (lo_r)
6989 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6990 else
6991 rlow = expand_binop (index_mode, and_optab, lo_r,
6992 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6993 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6994
6995 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6996 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6997
6998 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6999 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7000 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7001 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7002
7003 addr = memory_address (byte_mode,
7004 expand_binop (index_mode, add_optab, diff,
7005 setaddr, NULL_RTX, iunsignedp,
7006 OPTAB_LIB_WIDEN));
7007
7008 /* Extract the bit we want to examine.  */
7009 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7010 gen_rtx_MEM (byte_mode, addr),
7011 make_tree (TREE_TYPE (index), rem),
7012 NULL_RTX, 1);
7013 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7014 GET_MODE (target) == byte_mode ? target : 0,
7015 1, OPTAB_LIB_WIDEN);
7016
7017 if (result != target)
7018 convert_move (target, result, 1);
7019
7020 /* Output the code to handle the out-of-range case. */
7021 emit_jump (op0);
7022 emit_label (op1);
7023 emit_move_insn (target, const0_rtx);
7024 emit_label (op0);
7025 return target;
7026 }
7027
7028 case WITH_CLEANUP_EXPR:
7029 if (RTL_EXPR_RTL (exp) == 0)
7030 {
7031 RTL_EXPR_RTL (exp)
7032 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7033 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7034
7035 /* That's it for this cleanup. */
7036 TREE_OPERAND (exp, 2) = 0;
7037 }
7038 return RTL_EXPR_RTL (exp);
7039
7040 case CLEANUP_POINT_EXPR:
7041 {
7042 /* Start a new binding layer that will keep track of all cleanup
7043 actions to be performed. */
7044 expand_start_bindings (2);
7045
7046 target_temp_slot_level = temp_slot_level;
7047
7048 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7049 /* If we're going to use this value, load it up now. */
7050 if (! ignore)
7051 op0 = force_not_mem (op0);
7052 preserve_temp_slots (op0);
7053 expand_end_bindings (NULL_TREE, 0, 0);
7054 }
7055 return op0;
7056
7057 case CALL_EXPR:
7058 /* Check for a built-in function. */
7059 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7060 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7061 == FUNCTION_DECL)
7062 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7063 return expand_builtin (exp, target, subtarget, tmode, ignore);
7064
7065 /* If this call was expanded already by preexpand_calls,
7066 just return the result we got. */
7067 if (CALL_EXPR_RTL (exp) != 0)
7068 return CALL_EXPR_RTL (exp);
7069
7070 return expand_call (exp, target, ignore);
7071
7072 case NON_LVALUE_EXPR:
7073 case NOP_EXPR:
7074 case CONVERT_EXPR:
7075 case REFERENCE_EXPR:
7076 if (TREE_CODE (type) == UNION_TYPE)
7077 {
7078 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7079
7080 /* If both input and output are BLKmode, this conversion
7081 isn't actually doing anything unless we need to make the
7082 alignment stricter. */
7083 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7084 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7085 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7086 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7087 modifier);
7088
7089 if (target == 0)
7090 {
7091 if (mode != BLKmode)
7092 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7093 else
7094 target = assign_temp (type, 0, 1, 1);
7095 }
7096
7097 if (GET_CODE (target) == MEM)
7098 /* Store data into beginning of memory target. */
7099 store_expr (TREE_OPERAND (exp, 0),
7100 change_address (target, TYPE_MODE (valtype), 0), 0);
7101
7102 else if (GET_CODE (target) == REG)
7103 /* Store this field into a union of the proper type. */
7104 store_field (target,
7105 MIN ((int_size_in_bytes (TREE_TYPE
7106 (TREE_OPERAND (exp, 0)))
7107 * BITS_PER_UNIT),
7108 GET_MODE_BITSIZE (mode)),
7109 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7110 VOIDmode, 0, BITS_PER_UNIT,
7111 int_size_in_bytes (type), 0);
7112 else
7113 abort ();
7114
7115 /* Return the entire union. */
7116 return target;
7117 }
7118
7119 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7120 {
7121 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7122 ro_modifier);
7123
7124 /* If the signedness of the conversion differs and OP0 is
7125 a promoted SUBREG, clear that indication since we now
7126 have to do the proper extension. */
7127 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7128 && GET_CODE (op0) == SUBREG)
7129 SUBREG_PROMOTED_VAR_P (op0) = 0;
7130
7131 return op0;
7132 }
7133
7134 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7135 if (GET_MODE (op0) == mode)
7136 return op0;
7137
7138 /* If OP0 is a constant, just convert it into the proper mode. */
7139 if (CONSTANT_P (op0))
7140 return
7141 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7142 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7143
7144 if (modifier == EXPAND_INITIALIZER)
7145 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7146
7147 if (target == 0)
7148 return
7149 convert_to_mode (mode, op0,
7150 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7151 else
7152 convert_move (target, op0,
7153 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7154 return target;
7155
7156 case PLUS_EXPR:
7157 /* We come here from MINUS_EXPR when the second operand is a
7158 constant. */
7159 plus_expr:
7160 this_optab = add_optab;
7161
7162 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7163 something else, make sure we add the register to the constant and
7164 then to the other thing. This case can occur during strength
7165 reduction and doing it this way will produce better code if the
7166 frame pointer or argument pointer is eliminated.
7167
7168 fold-const.c will ensure that the constant is always in the inner
7169 PLUS_EXPR, so the only case we need to do anything about is if
7170 sp, ap, or fp is our second argument, in which case we must swap
7171 the innermost first argument and our second argument. */
7172
7173 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7174 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7175 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7176 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7177 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7178 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7179 {
7180 tree t = TREE_OPERAND (exp, 1);
7181
7182 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7183 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7184 }
7185
7186 /* If the result is to be ptr_mode and we are adding an integer to
7187 something, we might be forming a constant. So try to use
7188 plus_constant. If it produces a sum and we can't accept it,
7189 use force_operand. This allows P = &ARR[const] to generate
7190 efficient code on machines where a SYMBOL_REF is not a valid
7191 address.
7192
7193 If this is an EXPAND_SUM call, always return the sum. */
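         /* Illustration (hypothetical source): for `p = &arr[3]' with a
            global int array and 4-byte ints, plus_constant folds the offset
            into the symbol, giving the address `arr + 12' as a single
            constant sum; only if that form is not a valid address does
            force_operand emit an explicit addition.  */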
7194 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7195 || mode == ptr_mode)
7196 {
7197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7198 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7199 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7200 {
7201 rtx constant_part;
7202
7203 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7204 EXPAND_SUM);
7205 /* Use immed_double_const to ensure that the constant is
7206 truncated according to the mode of OP1, then sign extended
7207 to a HOST_WIDE_INT. Using the constant directly can result
7208 in non-canonical RTL in a 64x32 cross compile. */
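         /* Illustration: on a 64-bit host targeting a 32-bit machine, an
            SImode constant with the high bit set must be stored in the
            CONST_INT sign-extended to the full HOST_WIDE_INT; using the
            raw zero-extended low part would produce non-canonical RTL.  */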
7209 constant_part
7210 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7211 (HOST_WIDE_INT) 0,
7212 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7213 op1 = plus_constant (op1, INTVAL (constant_part));
7214 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7215 op1 = force_operand (op1, target);
7216 return op1;
7217 }
7218
7219 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7220 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7221 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7222 {
7223 rtx constant_part;
7224
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7226 EXPAND_SUM);
7227 if (! CONSTANT_P (op0))
7228 {
7229 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7230 VOIDmode, modifier);
7231 /* Don't go to both_summands if modifier
7232 says it's not right to return a PLUS. */
7233 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7234 goto binop2;
7235 goto both_summands;
7236 }
7237 /* Use immed_double_const to ensure that the constant is
7238 truncated according to the mode of OP0, then sign extended
7239 to a HOST_WIDE_INT. Using the constant directly can result
7240 in non-canonical RTL in a 64x32 cross compile. */
7241 constant_part
7242 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7243 (HOST_WIDE_INT) 0,
7244 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7245 op0 = plus_constant (op0, INTVAL (constant_part));
7246 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7247 op0 = force_operand (op0, target);
7248 return op0;
7249 }
7250 }
7251
7252 /* No sense saving up arithmetic to be done
7253 if it's all in the wrong mode to form part of an address.
7254 And force_operand won't know whether to sign-extend or
7255 zero-extend. */
7256 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7257 || mode != ptr_mode)
7258 goto binop;
7259
7260 preexpand_calls (exp);
7261 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7262 subtarget = 0;
7263
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7265 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7266
7267 both_summands:
7268 /* Make sure any term that's a sum with a constant comes last. */
7269 if (GET_CODE (op0) == PLUS
7270 && CONSTANT_P (XEXP (op0, 1)))
7271 {
7272 temp = op0;
7273 op0 = op1;
7274 op1 = temp;
7275 }
7276 /* If adding to a sum including a constant,
7277 associate it to put the constant outside. */
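      /* Illustration: if OP1 is (plus X (const_int 7)), the code below
         rewrites OP0 + OP1 as (OP0 + X) + 7, pulling any constant term out
         of OP0 as well, so that all constants end up combined in the
         outermost position.  */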
7278 if (GET_CODE (op1) == PLUS
7279 && CONSTANT_P (XEXP (op1, 1)))
7280 {
7281 rtx constant_term = const0_rtx;
7282
7283 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7284 if (temp != 0)
7285 op0 = temp;
7286 /* Ensure that MULT comes first if there is one. */
7287 else if (GET_CODE (op0) == MULT)
7288 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7289 else
7290 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7291
7292 /* Let's also eliminate constants from op0 if possible. */
7293 op0 = eliminate_constant_term (op0, &constant_term);
7294
7295 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7296 their sum should be a constant. Form it into OP1, since the
7297 result we want will then be OP0 + OP1. */
7298
7299 temp = simplify_binary_operation (PLUS, mode, constant_term,
7300 XEXP (op1, 1));
7301 if (temp != 0)
7302 op1 = temp;
7303 else
7304 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7305 }
7306
7307 /* Put a constant term last and put a multiplication first. */
7308 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7309 temp = op1, op1 = op0, op0 = temp;
7310
7311 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7312 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7313
7314 case MINUS_EXPR:
7315 /* For initializers, we are allowed to return a MINUS of two
7316 symbolic constants. Here we handle all cases when both operands
7317 are constant. */
7318 /* Handle difference of two symbolic constants,
7319 for the sake of an initializer. */
7320 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7321 && really_constant_p (TREE_OPERAND (exp, 0))
7322 && really_constant_p (TREE_OPERAND (exp, 1)))
7323 {
7324 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7325 VOIDmode, ro_modifier);
7326 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7327 VOIDmode, ro_modifier);
7328
7329 /* If the last operand is a CONST_INT, use plus_constant of
7330 the negated constant. Else make the MINUS. */
7331 if (GET_CODE (op1) == CONST_INT)
7332 return plus_constant (op0, - INTVAL (op1));
7333 else
7334 return gen_rtx_MINUS (mode, op0, op1);
7335 }
7336 /* Convert A - const to A + (-const). */
7337 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7338 {
7339 tree negated = fold (build1 (NEGATE_EXPR, type,
7340 TREE_OPERAND (exp, 1)));
7341
7342 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7343 /* If we can't negate the constant in TYPE, leave it alone and
7344 expand_binop will negate it for us. We used to try to do it
7345 here in the signed version of TYPE, but that doesn't work
7346 on POINTER_TYPEs. */;
7347 else
7348 {
7349 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7350 goto plus_expr;
7351 }
7352 }
7353 this_optab = sub_optab;
7354 goto binop;
7355
7356 case MULT_EXPR:
7357 preexpand_calls (exp);
7358 /* If first operand is constant, swap them.
7359 Thus the following special case checks need only
7360 check the second operand. */
7361 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7362 {
7363 register tree t1 = TREE_OPERAND (exp, 0);
7364 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7365 TREE_OPERAND (exp, 1) = t1;
7366 }
7367
7368 /* Attempt to return something suitable for generating an
7369 indexed address, for machines that support that. */
7370
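      /* Illustration (hypothetical expression): expanding (i + 2) * 4 with
         EXPAND_SUM distributes the multiplication below, yielding
         (plus (mult i 4) 8), a shape that matches base + index*scale +
         displacement addressing on machines that have it.  */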
7371 if (modifier == EXPAND_SUM && mode == ptr_mode
7372 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7373 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7374 {
7375 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7376 EXPAND_SUM);
7377
7378 /* Apply distributive law if OP0 is x+c. */
7379 if (GET_CODE (op0) == PLUS
7380 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7381 return
7382 gen_rtx_PLUS
7383 (mode,
7384 gen_rtx_MULT
7385 (mode, XEXP (op0, 0),
7386 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7387 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7388 * INTVAL (XEXP (op0, 1))));
7389
7390 if (GET_CODE (op0) != REG)
7391 op0 = force_operand (op0, NULL_RTX);
7392 if (GET_CODE (op0) != REG)
7393 op0 = copy_to_mode_reg (mode, op0);
7394
7395 return
7396 gen_rtx_MULT (mode, op0,
7397 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7398 }
7399
7400 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7401 subtarget = 0;
7402
7403 /* Check for multiplying things that have been extended
7404 from a narrower type. If this machine supports multiplying
7405 in that narrower type with a result in the desired type,
7406 do it that way, and avoid the explicit type-conversion. */
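      /* Illustration (assuming the target has such a pattern): for HImode
         operands widened to SImode, a product like
         (int) (short) a * (int) (short) b can use a single widening
         multiply (a mulhisi3-style insn) instead of extending both
         operands and then doing a full SImode multiply.  */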
7407 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7408 && TREE_CODE (type) == INTEGER_TYPE
7409 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7410 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7411 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7412 && int_fits_type_p (TREE_OPERAND (exp, 1),
7413 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7414 /* Don't use a widening multiply if a shift will do. */
7415 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7416 > HOST_BITS_PER_WIDE_INT)
7417 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7418 ||
7419 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7420 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7421 ==
7422 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7423 /* If both operands are extended, they must either both
7424 be zero-extended or both be sign-extended. */
7425 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7426 ==
7427 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7428 {
7429 enum machine_mode innermode
7430 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7431 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7432 ? smul_widen_optab : umul_widen_optab);
7433 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7434 ? umul_widen_optab : smul_widen_optab);
7435 if (mode == GET_MODE_WIDER_MODE (innermode))
7436 {
7437 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7438 {
7439 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7440 NULL_RTX, VOIDmode, 0);
7441 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7442 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7443 VOIDmode, 0);
7444 else
7445 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7446 NULL_RTX, VOIDmode, 0);
7447 goto binop2;
7448 }
7449 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7450 && innermode == word_mode)
7451 {
7452 rtx htem;
7453 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7454 NULL_RTX, VOIDmode, 0);
7455 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7457 VOIDmode, 0);
7458 else
7459 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7460 NULL_RTX, VOIDmode, 0);
7461 temp = expand_binop (mode, other_optab, op0, op1, target,
7462 unsignedp, OPTAB_LIB_WIDEN);
7463 htem = expand_mult_highpart_adjust (innermode,
7464 gen_highpart (innermode, temp),
7465 op0, op1,
7466 gen_highpart (innermode, temp),
7467 unsignedp);
7468 emit_move_insn (gen_highpart (innermode, temp), htem);
7469 return temp;
7470 }
7471 }
7472 }
7473 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7474 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7475 return expand_mult (mode, op0, op1, target, unsignedp);
7476
7477 case TRUNC_DIV_EXPR:
7478 case FLOOR_DIV_EXPR:
7479 case CEIL_DIV_EXPR:
7480 case ROUND_DIV_EXPR:
7481 case EXACT_DIV_EXPR:
7482 preexpand_calls (exp);
7483 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7484 subtarget = 0;
7485 /* Possible optimization: compute the dividend with EXPAND_SUM
7486 then, if the divisor is constant, we can optimize the case
7487 where some terms of the dividend have coefficients divisible by it. */
7488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7490 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7491
7492 case RDIV_EXPR:
7493 this_optab = flodiv_optab;
7494 goto binop;
7495
7496 case TRUNC_MOD_EXPR:
7497 case FLOOR_MOD_EXPR:
7498 case CEIL_MOD_EXPR:
7499 case ROUND_MOD_EXPR:
7500 preexpand_calls (exp);
7501 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7502 subtarget = 0;
7503 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7504 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7505 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7506
7507 case FIX_ROUND_EXPR:
7508 case FIX_FLOOR_EXPR:
7509 case FIX_CEIL_EXPR:
7510 abort (); /* Not used for C. */
7511
7512 case FIX_TRUNC_EXPR:
7513 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7514 if (target == 0)
7515 target = gen_reg_rtx (mode);
7516 expand_fix (target, op0, unsignedp);
7517 return target;
7518
7519 case FLOAT_EXPR:
7520 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7521 if (target == 0)
7522 target = gen_reg_rtx (mode);
7523 /* expand_float can't figure out what to do if FROM has VOIDmode.
7524 So give it the correct mode. With -O, cse will optimize this. */
7525 if (GET_MODE (op0) == VOIDmode)
7526 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7527 op0);
7528 expand_float (target, op0,
7529 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7530 return target;
7531
7532 case NEGATE_EXPR:
7533 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7534 temp = expand_unop (mode, neg_optab, op0, target, 0);
7535 if (temp == 0)
7536 abort ();
7537 return temp;
7538
7539 case ABS_EXPR:
7540 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7541
7542 /* Handle complex values specially. */
7543 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7544 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7545 return expand_complex_abs (mode, op0, target, unsignedp);
7546
7547 /* Unsigned abs is simply the operand. Testing here means we don't
7548 risk generating incorrect code below. */
7549 if (TREE_UNSIGNED (type))
7550 return op0;
7551
7552 return expand_abs (mode, op0, target,
7553 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7554
7555 case MAX_EXPR:
7556 case MIN_EXPR:
7557 target = original_target;
7558 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7559 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7560 || GET_MODE (target) != mode
7561 || (GET_CODE (target) == REG
7562 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7563 target = gen_reg_rtx (mode);
7564 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7565 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7566
7567 /* First try to do it with a special MIN or MAX instruction.
7568 If that does not win, use a conditional jump to select the proper
7569 value. */
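      /* Sketch of the fallback for MAX_EXPR (illustrative):
           target = op0;
           if (target >= op1) goto done;
           target = op1;
         done:
         with the comparison done word by word when the mode is too wide
         to compare directly.  */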
7570 this_optab = (TREE_UNSIGNED (type)
7571 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7572 : (code == MIN_EXPR ? smin_optab : smax_optab));
7573
7574 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7575 OPTAB_WIDEN);
7576 if (temp != 0)
7577 return temp;
7578
7579 /* At this point, a MEM target is no longer useful; we will get better
7580 code without it. */
7581
7582 if (GET_CODE (target) == MEM)
7583 target = gen_reg_rtx (mode);
7584
7585 if (target != op0)
7586 emit_move_insn (target, op0);
7587
7588 op0 = gen_label_rtx ();
7589
7590 /* If this mode is an integer too wide to compare properly,
7591 compare word by word. Rely on cse to optimize constant cases. */
7592 if (GET_MODE_CLASS (mode) == MODE_INT
7593 && ! can_compare_p (GE, mode, ccp_jump))
7594 {
7595 if (code == MAX_EXPR)
7596 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7597 target, op1, NULL_RTX, op0);
7598 else
7599 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7600 op1, target, NULL_RTX, op0);
7601 }
7602 else
7603 {
7604 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7605 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7606 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7607 op0);
7608 }
7609 emit_move_insn (target, op1);
7610 emit_label (op0);
7611 return target;
7612
7613 case BIT_NOT_EXPR:
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7616 if (temp == 0)
7617 abort ();
7618 return temp;
7619
7620 case FFS_EXPR:
7621 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7622 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7623 if (temp == 0)
7624 abort ();
7625 return temp;
7626
7627 /* ??? Can optimize bitwise operations with one arg constant.
7628 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7629 and (a bitwise1 b) bitwise2 b (etc)
7630 but that is probably not worthwhile. */
7631
7632 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7633 boolean values when we want in all cases to compute both of them. In
7634 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7635 as actual zero-or-1 values and then bitwise anding. In cases where
7636 there cannot be any side effects, better code would be made by
7637 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7638 how to recognize those cases. */
7639
7640 case TRUTH_AND_EXPR:
7641 case BIT_AND_EXPR:
7642 this_optab = and_optab;
7643 goto binop;
7644
7645 case TRUTH_OR_EXPR:
7646 case BIT_IOR_EXPR:
7647 this_optab = ior_optab;
7648 goto binop;
7649
7650 case TRUTH_XOR_EXPR:
7651 case BIT_XOR_EXPR:
7652 this_optab = xor_optab;
7653 goto binop;
7654
7655 case LSHIFT_EXPR:
7656 case RSHIFT_EXPR:
7657 case LROTATE_EXPR:
7658 case RROTATE_EXPR:
7659 preexpand_calls (exp);
7660 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7661 subtarget = 0;
7662 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7663 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7664 unsignedp);
7665
7666 /* Could determine the answer when only additive constants differ. Also,
7667 the addition of one can be handled by changing the condition. */
7668 case LT_EXPR:
7669 case LE_EXPR:
7670 case GT_EXPR:
7671 case GE_EXPR:
7672 case EQ_EXPR:
7673 case NE_EXPR:
7674 case UNORDERED_EXPR:
7675 case ORDERED_EXPR:
7676 case UNLT_EXPR:
7677 case UNLE_EXPR:
7678 case UNGT_EXPR:
7679 case UNGE_EXPR:
7680 case UNEQ_EXPR:
7681 preexpand_calls (exp);
7682 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7683 if (temp != 0)
7684 return temp;
7685
7686 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7687 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7688 && original_target
7689 && GET_CODE (original_target) == REG
7690 && (GET_MODE (original_target)
7691 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7692 {
7693 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7694 VOIDmode, 0);
7695
7696 if (temp != original_target)
7697 temp = copy_to_reg (temp);
7698
7699 op1 = gen_label_rtx ();
7700 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7701 GET_MODE (temp), unsignedp, 0, op1);
7702 emit_move_insn (temp, const1_rtx);
7703 emit_label (op1);
7704 return temp;
7705 }
7706
7707 /* If no set-flag instruction, must generate a conditional
7708 store into a temporary variable. Drop through
7709 and handle this like && and ||. */
7710
7711 case TRUTH_ANDIF_EXPR:
7712 case TRUTH_ORIF_EXPR:
7713 if (! ignore
7714 && (target == 0 || ! safe_from_p (target, exp, 1)
7715 /* Make sure we don't have a hard reg (such as function's return
7716 value) live across basic blocks, if not optimizing. */
7717 || (!optimize && GET_CODE (target) == REG
7718 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7719 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7720
7721 if (target)
7722 emit_clr_insn (target);
7723
7724 op1 = gen_label_rtx ();
7725 jumpifnot (exp, op1);
7726
7727 if (target)
7728 emit_0_to_1_insn (target);
7729
7730 emit_label (op1);
7731 return ignore ? const0_rtx : target;
7732
7733 case TRUTH_NOT_EXPR:
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7735 /* The parser is careful to generate TRUTH_NOT_EXPR
7736 only with operands that are always zero or one. */
7737 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7738 target, 1, OPTAB_LIB_WIDEN);
7739 if (temp == 0)
7740 abort ();
7741 return temp;
7742
7743 case COMPOUND_EXPR:
7744 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7745 emit_queue ();
7746 return expand_expr (TREE_OPERAND (exp, 1),
7747 (ignore ? const0_rtx : target),
7748 VOIDmode, 0);
7749
7750 case COND_EXPR:
7751 /* If we would have a "singleton" (see below) were it not for a
7752 conversion in each arm, bring that conversion back out. */
7753 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7754 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7755 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7756 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7757 {
7758 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7759 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7760
7761 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7762 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7763 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7764 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7765 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7766 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7767 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7768 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7769 return expand_expr (build1 (NOP_EXPR, type,
7770 build (COND_EXPR, TREE_TYPE (true),
7771 TREE_OPERAND (exp, 0),
7772 true, false)),
7773 target, tmode, modifier);
7774 }
7775
7776 {
7777 /* Note that COND_EXPRs whose type is a structure or union
7778 are required to be constructed to contain assignments of
7779 a temporary variable, so that we can evaluate them here
7780 for side effect only. If type is void, we must do likewise. */
7781
7782 /* If an arm of the branch requires a cleanup,
7783 only that cleanup is performed. */
7784
7785 tree singleton = 0;
7786 tree binary_op = 0, unary_op = 0;
7787
7788 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7789 convert it to our mode, if necessary. */
7790 if (integer_onep (TREE_OPERAND (exp, 1))
7791 && integer_zerop (TREE_OPERAND (exp, 2))
7792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7793 {
7794 if (ignore)
7795 {
7796 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7797 ro_modifier);
7798 return const0_rtx;
7799 }
7800
7801 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7802 if (GET_MODE (op0) == mode)
7803 return op0;
7804
7805 if (target == 0)
7806 target = gen_reg_rtx (mode);
7807 convert_move (target, op0, unsignedp);
7808 return target;
7809 }
7810
7811 /* Check for X ? A + B : A. If we have this, we can copy A to the
7812 output and conditionally add B. Similarly for unary operations.
7813 Don't do this if X has side-effects because those side effects
7814 might affect A or B and the "?" operation is a sequence point in
7815 ANSI. (operand_equal_p tests for side effects.) */
7816
7817 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7818 && operand_equal_p (TREE_OPERAND (exp, 2),
7819 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7820 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7822 && operand_equal_p (TREE_OPERAND (exp, 1),
7823 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7824 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7826 && operand_equal_p (TREE_OPERAND (exp, 2),
7827 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7828 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7829 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7830 && operand_equal_p (TREE_OPERAND (exp, 1),
7831 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7832 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7833
7834 /* If we are not to produce a result, we have no target. Otherwise,
7835 if a target was specified use it; it will not be used as an
7836 intermediate target unless it is safe. If no target, use a
7837 temporary. */
7838
7839 if (ignore)
7840 temp = 0;
7841 else if (original_target
7842 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7843 || (singleton && GET_CODE (original_target) == REG
7844 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7845 && original_target == var_rtx (singleton)))
7846 && GET_MODE (original_target) == mode
7847 #ifdef HAVE_conditional_move
7848 && (! can_conditionally_move_p (mode)
7849 || GET_CODE (original_target) == REG
7850 || TREE_ADDRESSABLE (type))
7851 #endif
7852 && ! (GET_CODE (original_target) == MEM
7853 && MEM_VOLATILE_P (original_target)))
7854 temp = original_target;
7855 else if (TREE_ADDRESSABLE (type))
7856 abort ();
7857 else
7858 temp = assign_temp (type, 0, 0, 1);
7859
7860 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7861 do the test of X as a store-flag operation, do this as
7862 A + ((X != 0) << log C). Similarly for other simple binary
7863 operators. Only do for C == 1 if BRANCH_COST is low. */
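        /* Illustration: with a sufficiently high BRANCH_COST,
           `x ? a + 4 : a' is expanded branch-free as a + ((x != 0) << 2);
           for `x ? a : a + 4' the condition is inverted first, giving
           a + ((x == 0) << 2).  */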
7864 if (temp && singleton && binary_op
7865 && (TREE_CODE (binary_op) == PLUS_EXPR
7866 || TREE_CODE (binary_op) == MINUS_EXPR
7867 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7868 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7869 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7870 : integer_onep (TREE_OPERAND (binary_op, 1)))
7871 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7872 {
7873 rtx result;
7874 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7875 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7876 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7877 : xor_optab);
7878
7879 /* If we had X ? A : A + 1, do this as A + (X == 0).
7880
7881 We have to invert the truth value here and then put it
7882 back later if do_store_flag fails. We cannot simply copy
7883 TREE_OPERAND (exp, 0) to another variable and modify that
7884 because invert_truthvalue can modify the tree pointed to
7885 by its argument. */
7886 if (singleton == TREE_OPERAND (exp, 1))
7887 TREE_OPERAND (exp, 0)
7888 = invert_truthvalue (TREE_OPERAND (exp, 0));
7889
7890 result = do_store_flag (TREE_OPERAND (exp, 0),
7891 (safe_from_p (temp, singleton, 1)
7892 ? temp : NULL_RTX),
7893 mode, BRANCH_COST <= 1);
7894
7895 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7896 result = expand_shift (LSHIFT_EXPR, mode, result,
7897 build_int_2 (tree_log2
7898 (TREE_OPERAND
7899 (binary_op, 1)),
7900 0),
7901 (safe_from_p (temp, singleton, 1)
7902 ? temp : NULL_RTX), 0);
7903
7904 if (result)
7905 {
7906 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7907 return expand_binop (mode, boptab, op1, result, temp,
7908 unsignedp, OPTAB_LIB_WIDEN);
7909 }
7910 else if (singleton == TREE_OPERAND (exp, 1))
7911 TREE_OPERAND (exp, 0)
7912 = invert_truthvalue (TREE_OPERAND (exp, 0));
7913 }
7914
7915 do_pending_stack_adjust ();
7916 NO_DEFER_POP;
7917 op0 = gen_label_rtx ();
7918
7919 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7920 {
7921 if (temp != 0)
7922 {
7923 /* If the target conflicts with the other operand of the
7924 binary op, we can't use it. Also, we can't use the target
7925 if it is a hard register, because evaluating the condition
7926 might clobber it. */
7927 if ((binary_op
7928 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7929 || (GET_CODE (temp) == REG
7930 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7931 temp = gen_reg_rtx (mode);
7932 store_expr (singleton, temp, 0);
7933 }
7934 else
7935 expand_expr (singleton,
7936 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7937 if (singleton == TREE_OPERAND (exp, 1))
7938 jumpif (TREE_OPERAND (exp, 0), op0);
7939 else
7940 jumpifnot (TREE_OPERAND (exp, 0), op0);
7941
7942 start_cleanup_deferral ();
7943 if (binary_op && temp == 0)
7944 /* Just touch the other operand. */
7945 expand_expr (TREE_OPERAND (binary_op, 1),
7946 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7947 else if (binary_op)
7948 store_expr (build (TREE_CODE (binary_op), type,
7949 make_tree (type, temp),
7950 TREE_OPERAND (binary_op, 1)),
7951 temp, 0);
7952 else
7953 store_expr (build1 (TREE_CODE (unary_op), type,
7954 make_tree (type, temp)),
7955 temp, 0);
7956 op1 = op0;
7957 }
7958 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7959 comparison operator. If we have one of these cases, set the
7960 output to A, branch on A (cse will merge these two references),
7961 then set the output to FOO. */
7962 else if (temp
7963 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7964 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7965 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7966 TREE_OPERAND (exp, 1), 0)
7967 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7968 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7969 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7970 {
7971 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7972 temp = gen_reg_rtx (mode);
7973 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7974 jumpif (TREE_OPERAND (exp, 0), op0);
7975
7976 start_cleanup_deferral ();
7977 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7978 op1 = op0;
7979 }
7980 else if (temp
7981 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7982 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7983 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7984 TREE_OPERAND (exp, 2), 0)
7985 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7986 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7987 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7988 {
7989 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7990 temp = gen_reg_rtx (mode);
7991 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7992 jumpifnot (TREE_OPERAND (exp, 0), op0);
7993
7994 start_cleanup_deferral ();
7995 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7996 op1 = op0;
7997 }
7998 else
7999 {
8000 op1 = gen_label_rtx ();
8001 jumpifnot (TREE_OPERAND (exp, 0), op0);
8002
8003 start_cleanup_deferral ();
8004
8005 /* One branch of the cond can be void, if it never returns. For
8006 example A ? throw : E. */
8007 if (temp != 0
8008 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8009 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8010 else
8011 expand_expr (TREE_OPERAND (exp, 1),
8012 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8013 end_cleanup_deferral ();
8014 emit_queue ();
8015 emit_jump_insn (gen_jump (op1));
8016 emit_barrier ();
8017 emit_label (op0);
8018 start_cleanup_deferral ();
8019 if (temp != 0
8020 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8021 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8022 else
8023 expand_expr (TREE_OPERAND (exp, 2),
8024 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8025 }
8026
8027 end_cleanup_deferral ();
8028
8029 emit_queue ();
8030 emit_label (op1);
8031 OK_DEFER_POP;
8032
8033 return temp;
8034 }
8035
8036 case TARGET_EXPR:
8037 {
8038 /* Something needs to be initialized, but we didn't know
8039 where that thing was when building the tree. For example,
8040 it could be the return value of a function, or a parameter
8041 to a function which is laid down in the stack, or a temporary
8042 variable which must be passed by reference.
8043
8044 We guarantee that the expression will either be constructed
8045 or copied into our original target. */
8046
8047 tree slot = TREE_OPERAND (exp, 0);
8048 tree cleanups = NULL_TREE;
8049 tree exp1;
8050
8051 if (TREE_CODE (slot) != VAR_DECL)
8052 abort ();
8053
8054 if (! ignore)
8055 target = original_target;
8056
8057 /* Set this here so that if we get a target that refers to a
8058 register variable that's already been used, put_reg_into_stack
8059 knows that it should fix up those uses. */
8060 TREE_USED (slot) = 1;
8061
8062 if (target == 0)
8063 {
8064 if (DECL_RTL (slot) != 0)
8065 {
8066 target = DECL_RTL (slot);
8067 /* If we have already expanded the slot, don't do
8068 it again. (mrs) */
8069 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8070 return target;
8071 }
8072 else
8073 {
8074 target = assign_temp (type, 2, 0, 1);
8075 /* All temp slots at this level must not conflict. */
8076 preserve_temp_slots (target);
8077 DECL_RTL (slot) = target;
8078 if (TREE_ADDRESSABLE (slot))
8079 {
8080 TREE_ADDRESSABLE (slot) = 0;
8081 mark_addressable (slot);
8082 }
8083
8084 /* Since SLOT is not known to the called function
8085 to belong to its stack frame, we must build an explicit
8086 cleanup. This case occurs when we must build up a reference
8087 to pass the reference as an argument. In this case,
8088 it is very likely that such a reference need not be
8089 built here. */
8090
8091 if (TREE_OPERAND (exp, 2) == 0)
8092 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8093 cleanups = TREE_OPERAND (exp, 2);
8094 }
8095 }
8096 else
8097 {
8098 /* This case does occur, when expanding a parameter which
8099 needs to be constructed on the stack. The target
8100 is the actual stack address that we want to initialize.
8101 The function we call will perform the cleanup in this case. */
8102
8103 /* If we have already assigned it space, use that space,
8104 not the target that we were passed, since our target
8105 parameter is only a hint. */
8106 if (DECL_RTL (slot) != 0)
8107 {
8108 target = DECL_RTL (slot);
8109 /* If we have already expanded the slot, don't do
8110 it again. (mrs) */
8111 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8112 return target;
8113 }
8114 else
8115 {
8116 DECL_RTL (slot) = target;
8117 /* If we must have an addressable slot, then make sure that
8118 the RTL that we just stored in slot is OK. */
8119 if (TREE_ADDRESSABLE (slot))
8120 {
8121 TREE_ADDRESSABLE (slot) = 0;
8122 mark_addressable (slot);
8123 }
8124 }
8125 }
8126
8127 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8128 /* Mark it as expanded. */
8129 TREE_OPERAND (exp, 1) = NULL_TREE;
8130
8131 store_expr (exp1, target, 0);
8132
8133 expand_decl_cleanup (NULL_TREE, cleanups);
8134
8135 return target;
8136 }
8137
8138 case INIT_EXPR:
8139 {
8140 tree lhs = TREE_OPERAND (exp, 0);
8141 tree rhs = TREE_OPERAND (exp, 1);
8142 tree noncopied_parts = 0;
8143 tree lhs_type = TREE_TYPE (lhs);
8144
8145 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8146 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8147 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8148 TYPE_NONCOPIED_PARTS (lhs_type));
8149 while (noncopied_parts != 0)
8150 {
8151 expand_assignment (TREE_VALUE (noncopied_parts),
8152 TREE_PURPOSE (noncopied_parts), 0, 0);
8153 noncopied_parts = TREE_CHAIN (noncopied_parts);
8154 }
8155 return temp;
8156 }
8157
8158 case MODIFY_EXPR:
8159 {
8160 /* If lhs is complex, expand calls in rhs before computing it.
8161 That's so we don't compute a pointer and save it over a call.
8162 If lhs is simple, compute it first so we can give it as a
8163 target if the rhs is just a call. This avoids an extra temp and copy
8164 and prevents a partial subsumption that would make bad code.
8165 Actually we could treat component_ref's of vars like vars. */
8166
8167 tree lhs = TREE_OPERAND (exp, 0);
8168 tree rhs = TREE_OPERAND (exp, 1);
8169 tree noncopied_parts = 0;
8170 tree lhs_type = TREE_TYPE (lhs);
8171
8172 temp = 0;
8173
8174 if (TREE_CODE (lhs) != VAR_DECL
8175 && TREE_CODE (lhs) != RESULT_DECL
8176 && TREE_CODE (lhs) != PARM_DECL
8177 && ! (TREE_CODE (lhs) == INDIRECT_REF
8178 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8179 preexpand_calls (exp);
8180
8181 /* Check for |= or &= of a bitfield of size one into another bitfield
8182 of size 1. In this case, (unless we need the result of the
8183 assignment) we can do this more efficiently with a
8184 test followed by an assignment, if necessary.
8185
8186 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8187 things change so we do, this code should be enhanced to
8188 support it. */
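        /* Illustration (assuming 1-bit fields A and B): the statement
           `s.a |= t.b;' becomes roughly `if (t.b) s.a = 1;' and
           `s.a &= t.b;' becomes `if (!t.b) s.a = 0;', replacing a
           read-modify-write of the destination bitfield with a simple
           test and conditional store.  */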
8189 if (ignore
8190 && TREE_CODE (lhs) == COMPONENT_REF
8191 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8192 || TREE_CODE (rhs) == BIT_AND_EXPR)
8193 && TREE_OPERAND (rhs, 0) == lhs
8194 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8195 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8196 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8197 {
8198 rtx label = gen_label_rtx ();
8199
8200 do_jump (TREE_OPERAND (rhs, 1),
8201 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8202 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8203 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8204 (TREE_CODE (rhs) == BIT_IOR_EXPR
8205 ? integer_one_node
8206 : integer_zero_node)),
8207 0, 0);
8208 do_pending_stack_adjust ();
8209 emit_label (label);
8210 return const0_rtx;
8211 }
8212
8213 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8214 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8215 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8216 TYPE_NONCOPIED_PARTS (lhs_type));
8217
8218 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8219 while (noncopied_parts != 0)
8220 {
8221 expand_assignment (TREE_PURPOSE (noncopied_parts),
8222 TREE_VALUE (noncopied_parts), 0, 0);
8223 noncopied_parts = TREE_CHAIN (noncopied_parts);
8224 }
8225 return temp;
8226 }
8227
8228 case RETURN_EXPR:
8229 if (!TREE_OPERAND (exp, 0))
8230 expand_null_return ();
8231 else
8232 expand_return (TREE_OPERAND (exp, 0));
8233 return const0_rtx;
8234
8235 case PREINCREMENT_EXPR:
8236 case PREDECREMENT_EXPR:
8237 return expand_increment (exp, 0, ignore);
8238
8239 case POSTINCREMENT_EXPR:
8240 case POSTDECREMENT_EXPR:
8241 /* Faster to treat as pre-increment if result is not used. */
8242 return expand_increment (exp, ! ignore, ignore);
8243
8244 case ADDR_EXPR:
8245 /* If nonzero, TEMP will be set to the address of something that might
8246 be a MEM corresponding to a stack slot. */
8247 temp = 0;
8248
8249 /* Are we taking the address of a nested function? */
8250 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8251 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8252 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8253 && ! TREE_STATIC (exp))
8254 {
8255 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8256 op0 = force_operand (op0, target);
8257 }
8258 /* If we are taking the address of something erroneous, just
8259 return a zero. */
8260 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8261 return const0_rtx;
8262 else
8263 {
8264 /* We make sure to pass const0_rtx down if we came in with
8265 ignore set, to avoid doing the cleanups twice for something. */
8266 op0 = expand_expr (TREE_OPERAND (exp, 0),
8267 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8268 (modifier == EXPAND_INITIALIZER
8269 ? modifier : EXPAND_CONST_ADDRESS));
8270
8271 /* If we are going to ignore the result, OP0 will have been set
8272 to const0_rtx, so just return it. Don't get confused and
8273 think we are taking the address of the constant. */
8274 if (ignore)
8275 return op0;
8276
8277 op0 = protect_from_queue (op0, 0);
8278
8279 /* We would like the object in memory. If it is a constant, we can
8280 have it be statically allocated into memory. For a non-constant,
8281 we need to allocate some memory and store the value into it. */
8282
8283 if (CONSTANT_P (op0))
8284 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8285 op0);
8286 else if (GET_CODE (op0) == MEM)
8287 {
8288 mark_temp_addr_taken (op0);
8289 temp = XEXP (op0, 0);
8290 }
8291
8292 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8293 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8294 {
8295 /* If this object is in a register, it must not
8296 be BLKmode. */
8297 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8298 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8299
8300 mark_temp_addr_taken (memloc);
8301 emit_move_insn (memloc, op0);
8302 op0 = memloc;
8303 }
8304
8305 if (GET_CODE (op0) != MEM)
8306 abort ();
8307
8308 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8309 {
8310 temp = XEXP (op0, 0);
8311 #ifdef POINTERS_EXTEND_UNSIGNED
8312 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8313 && mode == ptr_mode)
8314 temp = convert_memory_address (ptr_mode, temp);
8315 #endif
8316 return temp;
8317 }
8318
8319 op0 = force_operand (XEXP (op0, 0), target);
8320 }
8321
8322 if (flag_force_addr && GET_CODE (op0) != REG)
8323 op0 = force_reg (Pmode, op0);
8324
8325 if (GET_CODE (op0) == REG
8326 && ! REG_USERVAR_P (op0))
8327 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8328
8329 /* If we might have had a temp slot, add an equivalent address
8330 for it. */
8331 if (temp != 0)
8332 update_temp_slot_address (temp, op0);
8333
8334 #ifdef POINTERS_EXTEND_UNSIGNED
8335 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8336 && mode == ptr_mode)
8337 op0 = convert_memory_address (ptr_mode, op0);
8338 #endif
8339
8340 return op0;
8341
8342 case ENTRY_VALUE_EXPR:
8343 abort ();
8344
8345 /* COMPLEX type for Extended Pascal & Fortran */
8346 case COMPLEX_EXPR:
8347 {
8348 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8349 rtx insns;
8350
8351 /* Get the rtx code of the operands. */
8352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8353 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8354
8355 if (! target)
8356 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8357
8358 start_sequence ();
8359
8360 /* Move the real (op0) and imaginary (op1) parts to their location. */
8361 emit_move_insn (gen_realpart (mode, target), op0);
8362 emit_move_insn (gen_imagpart (mode, target), op1);
8363
8364 insns = get_insns ();
8365 end_sequence ();
8366
8367 /* Complex construction should appear as a single unit. */
8368 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8369 each with a separate pseudo as destination.
8370 It's not correct for flow to treat them as a unit. */
8371 if (GET_CODE (target) != CONCAT)
8372 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8373 else
8374 emit_insns (insns);
8375
8376 return target;
8377 }
8378
8379 case REALPART_EXPR:
8380 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8381 return gen_realpart (mode, op0);
8382
8383 case IMAGPART_EXPR:
8384 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8385 return gen_imagpart (mode, op0);
8386
8387 case CONJ_EXPR:
8388 {
8389 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8390 rtx imag_t;
8391 rtx insns;
8392
8393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8394
8395 if (! target)
8396 target = gen_reg_rtx (mode);
8397
8398 start_sequence ();
8399
8400 /* Store the realpart and the negated imagpart to target. */
8401 emit_move_insn (gen_realpart (partmode, target),
8402 gen_realpart (partmode, op0));
8403
8404 imag_t = gen_imagpart (partmode, target);
8405 temp = expand_unop (partmode, neg_optab,
8406 gen_imagpart (partmode, op0), imag_t, 0);
8407 if (temp != imag_t)
8408 emit_move_insn (imag_t, temp);
8409
8410 insns = get_insns ();
8411 end_sequence ();
8412
8413 /* Conjugate should appear as a single unit.
8414 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8415 each with a separate pseudo as destination.
8416 It's not correct for flow to treat them as a unit. */
8417 if (GET_CODE (target) != CONCAT)
8418 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8419 else
8420 emit_insns (insns);
8421
8422 return target;
8423 }
8424
8425 case TRY_CATCH_EXPR:
8426 {
8427 tree handler = TREE_OPERAND (exp, 1);
8428
8429 expand_eh_region_start ();
8430
8431 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8432
8433 expand_eh_region_end (handler);
8434
8435 return op0;
8436 }
8437
8438 case TRY_FINALLY_EXPR:
8439 {
8440 tree try_block = TREE_OPERAND (exp, 0);
8441 tree finally_block = TREE_OPERAND (exp, 1);
8442 rtx finally_label = gen_label_rtx ();
8443 rtx done_label = gen_label_rtx ();
8444 rtx return_link = gen_reg_rtx (Pmode);
8445 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8446 (tree) finally_label, (tree) return_link);
8447 TREE_SIDE_EFFECTS (cleanup) = 1;
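        /* Sketch of the emitted control flow (illustrative): exits from the
           binding contour set up below run the cleanup, which loads
           RETURN_LINK with the address of a resume label and jumps to
           FINALLY_LABEL; the finally block then returns through the
           indirect jump on RETURN_LINK, and the normal path's resume point
           falls through to the jump to DONE_LABEL.  */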
8448
8449 /* Start a new binding layer that will keep track of all cleanup
8450 actions to be performed. */
8451 expand_start_bindings (2);
8452
8453 target_temp_slot_level = temp_slot_level;
8454
8455 expand_decl_cleanup (NULL_TREE, cleanup);
8456 op0 = expand_expr (try_block, target, tmode, modifier);
8457
8458 preserve_temp_slots (op0);
8459 expand_end_bindings (NULL_TREE, 0, 0);
8460 emit_jump (done_label);
8461 emit_label (finally_label);
8462 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8463 emit_indirect_jump (return_link);
8464 emit_label (done_label);
8465 return op0;
8466 }
8467
8468 case GOTO_SUBROUTINE_EXPR:
8469 {
8470 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8471 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8472 rtx return_address = gen_label_rtx ();
8473 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8474 emit_jump (subr);
8475 emit_label (return_address);
8476 return const0_rtx;
8477 }
8478
8479 case POPDCC_EXPR:
8480 {
8481 rtx dcc = get_dynamic_cleanup_chain ();
8482 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8483 return const0_rtx;
8484 }
8485
8486 case POPDHC_EXPR:
8487 {
8488 rtx dhc = get_dynamic_handler_chain ();
8489 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8490 return const0_rtx;
8491 }
8492
8493 case VA_ARG_EXPR:
8494 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8495
8496 default:
8497 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8498 }
8499
8500 /* Here to do an ordinary binary operator, generating an instruction
8501 from the optab already placed in `this_optab'. */
8502 binop:
8503 preexpand_calls (exp);
8504 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8505 subtarget = 0;
8506 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8507 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8508 binop2:
8509 temp = expand_binop (mode, this_optab, op0, op1, target,
8510 unsignedp, OPTAB_LIB_WIDEN);
8511 if (temp == 0)
8512 abort ();
8513 return temp;
8514 }
8515 \f
8516 /* Similar to expand_expr, except that we don't specify a target, target
8517 mode, or modifier and we return the alignment of the inner type. This is
8518 used in cases where it is not necessary to align the result to the
8519 alignment of its type as long as we know the alignment of the result, for
8520 example for comparisons of BLKmode values. */
8521
8522 static rtx
8523 expand_expr_unaligned (exp, palign)
8524 register tree exp;
8525 unsigned int *palign;
8526 {
8527 register rtx op0;
8528 tree type = TREE_TYPE (exp);
8529 register enum machine_mode mode = TYPE_MODE (type);
8530
8531 /* Default the alignment we return to that of the type. */
8532 *palign = TYPE_ALIGN (type);
8533
8534 /* The only case in which we do anything special is when the resulting mode
8535 is BLKmode. */
8536 if (mode != BLKmode)
8537 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8538
8539 switch (TREE_CODE (exp))
8540 {
8541 case CONVERT_EXPR:
8542 case NOP_EXPR:
8543 case NON_LVALUE_EXPR:
8544 /* Conversions between BLKmode values don't change the underlying
8545 alignment or value. */
8546 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8547 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8548 break;
8549
8550 case ARRAY_REF:
8551 /* Much of the code for this case is copied directly from expand_expr.
8552 We need to duplicate it here because we will do something different
8553 in the fall-through case, so we need to handle the same exceptions
8554 it does. */
8555 {
8556 tree array = TREE_OPERAND (exp, 0);
8557 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8558 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8559 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8560 HOST_WIDE_INT i;
8561
8562 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8563 abort ();
8564
8565 /* Optimize the special-case of a zero lower bound.
8566
8567 We convert the low_bound to sizetype to avoid some problems
8568 with constant folding. (E.g. suppose the lower bound is 1,
8569 and its mode is QI. Without the conversion, (ARRAY
8570 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8571 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8572
8573 if (! integer_zerop (low_bound))
8574 index = size_diffop (index, convert (sizetype, low_bound));
8575
8576 /* If this is a constant index into a constant array,
8577 just get the value from the array. Handle both cases: when
8578 we have an explicit constructor and when our operand is a variable
8579 that was declared const. */
8580
8581 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8582 && 0 > compare_tree_int (index,
8583 list_length (CONSTRUCTOR_ELTS
8584 (TREE_OPERAND (exp, 0)))))
8585 {
8586 tree elem;
8587
8588 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8589 i = TREE_INT_CST_LOW (index);
8590 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8591 ;
8592
8593 if (elem)
8594 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8595 }
8596
8597 else if (optimize >= 1
8598 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8599 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8600 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8601 {
8602 if (TREE_CODE (index) == INTEGER_CST)
8603 {
8604 tree init = DECL_INITIAL (array);
8605
8606 if (TREE_CODE (init) == CONSTRUCTOR)
8607 {
8608 tree elem;
8609
8610 for (elem = CONSTRUCTOR_ELTS (init);
8611 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8612 elem = TREE_CHAIN (elem))
8613 ;
8614
8615 if (elem)
8616 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8617 palign);
8618 }
8619 }
8620 }
8621 }
8622
8623 /* ... fall through ... */
8624
8625 case COMPONENT_REF:
8626 case BIT_FIELD_REF:
8627 /* If the operand is a CONSTRUCTOR, we can just extract the
8628 appropriate field if it is present. Don't do this if we have
8629 already written the data since we want to refer to that copy
8630 and varasm.c assumes that's what we'll do. */
8631 if (TREE_CODE (exp) != ARRAY_REF
8632 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8633 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8634 {
8635 tree elt;
8636
8637 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8638 elt = TREE_CHAIN (elt))
8639 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8640 /* Note that unlike the case in expand_expr, we know this is
8641 BLKmode and hence not an integer. */
8642 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8643 }
8644
8645 {
8646 enum machine_mode mode1;
8647 HOST_WIDE_INT bitsize, bitpos;
8648 tree offset;
8649 int volatilep = 0;
8650 unsigned int alignment;
8651 int unsignedp;
8652 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8653 &mode1, &unsignedp, &volatilep,
8654 &alignment);
8655
8656 /* If we got back the original object, something is wrong. Perhaps
8657 we are evaluating an expression too early. In any event, don't
8658 infinitely recurse. */
8659 if (tem == exp)
8660 abort ();
8661
8662 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8663
8664 /* If this is a constant, put it into a register if it is a legitimate
8665 constant and OFFSET is 0; otherwise put it into memory. */
8666 if (CONSTANT_P (op0))
8667 {
8668 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8669
8670 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8671 && offset == 0)
8672 op0 = force_reg (inner_mode, op0);
8673 else
8674 op0 = validize_mem (force_const_mem (inner_mode, op0));
8675 }
8676
8677 if (offset != 0)
8678 {
8679 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8680
8681 /* If this object is in a register, put it into memory.
8682 This case can't occur in C, but can in Ada if we have
8683 unchecked conversion of an expression from a scalar type to
8684 an array or record type. */
8685 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8686 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8687 {
8688 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8689
8690 mark_temp_addr_taken (memloc);
8691 emit_move_insn (memloc, op0);
8692 op0 = memloc;
8693 }
8694
8695 if (GET_CODE (op0) != MEM)
8696 abort ();
8697
8698 if (GET_MODE (offset_rtx) != ptr_mode)
8699 {
8700 #ifdef POINTERS_EXTEND_UNSIGNED
8701 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8702 #else
8703 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8704 #endif
8705 }
8706
8707 op0 = change_address (op0, VOIDmode,
8708 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8709 force_reg (ptr_mode,
8710 offset_rtx)));
8711 }
8712
8713 /* Don't forget about volatility even if this is a bitfield. */
8714 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8715 {
8716 op0 = copy_rtx (op0);
8717 MEM_VOLATILE_P (op0) = 1;
8718 }
8719
8720 /* Check the access. */
8721 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8722 {
8723 rtx to;
8724 int size;
8725
8726 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8727 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8728
8729 /* Check the access right of the pointer. */
8730 in_check_memory_usage = 1;
8731 if (size > BITS_PER_UNIT)
8732 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8733 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8734 TYPE_MODE (sizetype),
8735 GEN_INT (MEMORY_USE_RO),
8736 TYPE_MODE (integer_type_node));
8737 in_check_memory_usage = 0;
8738 }
8739
8740 /* In cases where an aligned union has an unaligned object
8741 as a field, we might be extracting a BLKmode value from
8742 an integer-mode (e.g., SImode) object. Handle this case
8743 by doing the extract into an object as wide as the field
8744 (which we know to be the width of a basic mode), then
8745 storing into memory, and changing the mode to BLKmode.
8746 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8747 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8748 if (mode1 == VOIDmode
8749 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8750 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8751 && (TYPE_ALIGN (type) > alignment
8752 || bitpos % TYPE_ALIGN (type) != 0)))
8753 {
8754 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8755
8756 if (ext_mode == BLKmode)
8757 {
8758 /* In this case, BITPOS must start at a byte boundary. */
8759 if (GET_CODE (op0) != MEM
8760 || bitpos % BITS_PER_UNIT != 0)
8761 abort ();
8762
8763 op0 = change_address (op0, VOIDmode,
8764 plus_constant (XEXP (op0, 0),
8765 bitpos / BITS_PER_UNIT));
8766 }
8767 else
8768 {
8769 rtx new = assign_stack_temp (ext_mode,
8770 bitsize / BITS_PER_UNIT, 0);
8771
8772 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8773 unsignedp, NULL_RTX, ext_mode,
8774 ext_mode, alignment,
8775 int_size_in_bytes (TREE_TYPE (tem)));
8776
8777 /* If the result is a record type and BITSIZE is narrower than
8778 the mode of OP0, an integral mode, and this is a big endian
8779 machine, we must put the field into the high-order bits. */
8780 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8781 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8782 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8783 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8784 size_int (GET_MODE_BITSIZE
8785 (GET_MODE (op0))
8786 - bitsize),
8787 op0, 1);
8788
8789
8790 emit_move_insn (new, op0);
8791 op0 = copy_rtx (new);
8792 PUT_MODE (op0, BLKmode);
8793 }
8794 }
8795 else
8796 /* Get a reference to just this component. */
8797 op0 = change_address (op0, mode1,
8798 plus_constant (XEXP (op0, 0),
8799 (bitpos / BITS_PER_UNIT)));
8800
8801 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8802
8803 /* Adjust the alignment in case the bit position is not
8804 a multiple of the alignment of the inner object. */
8805 while (bitpos % alignment != 0)
8806 alignment >>= 1;
8807
8808 if (GET_CODE (XEXP (op0, 0)) == REG)
8809 mark_reg_pointer (XEXP (op0, 0), alignment);
8810
8811 MEM_IN_STRUCT_P (op0) = 1;
8812 MEM_VOLATILE_P (op0) |= volatilep;
8813
8814 *palign = alignment;
8815 return op0;
8816 }
8817
8818 default:
8819 break;
8820
8821 }
8822
8823 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8824 }
8825 \f
8826 /* Return the tree node if ARG corresponds to a string constant or zero
8827 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8828 in bytes within the string that ARG is accessing. The type of the
8829 offset will be `sizetype'. */
8830
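/* A rough illustration of the two forms recognized below (a sketch, not
   an exhaustive list): for an argument written as `"hello" + 2' we see
   the PLUS_EXPR of (ADDR_EXPR of the STRING_CST "hello") and 2, so we
   return the STRING_CST and set *PTR_OFFSET to (sizetype) 2; for a plain
   string literal used as a pointer we see just the ADDR_EXPR and return
   the STRING_CST with a zero offset.  Anything else returns zero.  */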
8831 tree
8832 string_constant (arg, ptr_offset)
8833 tree arg;
8834 tree *ptr_offset;
8835 {
8836 STRIP_NOPS (arg);
8837
8838 if (TREE_CODE (arg) == ADDR_EXPR
8839 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8840 {
8841 *ptr_offset = size_zero_node;
8842 return TREE_OPERAND (arg, 0);
8843 }
8844 else if (TREE_CODE (arg) == PLUS_EXPR)
8845 {
8846 tree arg0 = TREE_OPERAND (arg, 0);
8847 tree arg1 = TREE_OPERAND (arg, 1);
8848
8849 STRIP_NOPS (arg0);
8850 STRIP_NOPS (arg1);
8851
8852 if (TREE_CODE (arg0) == ADDR_EXPR
8853 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8854 {
8855 *ptr_offset = convert (sizetype, arg1);
8856 return TREE_OPERAND (arg0, 0);
8857 }
8858 else if (TREE_CODE (arg1) == ADDR_EXPR
8859 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8860 {
8861 *ptr_offset = convert (sizetype, arg0);
8862 return TREE_OPERAND (arg1, 0);
8863 }
8864 }
8865
8866 return 0;
8867 }
8868 \f
8869 /* Expand code for a post- or pre- increment or decrement
8870 and return the RTX for the result.
8871 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8872
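/* A sketch of the overall strategy (illustrative only): for `i++' used
   for its value, the add is either queued as a single insn on the lvalue
   or rebuilt as the assignment `i = i + 1' via expand_assignment, and we
   return a copy of the old value of `i'; for `++i' we instead arrange to
   return the incremented value itself.  */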
8873 static rtx
8874 expand_increment (exp, post, ignore)
8875 register tree exp;
8876 int post, ignore;
8877 {
8878 register rtx op0, op1;
8879 register rtx temp, value;
8880 register tree incremented = TREE_OPERAND (exp, 0);
8881 optab this_optab = add_optab;
8882 int icode;
8883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8884 int op0_is_copy = 0;
8885 int single_insn = 0;
8886 /* 1 means we can't store into OP0 directly,
8887 because it is a subreg narrower than a word,
8888 and we don't dare clobber the rest of the word. */
8889 int bad_subreg = 0;
8890
8891 /* Stabilize any component ref that might need to be
8892 evaluated more than once below. */
8893 if (!post
8894 || TREE_CODE (incremented) == BIT_FIELD_REF
8895 || (TREE_CODE (incremented) == COMPONENT_REF
8896 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8897 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8898 incremented = stabilize_reference (incremented);
8899 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8900 ones into save exprs so that they don't accidentally get evaluated
8901 more than once by the code below. */
8902 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8903 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8904 incremented = save_expr (incremented);
8905
8906 /* Compute the operands as RTX.
8907 Note whether OP0 is the actual lvalue or a copy of it:
8908 I believe it is a copy iff it is a register or subreg
8909 and insns were generated in computing it. */
8910
8911 temp = get_last_insn ();
8912 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8913
8914 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8915 in place but instead must do sign- or zero-extension during assignment,
8916 so we copy it into a new register and let the code below use it as
8917 a copy.
8918
8919 Note that we can safely modify this SUBREG since it is known not to be
8920 shared (it was made by the expand_expr call above). */
8921
8922 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8923 {
8924 if (post)
8925 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8926 else
8927 bad_subreg = 1;
8928 }
8929 else if (GET_CODE (op0) == SUBREG
8930 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8931 {
8932 /* We cannot increment this SUBREG in place. If we are
8933 post-incrementing, get a copy of the old value. Otherwise,
8934 just mark that we cannot increment in place. */
8935 if (post)
8936 op0 = copy_to_reg (op0);
8937 else
8938 bad_subreg = 1;
8939 }
8940
8941 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8942 && temp != get_last_insn ());
8943 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8944 EXPAND_MEMORY_USE_BAD);
8945
8946 /* Decide whether incrementing or decrementing. */
8947 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8948 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8949 this_optab = sub_optab;
8950
8951 /* Convert decrement by a constant into a negative increment. */
8952 if (this_optab == sub_optab
8953 && GET_CODE (op1) == CONST_INT)
8954 {
8955 op1 = GEN_INT (- INTVAL (op1));
8956 this_optab = add_optab;
8957 }
8958
8959 /* For a preincrement, see if we can do this with a single instruction. */
8960 if (!post)
8961 {
8962 icode = (int) this_optab->handlers[(int) mode].insn_code;
8963 if (icode != (int) CODE_FOR_nothing
8964 /* Make sure that OP0 is valid for operands 0 and 1
8965 of the insn we want to queue. */
8966 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8967 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8968 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8969 single_insn = 1;
8970 }
8971
8972 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8973 then we cannot just increment OP0. We must therefore contrive to
8974 increment the original value. Then, for postincrement, we can return
8975 OP0 since it is a copy of the old value. For preincrement, expand here
8976 unless we can do it with a single insn.
8977
8978 Likewise if storing directly into OP0 would clobber high bits
8979 we need to preserve (bad_subreg). */
8980 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8981 {
8982 /* This is the easiest way to increment the value wherever it is.
8983 Problems with multiple evaluation of INCREMENTED are prevented
8984 because either (1) it is a component_ref or preincrement,
8985 in which case it was stabilized above, or (2) it is an array_ref
8986 with constant index in an array in a register, which is
8987 safe to reevaluate. */
8988 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8989 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8990 ? MINUS_EXPR : PLUS_EXPR),
8991 TREE_TYPE (exp),
8992 incremented,
8993 TREE_OPERAND (exp, 1));
8994
8995 while (TREE_CODE (incremented) == NOP_EXPR
8996 || TREE_CODE (incremented) == CONVERT_EXPR)
8997 {
8998 newexp = convert (TREE_TYPE (incremented), newexp);
8999 incremented = TREE_OPERAND (incremented, 0);
9000 }
9001
9002 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9003 return post ? op0 : temp;
9004 }
9005
9006 if (post)
9007 {
9008 /* We have a true reference to the value in OP0.
9009 If there is an insn to add or subtract in this mode, queue it.
9010 Queueing the increment insn avoids the register shuffling
9011 that often results if we must increment now and first save
9012 the old value for subsequent use. */
9013
9014 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9015 op0 = stabilize (op0);
9016 #endif
9017
9018 icode = (int) this_optab->handlers[(int) mode].insn_code;
9019 if (icode != (int) CODE_FOR_nothing
9020 /* Make sure that OP0 is valid for operands 0 and 1
9021 of the insn we want to queue. */
9022 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9023 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9024 {
9025 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9026 op1 = force_reg (mode, op1);
9027
9028 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9029 }
9030 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9031 {
9032 rtx addr = (general_operand (XEXP (op0, 0), mode)
9033 ? force_reg (Pmode, XEXP (op0, 0))
9034 : copy_to_reg (XEXP (op0, 0)));
9035 rtx temp, result;
9036
9037 op0 = change_address (op0, VOIDmode, addr);
9038 temp = force_reg (GET_MODE (op0), op0);
9039 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9040 op1 = force_reg (mode, op1);
9041
9042 /* The increment queue is LIFO, thus we have to `queue'
9043 the instructions in reverse order. */
9044 enqueue_insn (op0, gen_move_insn (op0, temp));
9045 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9046 return result;
9047 }
9048 }
9049
9050 /* Preincrement, or we can't increment with one simple insn. */
9051 if (post)
9052 /* Save a copy of the value before inc or dec, to return it later. */
9053 temp = value = copy_to_reg (op0);
9054 else
9055 /* Arrange to return the incremented value. */
9056 /* Copy the rtx because expand_binop will protect from the queue,
9057 and the results of that would be invalid for us to return
9058 if our caller does emit_queue before using our result. */
9059 temp = copy_rtx (value = op0);
9060
9061 /* Increment however we can. */
9062 op1 = expand_binop (mode, this_optab, value, op1,
9063 current_function_check_memory_usage ? NULL_RTX : op0,
9064 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9065 /* Make sure the value is stored into OP0. */
9066 if (op1 != op0)
9067 emit_move_insn (op0, op1);
9068
9069 return temp;
9070 }
9071 \f
9072 /* Expand all function calls contained within EXP, innermost ones first.
9073 But don't look within expressions that have sequence points.
9074 For each CALL_EXPR, record the rtx for its value
9075 in the CALL_EXPR_RTL field. */
9076
9077 static void
9078 preexpand_calls (exp)
9079 tree exp;
9080 {
9081 register int nops, i;
9082 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9083
9084 if (! do_preexpand_calls)
9085 return;
9086
9087 /* Only expressions and references can contain calls. */
9088
9089 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9090 return;
9091
9092 switch (TREE_CODE (exp))
9093 {
9094 case CALL_EXPR:
9095 /* Do nothing if already expanded. */
9096 if (CALL_EXPR_RTL (exp) != 0
9097 /* Do nothing if the call returns a variable-sized object. */
9098 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9099 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9100 /* Do nothing to built-in functions. */
9101 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9102 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9103 == FUNCTION_DECL)
9104 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9105 return;
9106
9107 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9108 return;
9109
9110 case COMPOUND_EXPR:
9111 case COND_EXPR:
9112 case TRUTH_ANDIF_EXPR:
9113 case TRUTH_ORIF_EXPR:
9114 /* If we find one of these, then we can be sure
9115 the adjust will be done for it (since it makes jumps).
9116 Do it now, so that if this is inside an argument
9117 of a function, we don't get the stack adjustment
9118 after some other args have already been pushed. */
9119 do_pending_stack_adjust ();
9120 return;
9121
9122 case BLOCK:
9123 case RTL_EXPR:
9124 case WITH_CLEANUP_EXPR:
9125 case CLEANUP_POINT_EXPR:
9126 case TRY_CATCH_EXPR:
9127 return;
9128
9129 case SAVE_EXPR:
9130 if (SAVE_EXPR_RTL (exp) != 0)
9131 return;
9132
9133 default:
9134 break;
9135 }
9136
9137 nops = tree_code_length[(int) TREE_CODE (exp)];
9138 for (i = 0; i < nops; i++)
9139 if (TREE_OPERAND (exp, i) != 0)
9140 {
9141 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9142 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9143 It doesn't happen before the call is made. */
9144 ;
9145 else
9146 {
9147 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9148 if (type == 'e' || type == '<' || type == '1' || type == '2'
9149 || type == 'r')
9150 preexpand_calls (TREE_OPERAND (exp, i));
9151 }
9152 }
9153 }
9154 \f
9155 /* At the start of a function, record that we have no previously-pushed
9156 arguments waiting to be popped. */
9157
9158 void
9159 init_pending_stack_adjust ()
9160 {
9161 pending_stack_adjust = 0;
9162 }
9163
9164 /* When exiting from function, if safe, clear out any pending stack adjust
9165 so the adjustment won't get done.
9166
9167 Note, if the current function calls alloca, then it must have a
9168 frame pointer regardless of the value of flag_omit_frame_pointer. */
9169
9170 void
9171 clear_pending_stack_adjust ()
9172 {
9173 #ifdef EXIT_IGNORE_STACK
9174 if (optimize > 0
9175 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9176 && EXIT_IGNORE_STACK
9177 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9178 && ! flag_inline_functions)
9179 {
9180 stack_pointer_delta -= pending_stack_adjust,
9181 pending_stack_adjust = 0;
9182 }
9183 #endif
9184 }
9185
9186 /* Pop any previously-pushed arguments that have not been popped yet. */
9187
9188 void
9189 do_pending_stack_adjust ()
9190 {
9191 if (inhibit_defer_pop == 0)
9192 {
9193 if (pending_stack_adjust != 0)
9194 adjust_stack (GEN_INT (pending_stack_adjust));
9195 pending_stack_adjust = 0;
9196 }
9197 }
9198 \f
9199 /* Expand conditional expressions. */
9200
9201 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9202 LABEL is an rtx of code CODE_LABEL, in this function and all the
9203 functions here. */
9204
9205 void
9206 jumpifnot (exp, label)
9207 tree exp;
9208 rtx label;
9209 {
9210 do_jump (exp, label, NULL_RTX);
9211 }
9212
9213 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9214
9215 void
9216 jumpif (exp, label)
9217 tree exp;
9218 rtx label;
9219 {
9220 do_jump (exp, NULL_RTX, label);
9221 }
9222
9223 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9224 the result is zero, or IF_TRUE_LABEL if the result is one.
9225 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9226 meaning fall through in that case.
9227
9228 do_jump always does any pending stack adjust except when it does not
9229 actually perform a jump. An example where there is no jump
9230 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9231
9232 This function is responsible for optimizing cases such as
9233 &&, || and comparison operators in EXP. */
9234
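/* As an illustration of the label plumbing below (a sketch): for
   `if (a && b)' the TRUTH_ANDIF_EXPR case first emits a jump on `a' to
   IF_FALSE_LABEL (creating a drop-through label if none was supplied),
   then jumps on `b' using both labels, so the short-circuit semantics
   come out of the control flow without ever materializing a 0/1 value.  */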
9235 void
9236 do_jump (exp, if_false_label, if_true_label)
9237 tree exp;
9238 rtx if_false_label, if_true_label;
9239 {
9240 register enum tree_code code = TREE_CODE (exp);
9241 /* Some cases need to create a label to jump to
9242 in order to properly fall through.
9243 These cases set DROP_THROUGH_LABEL nonzero. */
9244 rtx drop_through_label = 0;
9245 rtx temp;
9246 int i;
9247 tree type;
9248 enum machine_mode mode;
9249
9250 #ifdef MAX_INTEGER_COMPUTATION_MODE
9251 check_max_integer_computation_mode (exp);
9252 #endif
9253
9254 emit_queue ();
9255
9256 switch (code)
9257 {
9258 case ERROR_MARK:
9259 break;
9260
9261 case INTEGER_CST:
9262 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9263 if (temp)
9264 emit_jump (temp);
9265 break;
9266
9267 #if 0
9268 /* This is not true with #pragma weak */
9269 case ADDR_EXPR:
9270 /* The address of something can never be zero. */
9271 if (if_true_label)
9272 emit_jump (if_true_label);
9273 break;
9274 #endif
9275
9276 case NOP_EXPR:
9277 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9278 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9279 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9280 goto normal;
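/* ... fall through ... */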
9281 case CONVERT_EXPR:
9282 /* If we are narrowing the operand, we have to do the compare in the
9283 narrower mode. */
9284 if ((TYPE_PRECISION (TREE_TYPE (exp))
9285 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9286 goto normal;
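/* ... fall through ... */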
9287 case NON_LVALUE_EXPR:
9288 case REFERENCE_EXPR:
9289 case ABS_EXPR:
9290 case NEGATE_EXPR:
9291 case LROTATE_EXPR:
9292 case RROTATE_EXPR:
9293 /* These cannot change zero->non-zero or vice versa. */
9294 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9295 break;
9296
9297 case WITH_RECORD_EXPR:
9298 /* Put the object on the placeholder list, recurse through our first
9299 operand, and pop the list. */
9300 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9301 placeholder_list);
9302 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9303 placeholder_list = TREE_CHAIN (placeholder_list);
9304 break;
9305
9306 #if 0
9307 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9308 a test, and can take more if the test is eliminated. */
9309 case PLUS_EXPR:
9310 /* Reduce to minus. */
9311 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9312 TREE_OPERAND (exp, 0),
9313 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9314 TREE_OPERAND (exp, 1))));
9315 /* Process as MINUS. */
9316 #endif
9317
9318 case MINUS_EXPR:
9319 /* Non-zero iff operands of minus differ. */
9320 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9321 TREE_OPERAND (exp, 0),
9322 TREE_OPERAND (exp, 1)),
9323 NE, NE, if_false_label, if_true_label);
9324 break;
9325
9326 case BIT_AND_EXPR:
9327 /* If we are AND'ing with a small constant, do this comparison in the
9328 smallest type that fits. If the machine doesn't have comparisons
9329 that small, it will be converted back to the wider comparison.
9330 This helps if we are testing the sign bit of a narrower object.
9331 combine can't do this for us because it can't know whether a
9332 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9333
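/* For instance (illustrative, assuming the target has QImode compares):
   `if (x & 0x80)' with `int x' is converted to a test of the expression
   in an 8-bit unsigned type, so only the low byte is examined rather
   than the whole word.  */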
9334 if (! SLOW_BYTE_ACCESS
9335 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9336 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9337 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9338 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9339 && (type = type_for_mode (mode, 1)) != 0
9340 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9341 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9342 != CODE_FOR_nothing))
9343 {
9344 do_jump (convert (type, exp), if_false_label, if_true_label);
9345 break;
9346 }
9347 goto normal;
9348
9349 case TRUTH_NOT_EXPR:
9350 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9351 break;
9352
9353 case TRUTH_ANDIF_EXPR:
9354 if (if_false_label == 0)
9355 if_false_label = drop_through_label = gen_label_rtx ();
9356 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9357 start_cleanup_deferral ();
9358 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9359 end_cleanup_deferral ();
9360 break;
9361
9362 case TRUTH_ORIF_EXPR:
9363 if (if_true_label == 0)
9364 if_true_label = drop_through_label = gen_label_rtx ();
9365 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9366 start_cleanup_deferral ();
9367 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9368 end_cleanup_deferral ();
9369 break;
9370
9371 case COMPOUND_EXPR:
9372 push_temp_slots ();
9373 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9374 preserve_temp_slots (NULL_RTX);
9375 free_temp_slots ();
9376 pop_temp_slots ();
9377 emit_queue ();
9378 do_pending_stack_adjust ();
9379 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9380 break;
9381
9382 case COMPONENT_REF:
9383 case BIT_FIELD_REF:
9384 case ARRAY_REF:
9385 {
9386 HOST_WIDE_INT bitsize, bitpos;
9387 int unsignedp;
9388 enum machine_mode mode;
9389 tree type;
9390 tree offset;
9391 int volatilep = 0;
9392 unsigned int alignment;
9393
9394 /* Get description of this reference. We don't actually care
9395 about the underlying object here. */
9396 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9397 &unsignedp, &volatilep, &alignment);
9398
9399 type = type_for_size (bitsize, unsignedp);
9400 if (! SLOW_BYTE_ACCESS
9401 && type != 0 && bitsize >= 0
9402 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9403 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9404 != CODE_FOR_nothing))
9405 {
9406 do_jump (convert (type, exp), if_false_label, if_true_label);
9407 break;
9408 }
9409 goto normal;
9410 }
9411
9412 case COND_EXPR:
9413 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9414 if (integer_onep (TREE_OPERAND (exp, 1))
9415 && integer_zerop (TREE_OPERAND (exp, 2)))
9416 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9417
9418 else if (integer_zerop (TREE_OPERAND (exp, 1))
9419 && integer_onep (TREE_OPERAND (exp, 2)))
9420 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9421
9422 else
9423 {
9424 register rtx label1 = gen_label_rtx ();
9425 drop_through_label = gen_label_rtx ();
9426
9427 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9428
9429 start_cleanup_deferral ();
9430 /* Now the THEN-expression. */
9431 do_jump (TREE_OPERAND (exp, 1),
9432 if_false_label ? if_false_label : drop_through_label,
9433 if_true_label ? if_true_label : drop_through_label);
9434 /* In case the do_jump just above never jumps. */
9435 do_pending_stack_adjust ();
9436 emit_label (label1);
9437
9438 /* Now the ELSE-expression. */
9439 do_jump (TREE_OPERAND (exp, 2),
9440 if_false_label ? if_false_label : drop_through_label,
9441 if_true_label ? if_true_label : drop_through_label);
9442 end_cleanup_deferral ();
9443 }
9444 break;
9445
9446 case EQ_EXPR:
9447 {
9448 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9449
9450 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9451 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9452 {
9453 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9454 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9455 do_jump
9456 (fold
9457 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9458 fold (build (EQ_EXPR, TREE_TYPE (exp),
9459 fold (build1 (REALPART_EXPR,
9460 TREE_TYPE (inner_type),
9461 exp0)),
9462 fold (build1 (REALPART_EXPR,
9463 TREE_TYPE (inner_type),
9464 exp1)))),
9465 fold (build (EQ_EXPR, TREE_TYPE (exp),
9466 fold (build1 (IMAGPART_EXPR,
9467 TREE_TYPE (inner_type),
9468 exp0)),
9469 fold (build1 (IMAGPART_EXPR,
9470 TREE_TYPE (inner_type),
9471 exp1)))))),
9472 if_false_label, if_true_label);
9473 }
9474
9475 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9476 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9477
9478 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9479 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9480 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9481 else
9482 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9483 break;
9484 }
9485
9486 case NE_EXPR:
9487 {
9488 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9489
9490 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9491 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9492 {
9493 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9494 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9495 do_jump
9496 (fold
9497 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9498 fold (build (NE_EXPR, TREE_TYPE (exp),
9499 fold (build1 (REALPART_EXPR,
9500 TREE_TYPE (inner_type),
9501 exp0)),
9502 fold (build1 (REALPART_EXPR,
9503 TREE_TYPE (inner_type),
9504 exp1)))),
9505 fold (build (NE_EXPR, TREE_TYPE (exp),
9506 fold (build1 (IMAGPART_EXPR,
9507 TREE_TYPE (inner_type),
9508 exp0)),
9509 fold (build1 (IMAGPART_EXPR,
9510 TREE_TYPE (inner_type),
9511 exp1)))))),
9512 if_false_label, if_true_label);
9513 }
9514
9515 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9516 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9517
9518 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9519 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9520 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9521 else
9522 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9523 break;
9524 }
9525
9526 case LT_EXPR:
9527 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9528 if (GET_MODE_CLASS (mode) == MODE_INT
9529 && ! can_compare_p (LT, mode, ccp_jump))
9530 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9531 else
9532 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9533 break;
9534
9535 case LE_EXPR:
9536 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9537 if (GET_MODE_CLASS (mode) == MODE_INT
9538 && ! can_compare_p (LE, mode, ccp_jump))
9539 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9540 else
9541 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9542 break;
9543
9544 case GT_EXPR:
9545 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9546 if (GET_MODE_CLASS (mode) == MODE_INT
9547 && ! can_compare_p (GT, mode, ccp_jump))
9548 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9549 else
9550 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9551 break;
9552
9553 case GE_EXPR:
9554 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9555 if (GET_MODE_CLASS (mode) == MODE_INT
9556 && ! can_compare_p (GE, mode, ccp_jump))
9557 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9558 else
9559 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9560 break;
9561
9562 case UNORDERED_EXPR:
9563 case ORDERED_EXPR:
9564 {
9565 enum rtx_code cmp, rcmp;
9566 int do_rev;
9567
9568 if (code == UNORDERED_EXPR)
9569 cmp = UNORDERED, rcmp = ORDERED;
9570 else
9571 cmp = ORDERED, rcmp = UNORDERED;
9572 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9573
9574 do_rev = 0;
9575 if (! can_compare_p (cmp, mode, ccp_jump)
9576 && (can_compare_p (rcmp, mode, ccp_jump)
9577 /* If the target doesn't provide either UNORDERED or ORDERED
9578 comparisons, canonicalize on UNORDERED for the library. */
9579 || rcmp == UNORDERED))
9580 do_rev = 1;
9581
9582 if (! do_rev)
9583 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9584 else
9585 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9586 }
9587 break;
9588
9589 {
9590 enum rtx_code rcode1;
9591 enum tree_code tcode2;
9592
9593 case UNLT_EXPR:
9594 rcode1 = UNLT;
9595 tcode2 = LT_EXPR;
9596 goto unordered_bcc;
9597 case UNLE_EXPR:
9598 rcode1 = UNLE;
9599 tcode2 = LE_EXPR;
9600 goto unordered_bcc;
9601 case UNGT_EXPR:
9602 rcode1 = UNGT;
9603 tcode2 = GT_EXPR;
9604 goto unordered_bcc;
9605 case UNGE_EXPR:
9606 rcode1 = UNGE;
9607 tcode2 = GE_EXPR;
9608 goto unordered_bcc;
9609 case UNEQ_EXPR:
9610 rcode1 = UNEQ;
9611 tcode2 = EQ_EXPR;
9612 goto unordered_bcc;
9613
9614 unordered_bcc:
9615 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9616 if (can_compare_p (rcode1, mode, ccp_jump))
9617 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9618 if_true_label);
9619 else
9620 {
9621 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9622 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9623 tree cmp0, cmp1;
9624
9625 /* If the target doesn't support combined unordered
9626 compares, decompose into UNORDERED + comparison. */
9627 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9628 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9629 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9630 do_jump (exp, if_false_label, if_true_label);
9631 }
9632 }
9633 break;
9634
9635 default:
9636 normal:
9637 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9638 #if 0
9639 /* This is no longer needed and produces poor code, since it causes
9640 comparisons and tests from non-SI objects to have different code
9641 sequences. */
9642 /* Copy to register to avoid generating bad insns by cse
9643 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9644 if (!cse_not_expected && GET_CODE (temp) == MEM)
9645 temp = copy_to_reg (temp);
9646 #endif
9647 do_pending_stack_adjust ();
9648 /* Do any postincrements in the expression that was tested. */
9649 emit_queue ();
9650
9651 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9652 {
9653 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9654 if (target)
9655 emit_jump (target);
9656 }
9657 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9658 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9659 /* Note swapping the labels gives us not-equal. */
9660 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9661 else if (GET_MODE (temp) != VOIDmode)
9662 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9663 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9664 GET_MODE (temp), NULL_RTX, 0,
9665 if_false_label, if_true_label);
9666 else
9667 abort ();
9668 }
9669
9670 if (drop_through_label)
9671 {
9672 /* If do_jump produces code that might be jumped around,
9673 do any stack adjusts from that code, before the place
9674 where control merges in. */
9675 do_pending_stack_adjust ();
9676 emit_label (drop_through_label);
9677 }
9678 }
9679 \f
9680 /* Given a comparison expression EXP for values too wide to be compared
9681 with one insn, test the comparison and jump to the appropriate label.
9682 The code of EXP is ignored; we always test GT if SWAP is 0,
9683 and LT if SWAP is 1. */
9684
9685 static void
9686 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9687 tree exp;
9688 int swap;
9689 rtx if_false_label, if_true_label;
9690 {
9691 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9692 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9693 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9694 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695
9696 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9697 }
9698
9699 /* Compare OP0 with OP1, word at a time, in mode MODE.
9700 UNSIGNEDP says to do unsigned comparison.
9701 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9702
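/* Sketch of the expansion, assuming 32-bit words and a DImode compare
   (illustrative): the high words are compared first -- GT jumps straight
   to IF_TRUE_LABEL, NE jumps to IF_FALSE_LABEL -- and only when the high
   words are equal do we go on to compare the low words, which are always
   compared unsigned.  */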
9703 void
9704 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9705 enum machine_mode mode;
9706 int unsignedp;
9707 rtx op0, op1;
9708 rtx if_false_label, if_true_label;
9709 {
9710 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9711 rtx drop_through_label = 0;
9712 int i;
9713
9714 if (! if_true_label || ! if_false_label)
9715 drop_through_label = gen_label_rtx ();
9716 if (! if_true_label)
9717 if_true_label = drop_through_label;
9718 if (! if_false_label)
9719 if_false_label = drop_through_label;
9720
9721 /* Compare a word at a time, high order first. */
9722 for (i = 0; i < nwords; i++)
9723 {
9724 rtx op0_word, op1_word;
9725
9726 if (WORDS_BIG_ENDIAN)
9727 {
9728 op0_word = operand_subword_force (op0, i, mode);
9729 op1_word = operand_subword_force (op1, i, mode);
9730 }
9731 else
9732 {
9733 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9734 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9735 }
9736
9737 /* All but high-order word must be compared as unsigned. */
9738 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9739 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9740 NULL_RTX, if_true_label);
9741
9742 /* Consider lower words only if these are equal. */
9743 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9744 NULL_RTX, 0, NULL_RTX, if_false_label);
9745 }
9746
9747 if (if_false_label)
9748 emit_jump (if_false_label);
9749 if (drop_through_label)
9750 emit_label (drop_through_label);
9751 }
9752
9753 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9754 with one insn, test the comparison and jump to the appropriate label. */
9755
9756 static void
9757 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9758 tree exp;
9759 rtx if_false_label, if_true_label;
9760 {
9761 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9762 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9763 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9764 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9765 int i;
9766 rtx drop_through_label = 0;
9767
9768 if (! if_false_label)
9769 drop_through_label = if_false_label = gen_label_rtx ();
9770
9771 for (i = 0; i < nwords; i++)
9772 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9773 operand_subword_force (op1, i, mode),
9774 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9775 word_mode, NULL_RTX, 0, if_false_label,
9776 NULL_RTX);
9777
9778 if (if_true_label)
9779 emit_jump (if_true_label);
9780 if (drop_through_label)
9781 emit_label (drop_through_label);
9782 }
9783 \f
9784 /* Jump according to whether OP0 is 0.
9785 We assume that OP0 has an integer mode that is too wide
9786 for the available compare insns. */
9787
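/* Sketch of the usual expansion, assuming 32-bit words and a DImode OP0
   (illustrative):

       part = low_word | high_word;
       if (part == 0) goto if_true_label; else goto if_false_label;

   i.e. one IOR plus one word-sized compare; only if the IOR cannot be
   expanded do we fall back to comparing each word against zero.  */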
9788 void
9789 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9790 rtx op0;
9791 rtx if_false_label, if_true_label;
9792 {
9793 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9794 rtx part;
9795 int i;
9796 rtx drop_through_label = 0;
9797
9798 /* The fastest way of doing this comparison on almost any machine is to
9799 "or" all the words and compare the result. If all have to be loaded
9800 from memory and this is a very wide item, it's possible this may
9801 be slower, but that's highly unlikely. */
9802
9803 part = gen_reg_rtx (word_mode);
9804 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9805 for (i = 1; i < nwords && part != 0; i++)
9806 part = expand_binop (word_mode, ior_optab, part,
9807 operand_subword_force (op0, i, GET_MODE (op0)),
9808 part, 1, OPTAB_WIDEN);
9809
9810 if (part != 0)
9811 {
9812 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9813 NULL_RTX, 0, if_false_label, if_true_label);
9814
9815 return;
9816 }
9817
9818 /* If we couldn't do the "or" simply, do this with a series of compares. */
9819 if (! if_false_label)
9820 drop_through_label = if_false_label = gen_label_rtx ();
9821
9822 for (i = 0; i < nwords; i++)
9823 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9824 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9825 if_false_label, NULL_RTX);
9826
9827 if (if_true_label)
9828 emit_jump (if_true_label);
9829
9830 if (drop_through_label)
9831 emit_label (drop_through_label);
9832 }
9833 \f
9834 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9835 (including code to compute the values to be compared)
9836 and set (CC0) according to the result.
9837 The decision as to signed or unsigned comparison must be made by the caller.
9838
9839 We force a stack adjustment unless there are currently
9840 things pushed on the stack that aren't yet used.
9841
9842 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9843 compared.
9844
9845 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9846 size of MODE should be used. */
9847
9848 rtx
9849 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9850 register rtx op0, op1;
9851 enum rtx_code code;
9852 int unsignedp;
9853 enum machine_mode mode;
9854 rtx size;
9855 unsigned int align;
9856 {
9857 rtx tem;
9858
9859 /* If one operand is constant, make it the second one. Only do this
9860 if the other operand is not constant as well. */
9861
9862 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9863 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9864 {
9865 tem = op0;
9866 op0 = op1;
9867 op1 = tem;
9868 code = swap_condition (code);
9869 }
9870
9871 if (flag_force_mem)
9872 {
9873 op0 = force_not_mem (op0);
9874 op1 = force_not_mem (op1);
9875 }
9876
9877 do_pending_stack_adjust ();
9878
9879 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9880 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9881 return tem;
9882
9883 #if 0
9884 /* There's no need to do this now that combine.c can eliminate lots of
9885 sign extensions. This can be less efficient in certain cases on other
9886 machines. */
9887
9888 /* If this is a signed equality comparison, we can do it as an
9889 unsigned comparison since zero-extension is cheaper than sign
9890 extension and comparisons with zero are done as unsigned. This is
9891 the case even on machines that can do fast sign extension, since
9892 zero-extension is easier to combine with other operations than
9893 sign-extension is. If we are comparing against a constant, we must
9894 convert it to what it would look like unsigned. */
9895 if ((code == EQ || code == NE) && ! unsignedp
9896 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9897 {
9898 if (GET_CODE (op1) == CONST_INT
9899 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9900 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9901 unsignedp = 1;
9902 }
9903 #endif
9904
9905 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9906
9907 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9908 }
9909
9910 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9911 The decision as to signed or unsigned comparison must be made by the caller.
9912
9913 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9914 compared.
9915
9916 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9917 size of MODE should be used. */
9918
9919 void
9920 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9921 if_false_label, if_true_label)
9922 register rtx op0, op1;
9923 enum rtx_code code;
9924 int unsignedp;
9925 enum machine_mode mode;
9926 rtx size;
9927 unsigned int align;
9928 rtx if_false_label, if_true_label;
9929 {
9930 rtx tem;
9931 int dummy_true_label = 0;
9932
9933 /* Reverse the comparison if that is safe and we want to jump if it is
9934 false. */
9935 if (! if_true_label && ! FLOAT_MODE_P (mode))
9936 {
9937 if_true_label = if_false_label;
9938 if_false_label = 0;
9939 code = reverse_condition (code);
9940 }
9941
9942 /* If one operand is constant, make it the second one. Only do this
9943 if the other operand is not constant as well. */
9944
9945 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9946 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9947 {
9948 tem = op0;
9949 op0 = op1;
9950 op1 = tem;
9951 code = swap_condition (code);
9952 }
9953
9954 if (flag_force_mem)
9955 {
9956 op0 = force_not_mem (op0);
9957 op1 = force_not_mem (op1);
9958 }
9959
9960 do_pending_stack_adjust ();
9961
9962 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9963 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9964 {
9965 if (tem == const_true_rtx)
9966 {
9967 if (if_true_label)
9968 emit_jump (if_true_label);
9969 }
9970 else
9971 {
9972 if (if_false_label)
9973 emit_jump (if_false_label);
9974 }
9975 return;
9976 }
9977
9978 #if 0
9979 /* There's no need to do this now that combine.c can eliminate lots of
9980 sign extensions. This can be less efficient in certain cases on other
9981 machines. */
9982
9983 /* If this is a signed equality comparison, we can do it as an
9984 unsigned comparison since zero-extension is cheaper than sign
9985 extension and comparisons with zero are done as unsigned. This is
9986 the case even on machines that can do fast sign extension, since
9987 zero-extension is easier to combine with other operations than
9988 sign-extension is. If we are comparing against a constant, we must
9989 convert it to what it would look like unsigned. */
9990 if ((code == EQ || code == NE) && ! unsignedp
9991 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9992 {
9993 if (GET_CODE (op1) == CONST_INT
9994 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9995 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9996 unsignedp = 1;
9997 }
9998 #endif
9999
10000 if (! if_true_label)
10001 {
10002 dummy_true_label = 1;
10003 if_true_label = gen_label_rtx ();
10004 }
10005
10006 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10007 if_true_label);
10008
10009 if (if_false_label)
10010 emit_jump (if_false_label);
10011 if (dummy_true_label)
10012 emit_label (if_true_label);
10013 }
10014
10015 /* Generate code for a comparison expression EXP (including code to compute
10016 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10017 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10018 generated code will drop through.
10019 SIGNED_CODE should be the rtx operation for this comparison for
10020 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10021
10022 We force a stack adjustment unless there are currently
10023 things pushed on the stack that aren't yet used. */
10024
10025 static void
10026 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10027 if_true_label)
10028 register tree exp;
10029 enum rtx_code signed_code, unsigned_code;
10030 rtx if_false_label, if_true_label;
10031 {
10032 unsigned int align0, align1;
10033 register rtx op0, op1;
10034 register tree type;
10035 register enum machine_mode mode;
10036 int unsignedp;
10037 enum rtx_code code;
10038
10039 /* Don't crash if the comparison was erroneous. */
10040 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10042 return;
10043
10044 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10045 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10046 mode = TYPE_MODE (type);
10047 unsignedp = TREE_UNSIGNED (type);
10048 code = unsignedp ? unsigned_code : signed_code;
10049
10050 #ifdef HAVE_canonicalize_funcptr_for_compare
10051 /* If function pointers need to be "canonicalized" before they can
10052 be reliably compared, then canonicalize them. */
10053 if (HAVE_canonicalize_funcptr_for_compare
10054 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10055 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10056 == FUNCTION_TYPE))
10057 {
10058 rtx new_op0 = gen_reg_rtx (mode);
10059
10060 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10061 op0 = new_op0;
10062 }
10063
10064 if (HAVE_canonicalize_funcptr_for_compare
10065 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10066 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10067 == FUNCTION_TYPE))
10068 {
10069 rtx new_op1 = gen_reg_rtx (mode);
10070
10071 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10072 op1 = new_op1;
10073 }
10074 #endif
10075
10076 /* Do any postincrements in the expression that was tested. */
10077 emit_queue ();
10078
10079 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10080 ((mode == BLKmode)
10081 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10082 MIN (align0, align1),
10083 if_false_label, if_true_label);
10084 }
10085 \f
10086 /* Generate code to calculate EXP using a store-flag instruction
10087 and return an rtx for the result. EXP is either a comparison
10088 or a TRUTH_NOT_EXPR whose operand is a comparison.
10089
10090 If TARGET is nonzero, store the result there if convenient.
10091
10092 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10093 cheap.
10094
10095 Return zero if there is no suitable set-flag instruction
10096 available on this machine.
10097
10098 Once expand_expr has been called on the arguments of the comparison,
10099 we are committed to doing the store flag, since it is not safe to
10100 re-evaluate the expression. We emit the store-flag insn by calling
10101 emit_store_flag, but only expand the arguments if we have a reason
10102 to believe that emit_store_flag will be successful. If we think that
10103 it will, but it isn't, we have to simulate the store-flag with a
10104 set/jump/set sequence. */
10105
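/* A sketch of the set/jump/set fallback mentioned above, in C-like
   pseudo code for the emitted insns (illustrative):

       target = 1;
       if (op0 <cond> op1) goto label;
       target = 0;
     label:

   with the two constants swapped when the result must be inverted.  */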
10106 static rtx
10107 do_store_flag (exp, target, mode, only_cheap)
10108 tree exp;
10109 rtx target;
10110 enum machine_mode mode;
10111 int only_cheap;
10112 {
10113 enum rtx_code code;
10114 tree arg0, arg1, type;
10115 tree tem;
10116 enum machine_mode operand_mode;
10117 int invert = 0;
10118 int unsignedp;
10119 rtx op0, op1;
10120 enum insn_code icode;
10121 rtx subtarget = target;
10122 rtx result, label;
10123
10124 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10125 result at the end. We can't simply invert the test since it would
10126 have already been inverted if it were valid. This case occurs for
10127 some floating-point comparisons. */
10128
10129 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10130 invert = 1, exp = TREE_OPERAND (exp, 0);
10131
10132 arg0 = TREE_OPERAND (exp, 0);
10133 arg1 = TREE_OPERAND (exp, 1);
10134 type = TREE_TYPE (arg0);
10135 operand_mode = TYPE_MODE (type);
10136 unsignedp = TREE_UNSIGNED (type);
10137
10138 /* We won't bother with BLKmode store-flag operations because it would mean
10139 passing a lot of information to emit_store_flag. */
10140 if (operand_mode == BLKmode)
10141 return 0;
10142
10143 /* We won't bother with store-flag operations involving function pointers
10144 when function pointers must be canonicalized before comparisons. */
10145 #ifdef HAVE_canonicalize_funcptr_for_compare
10146 if (HAVE_canonicalize_funcptr_for_compare
10147 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10148 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10149 == FUNCTION_TYPE))
10150 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10151 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10152 == FUNCTION_TYPE))))
10153 return 0;
10154 #endif
10155
10156 STRIP_NOPS (arg0);
10157 STRIP_NOPS (arg1);
10158
10159 /* Get the rtx comparison code to use. We know that EXP is a comparison
10160 operation of some type. Some comparisons against 1 and -1 can be
10161 converted to comparisons with zero. Do so here so that the tests
10162 below will be aware that we have a comparison with zero. These
10163 tests will not catch constants in the first operand, but constants
10164 are rarely passed as the first operand. */
10165
10166 switch (TREE_CODE (exp))
10167 {
10168 case EQ_EXPR:
10169 code = EQ;
10170 break;
10171 case NE_EXPR:
10172 code = NE;
10173 break;
10174 case LT_EXPR:
10175 if (integer_onep (arg1))
10176 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10177 else
10178 code = unsignedp ? LTU : LT;
10179 break;
10180 case LE_EXPR:
10181 if (! unsignedp && integer_all_onesp (arg1))
10182 arg1 = integer_zero_node, code = LT;
10183 else
10184 code = unsignedp ? LEU : LE;
10185 break;
10186 case GT_EXPR:
10187 if (! unsignedp && integer_all_onesp (arg1))
10188 arg1 = integer_zero_node, code = GE;
10189 else
10190 code = unsignedp ? GTU : GT;
10191 break;
10192 case GE_EXPR:
10193 if (integer_onep (arg1))
10194 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10195 else
10196 code = unsignedp ? GEU : GE;
10197 break;
10198
10199 case UNORDERED_EXPR:
10200 code = UNORDERED;
10201 break;
10202 case ORDERED_EXPR:
10203 code = ORDERED;
10204 break;
10205 case UNLT_EXPR:
10206 code = UNLT;
10207 break;
10208 case UNLE_EXPR:
10209 code = UNLE;
10210 break;
10211 case UNGT_EXPR:
10212 code = UNGT;
10213 break;
10214 case UNGE_EXPR:
10215 code = UNGE;
10216 break;
10217 case UNEQ_EXPR:
10218 code = UNEQ;
10219 break;
10220
10221 default:
10222 abort ();
10223 }
10224
10225 /* Put a constant second. */
10226 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10227 {
10228 tem = arg0; arg0 = arg1; arg1 = tem;
10229 code = swap_condition (code);
10230 }
10231
10232 /* If this is an equality or inequality test of a single bit, we can
10233 do this by shifting the bit being tested to the low-order bit and
10234 masking the result with the constant 1. If the condition was EQ,
10235 we xor it with 1. This does not require an scc insn and is faster
10236 than an scc insn even if we have it. */
10237
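/* For instance (illustrative): `(x & 8) != 0' is expanded as
   `(x >> 3) & 1', and `(x & 8) == 0' as `((x >> 3) ^ 1) & 1'; when the
   bit tested is the most significant bit of the operand's type the
   final AND is omitted.  */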
10238 if ((code == NE || code == EQ)
10239 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10240 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10241 {
10242 tree inner = TREE_OPERAND (arg0, 0);
10243 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10244 int ops_unsignedp;
10245
10246 /* If INNER is a right shift by a constant and that constant plus BITNUM
10247 does not overflow, adjust BITNUM and INNER. */
10248
10249 if (TREE_CODE (inner) == RSHIFT_EXPR
10250 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10251 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10252 && bitnum < TYPE_PRECISION (type)
10253 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10254 bitnum - TYPE_PRECISION (type)))
10255 {
10256 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10257 inner = TREE_OPERAND (inner, 0);
10258 }
10259
10260 /* If we are going to be able to omit the AND below, we must do our
10261 operations as unsigned. If we must use the AND, we have a choice.
10262 Normally unsigned is faster, but for some machines signed is. */
10263 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10264 #ifdef LOAD_EXTEND_OP
10265 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10266 #else
10267 : 1
10268 #endif
10269 );
10270
10271 if (! get_subtarget (subtarget)
10272 || GET_MODE (subtarget) != operand_mode
10273 || ! safe_from_p (subtarget, inner, 1))
10274 subtarget = 0;
10275
10276 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10277
10278 if (bitnum != 0)
10279 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10280 size_int (bitnum), subtarget, ops_unsignedp);
10281
10282 if (GET_MODE (op0) != mode)
10283 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10284
10285 if ((code == EQ && ! invert) || (code == NE && invert))
10286 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10287 ops_unsignedp, OPTAB_LIB_WIDEN);
10288
10289 /* Put the AND last so it can combine with more things. */
10290 if (bitnum != TYPE_PRECISION (type) - 1)
10291 op0 = expand_and (op0, const1_rtx, subtarget);
10292
10293 return op0;
10294 }
10295
10296 /* Now see if we are likely to be able to do this. Return if not. */
10297 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10298 return 0;
10299
10300 icode = setcc_gen_code[(int) code];
10301 if (icode == CODE_FOR_nothing
10302 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10303 {
10304 /* We can only do this if it is one of the special cases that
10305 can be handled without an scc insn. */
10306 if ((code == LT && integer_zerop (arg1))
10307 || (! only_cheap && code == GE && integer_zerop (arg1)))
10308 ;
10309 else if (BRANCH_COST >= 0
10310 && ! only_cheap && (code == NE || code == EQ)
10311 && TREE_CODE (type) != REAL_TYPE
10312 && ((abs_optab->handlers[(int) operand_mode].insn_code
10313 != CODE_FOR_nothing)
10314 || (ffs_optab->handlers[(int) operand_mode].insn_code
10315 != CODE_FOR_nothing)))
10316 ;
10317 else
10318 return 0;
10319 }
10320
10321 preexpand_calls (exp);
10322 if (! get_subtarget (target)
10323 || GET_MODE (subtarget) != operand_mode
10324 || ! safe_from_p (subtarget, arg1, 1))
10325 subtarget = 0;
10326
10327 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10328 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10329
10330 if (target == 0)
10331 target = gen_reg_rtx (mode);
10332
10333 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10334 because, if emit_store_flag does anything at all, it will succeed and
10335 OP0 and OP1 will not be used subsequently. */
10336
10337 result = emit_store_flag (target, code,
10338 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10339 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10340 operand_mode, unsignedp, 1);
10341
10342 if (result)
10343 {
10344 if (invert)
10345 result = expand_binop (mode, xor_optab, result, const1_rtx,
10346 result, 0, OPTAB_LIB_WIDEN);
10347 return result;
10348 }
10349
10350 /* If this failed, we have to fall back to set/compare/jump/set code. */
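  /* That is, emit the equivalent of
	 target = 1; if (op0 CODE op1) goto label; target = 0; label:
     with the constants 0 and 1 interchanged when INVERT is set.  */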
10351 if (GET_CODE (target) != REG
10352 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10353 target = gen_reg_rtx (GET_MODE (target));
10354
10355 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10356 result = compare_from_rtx (op0, op1, code, unsignedp,
10357 operand_mode, NULL_RTX, 0);
10358 if (GET_CODE (result) == CONST_INT)
10359 return (((result == const0_rtx && ! invert)
10360 || (result != const0_rtx && invert))
10361 ? const0_rtx : const1_rtx);
10362
10363 label = gen_label_rtx ();
10364 if (bcc_gen_fctn[(int) code] == 0)
10365 abort ();
10366
10367 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10368 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10369 emit_label (label);
10370
10371 return target;
10372 }
10373 \f
10374 /* Generate a tablejump instruction (used for switch statements). */
10375
10376 #ifdef HAVE_tablejump
10377
10378 /* INDEX is the value being switched on, with the lowest value
10379 in the table already subtracted.
10380 MODE is its expected mode (needed if INDEX is constant).
10381 RANGE is the length of the jump table.
10382 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10383
10384 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10385 index value is out of range. */
10386
10387 void
10388 do_tablejump (index, mode, range, table_label, default_label)
10389 rtx index, range, table_label, default_label;
10390 enum machine_mode mode;
10391 {
10392 register rtx temp, vector;
10393
10394 /* Do an unsigned comparison (in the proper mode) between the index
10395 expression and the value which represents the length of the range.
10396 Since we just finished subtracting the lower bound of the range
10397 from the index expression, this comparison allows us to simultaneously
10398 check that the original index expression value is both greater than
10399 or equal to the minimum value of the range and less than or equal to
10400 the maximum value of the range. */
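     /* For example, with case values 5 ... 10 the caller has already
	subtracted 5, so an original value of 3 appears here as a huge
	unsigned number and fails the single GTU comparison just as an
	original value of 12 does.  */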
10401
10402 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10403 0, default_label);
10404
10405 /* If index is in range, it must fit in Pmode.
10406 Convert to Pmode so we can index with it. */
10407 if (mode != Pmode)
10408 index = convert_to_mode (Pmode, index, 1);
10409
10410 /* Don't let a MEM slip through, because then the INDEX that comes
10411 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10412 and break_out_memory_refs will go to work on it and mess it up. */
10413 #ifdef PIC_CASE_VECTOR_ADDRESS
10414 if (flag_pic && GET_CODE (index) != REG)
10415 index = copy_to_mode_reg (Pmode, index);
10416 #endif
10417
10418 /* If flag_force_addr were to affect this address,
10419 it could interfere with the tricky assumptions made
10420 about addresses that contain label-refs,
10421 which may be valid only very near the tablejump itself. */
10422 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10423 GET_MODE_SIZE, because this indicates how large insns are. The other
10424 uses should all be Pmode, because they are addresses. This code
10425 could fail if addresses and insns are not the same size. */
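     /* The address of table entry INDEX is
	    TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
	which is what the PLUS and MULT built below compute.  */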
10426 index = gen_rtx_PLUS (Pmode,
10427 gen_rtx_MULT (Pmode, index,
10428 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10429 gen_rtx_LABEL_REF (Pmode, table_label));
10430 #ifdef PIC_CASE_VECTOR_ADDRESS
10431 if (flag_pic)
10432 index = PIC_CASE_VECTOR_ADDRESS (index);
10433 else
10434 #endif
10435 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10436 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10437 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10438 RTX_UNCHANGING_P (vector) = 1;
10439 convert_move (temp, vector, 0);
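  /* TEMP now holds the contents of the selected table entry: an absolute
     address, or a pc-relative offset when the vector is pc-relative, which
     the target's tablejump expansion is expected to add back in.  */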
10440
10441 emit_jump_insn (gen_tablejump (temp, table_label));
10442
10443 /* If we are generating PIC code or if the table is PC-relative, the
10444 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10445 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10446 emit_barrier ();
10447 }
10448
10449 #endif /* HAVE_tablejump */