gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check a memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 int to_struct;
119 int to_readonly;
120 rtx from;
121 rtx from_addr;
122 int autinc_from;
123 int explicit_inc_from;
124 int from_struct;
125 int from_readonly;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 extern struct obstack permanent_obstack;
147
148 static rtx get_push_address PARAMS ((int));
149
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
156 enum machine_mode,
157 struct clear_by_pieces *));
158 static rtx get_subtarget PARAMS ((rtx));
159 static int is_zeros_p PARAMS ((tree));
160 static int mostly_zeros_p PARAMS ((tree));
161 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
162 HOST_WIDE_INT, enum machine_mode,
163 tree, tree, unsigned int, int));
164 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
165 HOST_WIDE_INT));
166 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
167 HOST_WIDE_INT, enum machine_mode,
168 tree, enum machine_mode, int,
169 unsigned int, HOST_WIDE_INT, int));
170 static enum memory_use_mode
171 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
172 static tree save_noncopied_parts PARAMS ((tree, tree));
173 static tree init_noncopied_parts PARAMS ((tree, tree));
174 static int safe_from_p PARAMS ((rtx, tree, int));
175 static int fixed_type_p PARAMS ((tree));
176 static rtx var_rtx PARAMS ((tree));
177 static int readonly_fields_p PARAMS ((tree));
178 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
179 static rtx expand_increment PARAMS ((tree, int, int));
180 static void preexpand_calls PARAMS ((tree));
181 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
182 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
183 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
184 rtx, rtx));
185 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
186
187 /* Record for each mode whether we can move a register directly to or
188 from an object of that mode in memory. If we can't, we won't try
189 to use that mode directly when accessing a field of that mode. */
190
191 static char direct_load[NUM_MACHINE_MODES];
192 static char direct_store[NUM_MACHINE_MODES];
193
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
196
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
205
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
211 #endif
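/* For a rough feel of the test above (the numbers are illustrative and
   target-dependent): with no movstrMM patterns and not optimizing for
   size, MOVE_RATIO defaults to 15.  A 16-byte, word-aligned copy on a
   32-bit target costs four SImode moves, so move_by_pieces_ninsns
   returns 4 and MOVE_BY_PIECES_P is true -- the copy is expanded inline
   instead of going through the block-move machinery.  */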
212
213 /* This array records the insn_code of insns to perform block moves. */
214 enum insn_code movstr_optab[NUM_MACHINE_MODES];
215
216 /* This array records the insn_code of insns to perform block clears. */
217 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
218
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
220
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
223 #endif
224 \f
225 /* This is run once per compilation to set up which modes can be used
226 directly in memory and to initialize the block move optab. */
227
228 void
229 init_expr_once ()
230 {
231 rtx insn, pat;
232 enum machine_mode mode;
233 int num_clobbers;
234 rtx mem, mem1;
235 char *free_point;
236
237 start_sequence ();
238
239 /* Since we are on the permanent obstack, we must be sure we save this
240 spot AFTER we call start_sequence, since it will reuse the rtl it
241 makes. */
242 free_point = (char *) oballoc (0);
243
244 /* Try indexing by frame ptr and try by stack ptr.
245 It is known that on the Convex the stack ptr isn't a valid index.
246 With luck, one or the other is valid on any machine. */
247 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
248 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
249
250 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
251 pat = PATTERN (insn);
252
253 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
254 mode = (enum machine_mode) ((int) mode + 1))
255 {
256 int regno;
257 rtx reg;
258
259 direct_load[(int) mode] = direct_store[(int) mode] = 0;
260 PUT_MODE (mem, mode);
261 PUT_MODE (mem1, mode);
262
263 /* See if there is some register that can be used in this mode and
264 directly loaded or stored from memory. */
265
266 if (mode != VOIDmode && mode != BLKmode)
267 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
268 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
269 regno++)
270 {
271 if (! HARD_REGNO_MODE_OK (regno, mode))
272 continue;
273
274 reg = gen_rtx_REG (mode, regno);
275
276 SET_SRC (pat) = mem;
277 SET_DEST (pat) = reg;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_load[(int) mode] = 1;
280
281 SET_SRC (pat) = mem1;
282 SET_DEST (pat) = reg;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_load[(int) mode] = 1;
285
286 SET_SRC (pat) = reg;
287 SET_DEST (pat) = mem;
288 if (recog (pat, insn, &num_clobbers) >= 0)
289 direct_store[(int) mode] = 1;
290
291 SET_SRC (pat) = reg;
292 SET_DEST (pat) = mem1;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_store[(int) mode] = 1;
295 }
296 }
297
298 end_sequence ();
299 obfree (free_point);
300 }
301
302 /* This is run at the start of compiling a function. */
303
304 void
305 init_expr ()
306 {
307 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
308
309 pending_chain = 0;
310 pending_stack_adjust = 0;
311 stack_pointer_delta = 0;
312 inhibit_defer_pop = 0;
313 saveregs_value = 0;
314 apply_args_value = 0;
315 forced_labels = 0;
316 }
317
318 void
319 mark_expr_status (p)
320 struct expr_status *p;
321 {
322 if (p == NULL)
323 return;
324
325 ggc_mark_rtx (p->x_saveregs_value);
326 ggc_mark_rtx (p->x_apply_args_value);
327 ggc_mark_rtx (p->x_forced_labels);
328 }
329
330 void
331 free_expr_status (f)
332 struct function *f;
333 {
334 free (f->expr);
335 f->expr = NULL;
336 }
337
338 /* Small sanity check that the queue is empty at the end of a function. */
339
340 void
341 finish_expr_for_function ()
342 {
343 if (pending_chain)
344 abort ();
345 }
346 \f
347 /* Manage the queue of increment instructions to be output
348 for POSTINCREMENT_EXPR expressions, etc. */
349
350 /* Queue up to increment (or change) VAR later. BODY says how:
351 BODY should be the same thing you would pass to emit_insn
352 to increment right away. It will go to emit_insn later on.
353
354 The value is a QUEUED expression to be used in place of VAR
355 where you want to guarantee the pre-incrementation value of VAR. */
356
357 static rtx
358 enqueue_insn (var, body)
359 rtx var, body;
360 {
361 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
362 body, pending_chain);
363 return pending_chain;
364 }
365
366 /* Use protect_from_queue to convert a QUEUED expression
367 into something that you can put immediately into an instruction.
368 If the queued incrementation has not happened yet,
369 protect_from_queue returns the variable itself.
370 If the incrementation has happened, protect_from_queue returns a temp
371 that contains a copy of the old value of the variable.
372
373 Any time an rtx which might possibly be a QUEUED is to be put
374 into an instruction, it must be passed through protect_from_queue first.
375 QUEUED expressions are not meaningful in instructions.
376
377 Do not pass a value through protect_from_queue and then hold
378 on to it for a while before putting it in an instruction!
379 If the queue is flushed in between, incorrect code will result. */
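/* A rough sketch of the intended pattern (the operands here are
   hypothetical, not taken from any particular caller):

	from = protect_from_queue (from, 0);
	to = protect_from_queue (to, 1);
	emit_insn (gen_move_insn (to, from));
	emit_queue ();

   Each rtx is passed through protect_from_queue immediately before it is
   put into an insn; emit_queue then flushes the pending increments.  */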
380
381 rtx
382 protect_from_queue (x, modify)
383 register rtx x;
384 int modify;
385 {
386 register RTX_CODE code = GET_CODE (x);
387
388 #if 0 /* A QUEUED can hang around after the queue is forced out. */
389 /* Shortcut for most common case. */
390 if (pending_chain == 0)
391 return x;
392 #endif
393
394 if (code != QUEUED)
395 {
396 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
397 use of autoincrement. Make a copy of the contents of the memory
398 location rather than a copy of the address, but not if the value is
399 of mode BLKmode. Don't modify X in place since it might be
400 shared. */
401 if (code == MEM && GET_MODE (x) != BLKmode
402 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
403 {
404 register rtx y = XEXP (x, 0);
405 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
406
407 MEM_COPY_ATTRIBUTES (new, x);
408
409 if (QUEUED_INSN (y))
410 {
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
413 QUEUED_INSN (y));
414 return temp;
415 }
416 return new;
417 }
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
420 if (code == MEM)
421 {
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
424 {
425 x = copy_rtx (x);
426 XEXP (x, 0) = tem;
427 }
428 }
429 else if (code == PLUS || code == MULT)
430 {
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
434 {
435 x = copy_rtx (x);
436 XEXP (x, 0) = new0;
437 XEXP (x, 1) = new1;
438 }
439 }
440 return x;
441 }
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
446 use that copy. */
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
453 QUEUED_INSN (x));
454 return QUEUED_COPY (x);
455 }
456
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
461
462 int
463 queued_subexp_p (x)
464 rtx x;
465 {
466 register enum rtx_code code = GET_CODE (x);
467 switch (code)
468 {
469 case QUEUED:
470 return 1;
471 case MEM:
472 return queued_subexp_p (XEXP (x, 0));
473 case MULT:
474 case PLUS:
475 case MINUS:
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
478 default:
479 return 0;
480 }
481 }
482
483 /* Perform all the pending incrementations. */
484
485 void
486 emit_queue ()
487 {
488 register rtx p;
489 while ((p = pending_chain))
490 {
491 rtx body = QUEUED_BODY (p);
492
493 if (GET_CODE (body) == SEQUENCE)
494 {
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
497 }
498 else
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
501 }
502 }
503 \f
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
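/* Purely as an illustration (the pseudos are hypothetical): to widen an
   unsigned SImode value into a fresh DImode register a caller would do

	rtx wide = gen_reg_rtx (DImode);
	convert_move (wide, narrow, 1);

   which zero-extends; passing 0 for UNSIGNEDP would sign-extend
   instead.  */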
508
509 void
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
512 int unsignedp;
513 {
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
518 enum insn_code code;
519 rtx libcall;
520
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
526
527 if (to_real != from_real)
528 abort ();
529
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
532 TO here. */
533
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
541 abort ();
542
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 {
546 emit_move_insn (to, from);
547 return;
548 }
549
550 if (to_real)
551 {
552 rtx value;
553
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
555 {
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
559 {
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
562 }
563 }
564
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708
709 libcall = (rtx) 0;
710 switch (from_mode)
711 {
712 case SFmode:
713 switch (to_mode)
714 {
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
718
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
722
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
726
727 default:
728 break;
729 }
730 break;
731
732 case DFmode:
733 switch (to_mode)
734 {
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
738
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
742
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
746
747 default:
748 break;
749 }
750 break;
751
752 case XFmode:
753 switch (to_mode)
754 {
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
758
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
762
763 default:
764 break;
765 }
766 break;
767
768 case TFmode:
769 switch (to_mode)
770 {
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
774
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
778
779 default:
780 break;
781 }
782 break;
783
784 default:
785 break;
786 }
787
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
791
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
793 1, from, from_mode);
794 emit_move_insn (to, value);
795 return;
796 }
797
798 /* Now both modes are integers. */
799
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
803 {
804 rtx insns;
805 rtx lowpart;
806 rtx fill_value;
807 rtx lowfrom;
808 int i;
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
811
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
814 != CODE_FOR_nothing)
815 {
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
823 return;
824 }
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
829 {
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
835 return;
836 }
837
838 /* No special multiword conversion insn; do it by hand. */
839 start_sequence ();
840
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
843
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
846
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
850 else
851 lowpart_mode = from_mode;
852
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
854
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
857
858 /* Compute the value to put in each remaining word. */
859 if (unsignedp)
860 fill_value = const0_rtx;
861 else
862 {
863 #ifdef HAVE_slt
864 if (HAVE_slt
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
867 {
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
869 lowpart_mode, 0, 0);
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
872 }
873 else
874 #endif
875 {
876 fill_value
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
879 NULL_RTX, 0);
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
881 }
882 }
883
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
886 {
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
889
890 if (subword == 0)
891 abort ();
892
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
895 }
896
897 insns = get_insns ();
898 end_sequence ();
899
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
902 return;
903 }
904
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
908 {
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
917 return;
918 }
919
920 /* Handle pointer conversion. */ /* SPEE 900220 */
921 if (to_mode == PQImode)
922 {
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
925
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
928 {
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_truncqipqi2 */
933 abort ();
934 }
935
936 if (from_mode == PQImode)
937 {
938 if (to_mode != QImode)
939 {
940 from = convert_to_mode (QImode, from, unsignedp);
941 from_mode = QImode;
942 }
943 else
944 {
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
947 {
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
949 return;
950 }
951 #endif /* HAVE_extendpqiqi2 */
952 abort ();
953 }
954 }
955
956 if (to_mode == PSImode)
957 {
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
960
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
963 {
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_truncsipsi2 */
968 abort ();
969 }
970
971 if (from_mode == PSImode)
972 {
973 if (to_mode != SImode)
974 {
975 from = convert_to_mode (SImode, from, unsignedp);
976 from_mode = SImode;
977 }
978 else
979 {
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 register rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong thing if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
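/* Concretely (assuming a 32-bit HOST_WIDE_INT): converting the CONST_INT
   -1 that arose from an unsigned HImode value into DImode should give
   the constant 0xffff with a zero high word; gen_lowpart would instead
   produce all ones.  So we mask VAL down to OLDMODE's width and build
   the result with immed_double_const.  */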
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (val);
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
1362 \f
1363
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1366
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1370
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1373 #endif
1374
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
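/* As a rough illustration (assuming a 32-bit target with MOVE_MAX == 4
   and word-aligned operands): an 11-byte copy comes out as two SImode
   moves, then one HImode move, then one QImode move -- the loop below
   walks from the widest usable integer mode down to the narrowest until
   LEN is exhausted.  */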
1380
1381 void
1382 move_by_pieces (to, from, len, align)
1383 rtx to, from;
1384 int len;
1385 unsigned int align;
1386 {
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1392
1393 data.offset = 0;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1396 data.to = to;
1397 data.from = from;
1398 data.autinc_to
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1401 data.autinc_from
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1405
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1408 data.reverse
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1411 data.len = len;
1412
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1417
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1423 {
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1428 mode = tmode;
1429
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1431 {
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1435 }
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1437 {
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1441 }
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1445 {
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1447 data.autinc_to = 1;
1448 data.explicit_inc_to = -1;
1449 }
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1451 {
1452 data.to_addr = copy_addr_to_reg (to_addr);
1453 data.autinc_to = 1;
1454 data.explicit_inc_to = 1;
1455 }
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1458 }
1459
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1463
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1466
1467 while (max_size > 1)
1468 {
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1472 mode = tmode;
1473
1474 if (mode == VOIDmode)
1475 break;
1476
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1480
1481 max_size = GET_MODE_SIZE (mode);
1482 }
1483
1484 /* The code above should have handled everything. */
1485 if (data.len > 0)
1486 abort ();
1487 }
1488
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bits) is maximum alignment we can assume. */
1491
1492 static int
1493 move_by_pieces_ninsns (l, align)
1494 unsigned int l;
1495 unsigned int align;
1496 {
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1499
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1503
1504 while (max_size > 1)
1505 {
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1508
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1512 mode = tmode;
1513
1514 if (mode == VOIDmode)
1515 break;
1516
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1520
1521 max_size = GET_MODE_SIZE (mode);
1522 }
1523
1524 return n_insns;
1525 }
1526
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1530
1531 static void
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1536 {
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1539
1540 while (data->len >= size)
1541 {
1542 if (data->reverse) data->offset -= size;
1543
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1548 data->offset))));
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1551
1552 from1
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1557 data->offset))));
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1560
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1565
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1571
1572 if (! data->reverse) data->offset += size;
1573
1574 data->len -= size;
1575 }
1576 }
1577 \f
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1581
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1583 with mode BLKmode.
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1586
1587 Return the address of the new block, if memcpy is called and returns it,
1588 0 otherwise. */
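/* A typical (hypothetical) use when expanding an assignment of a known
   64-byte aggregate might be

	emit_block_move (dst_mem, src_mem, GEN_INT (64), TYPE_ALIGN (type));

   with both operands BLKmode MEMs; the code below then chooses between
   move_by_pieces, a movstrMM pattern, and a memcpy/bcopy call.  */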
1589
1590 rtx
1591 emit_block_move (x, y, size, align)
1592 rtx x, y;
1593 rtx size;
1594 unsigned int align;
1595 {
1596 rtx retval = 0;
1597 #ifdef TARGET_MEM_FUNCTIONS
1598 static tree fn;
1599 tree call_expr, arg_list;
1600 #endif
1601
1602 if (GET_MODE (x) != BLKmode)
1603 abort ();
1604
1605 if (GET_MODE (y) != BLKmode)
1606 abort ();
1607
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1611
1612 if (GET_CODE (x) != MEM)
1613 abort ();
1614 if (GET_CODE (y) != MEM)
1615 abort ();
1616 if (size == 0)
1617 abort ();
1618
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1621 else
1622 {
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
1626
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1629
1630 /* Since this is a move insn, we don't care about volatility. */
1631 volatile_ok = 1;
1632
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1635 {
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1638
1639 if (code != CODE_FOR_nothing
1640 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1654 {
1655 rtx op2;
1656 rtx last = get_last_insn ();
1657 rtx pat;
1658
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1663
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1665 if (pat)
1666 {
1667 emit_insn (pat);
1668 volatile_ok = 0;
1669 return 0;
1670 }
1671 else
1672 delete_insns_since (last);
1673 }
1674 }
1675
1676 volatile_ok = 0;
1677
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1679
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1683
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1687
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1691 emit_queue.
1692
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1700
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1703 #else
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1707 #endif
1708
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1712
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1715
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1718
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1722 {
1723 tree fntype;
1724
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1739 pop_obstacks ();
1740 }
1741
1742 /* We need to make an argument list for the function call.
1743
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1746 arg_list
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1755
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1761
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1763 #else
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1769 #endif
1770 }
1771
1772 return retval;
1773 }
1774 \f
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
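/* For instance (hypothetical values): spreading a DImode value X across
   the two word registers starting at hard register 3 would be requested
   as move_block_to_reg (3, x, 2, DImode).  */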
1777
1778 void
1779 move_block_to_reg (regno, x, nregs, mode)
1780 int regno;
1781 rtx x;
1782 int nregs;
1783 enum machine_mode mode;
1784 {
1785 int i;
1786 #ifdef HAVE_load_multiple
1787 rtx pat;
1788 rtx last;
1789 #endif
1790
1791 if (nregs == 0)
1792 return;
1793
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1796
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1800 {
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1803 GEN_INT (nregs));
1804 if (pat)
1805 {
1806 emit_insn (pat);
1807 return;
1808 }
1809 else
1810 delete_insns_since (last);
1811 }
1812 #endif
1813
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1817 }
1818
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1822
1823
1824 void
1825 move_block_from_reg (regno, x, nregs, size)
1826 int regno;
1827 rtx x;
1828 int nregs;
1829 int size;
1830 {
1831 int i;
1832 #ifdef HAVE_store_multiple
1833 rtx pat;
1834 rtx last;
1835 #endif
1836 enum machine_mode mode;
1837
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1842 {
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1845 return;
1846 }
1847
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1852 {
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1854 rtx shift;
1855
1856 if (tem == 0)
1857 abort ();
1858
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1864 return;
1865 }
1866
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1870 {
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1873 GEN_INT (nregs));
1874 if (pat)
1875 {
1876 emit_insn (pat);
1877 return;
1878 }
1879 else
1880 delete_insns_since (last);
1881 }
1882 #endif
1883
1884 for (i = 0; i < nregs; i++)
1885 {
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1892 }
1893 }
1894
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1898 SRC in bits. */
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1904 would be needed. */
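/* For illustration, DST for a 16-byte structure returned in two DImode
   registers might look like (register numbers invented):

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   Each element pairs a destination register with its byte offset within
   SRC; a leading element with a null register marks a value split
   between the stack and registers.  */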
1905
1906 void
1907 emit_group_load (dst, orig_src, ssize, align)
1908 rtx dst, orig_src;
1909 unsigned int align;
1910 int ssize;
1911 {
1912 rtx *tmps, src;
1913 int start, i;
1914
1915 if (GET_CODE (dst) != PARALLEL)
1916 abort ();
1917
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1921 start = 0;
1922 else
1923 start = 1;
1924
1925 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1926
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1929 src = orig_src;
1930 if (GET_CODE (src) != MEM)
1931 {
1932 if (GET_MODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1934 else
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1937 }
1938
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1941 {
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1945 int shift = 0;
1946
1947 /* Handle trailing fragments that run over the size of the struct. */
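      /* For example (illustrative numbers), with SSIZE == 6, a 4-byte MODE
         and BYTEPOS == 4 only two bytes remain, so BYTELEN becomes 2 and
         the big-endian correction shift is (4 - 2) * 8 = 16 bits.  */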
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1949 {
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1952 if (bytelen <= 0)
1953 abort ();
1954 }
1955
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1961 {
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1966 bytepos)));
1967 }
1968 else if (GET_CODE (src) == CONCAT)
1969 {
1970 if (bytepos == 0
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1976 else
1977 abort ();
1978 }
1979 else
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1983
1984 if (BYTES_BIG_ENDIAN && shift)
1985 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1987 }
1988
1989 emit_queue ();
1990
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1994 }
1995
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1999
2000 void
2001 emit_group_store (orig_dst, src, ssize, align)
2002 rtx orig_dst, src;
2003 int ssize;
2004 unsigned int align;
2005 {
2006 rtx *tmps, dst;
2007 int start, i;
2008
2009 if (GET_CODE (src) != PARALLEL)
2010 abort ();
2011
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2015 start = 0;
2016 else
2017 start = 1;
2018
2019 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2020
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2023 {
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2027 }
2028 emit_queue ();
2029
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2032 dst = orig_dst;
2033 if (GET_CODE (dst) == PARALLEL)
2034 {
2035 rtx temp;
2036
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2041 return;
2042
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2045 the temporary. */
2046
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2050 return;
2051 }
2052 else if (GET_CODE (dst) != MEM)
2053 {
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2057 }
2058 else if (! MEM_IN_STRUCT_P (dst))
2059 {
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2062
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2065 }
2066
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2073
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2076 {
2077 if (BYTES_BIG_ENDIAN)
2078 {
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2082 }
2083 bytelen = ssize - bytepos;
2084 }
2085
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2093 bytepos)),
2094 tmps[i]);
2095 else
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2098 }
2099
2100 emit_queue ();
2101
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2105 }
2106
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2110
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
2115
2116 rtx
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2118 rtx tgtblk;
2119 rtx srcreg;
2120 tree type;
2121 {
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2126
2127 if (tgtblk == 0)
2128 {
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2132 }
2133
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2139
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
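  /* For example (illustrative numbers), with BITS_PER_WORD == 32,
     UNITS_PER_WORD == 4 and a 3-byte structure, the correction computed
     below is 32 - 3 * 8 = 8 bits.  */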
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2147
2148 /* Copy the structure BITSIZE bits at a time.
2149
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2152 time. */
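  /* For example (illustrative numbers), a 6-byte structure whose type is
     aligned to 16 bits gives BITSIZE == 16 and is copied in three 16-bit
     pieces, fetching a new source word whenever XBITPOS crosses a word
     boundary.  */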
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2156 {
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2163
2164 /* We need a new destination operand each time bitpos is on
2165 a word boundary. */
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
2169 /* Use xbitpos for the source extraction (right justified) and
2170 bitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2177 }
2178
2179 return tgtblk;
2180 }
2181
2182 /* Add a USE expression for REG to the (possibly empty) list pointed
2183 to by CALL_FUSAGE. REG must denote a hard register. */
2184
2185 void
2186 use_reg (call_fusage, reg)
2187 rtx *call_fusage, reg;
2188 {
2189 if (GET_CODE (reg) != REG
2190 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2191 abort ();
2192
2193 *call_fusage
2194 = gen_rtx_EXPR_LIST (VOIDmode,
2195 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2196 }
2197
2198 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2199 starting at REGNO. All of these registers must be hard registers. */
2200
2201 void
2202 use_regs (call_fusage, regno, nregs)
2203 rtx *call_fusage;
2204 int regno;
2205 int nregs;
2206 {
2207 int i;
2208
2209 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2210 abort ();
2211
2212 for (i = 0; i < nregs; i++)
2213 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2214 }
2215
2216 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2217 PARALLEL REGS. This is for calls that pass values in multiple
2218 non-contiguous locations. The Irix 6 ABI has examples of this. */
2219
2220 void
2221 use_group_regs (call_fusage, regs)
2222 rtx *call_fusage;
2223 rtx regs;
2224 {
2225 int i;
2226
2227 for (i = 0; i < XVECLEN (regs, 0); i++)
2228 {
2229 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2230
2231 /* A NULL entry means the parameter goes both on the stack and in
2232 registers. This can also be a MEM for targets that pass values
2233 partially on the stack and partially in registers. */
2234 if (reg != 0 && GET_CODE (reg) == REG)
2235 use_reg (call_fusage, reg);
2236 }
2237 }
2238 \f
2239 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2240 rtx with BLKmode). The caller must pass TO through protect_from_queue
2241 before calling. ALIGN is maximum alignment we can assume. */
2242
2243 static void
2244 clear_by_pieces (to, len, align)
2245 rtx to;
2246 int len;
2247 unsigned int align;
2248 {
2249 struct clear_by_pieces data;
2250 rtx to_addr = XEXP (to, 0);
2251 unsigned int max_size = MOVE_MAX_PIECES + 1;
2252 enum machine_mode mode = VOIDmode, tmode;
2253 enum insn_code icode;
2254
2255 data.offset = 0;
2256 data.to_addr = to_addr;
2257 data.to = to;
2258 data.autinc_to
2259 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2260 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2261
2262 data.explicit_inc_to = 0;
2263 data.reverse
2264 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2265 if (data.reverse) data.offset = len;
2266 data.len = len;
2267
2268 data.to_struct = MEM_IN_STRUCT_P (to);
2269
2270 /* If copying requires more than two move insns,
2271 copy addresses to registers (to make displacements shorter)
2272 and use post-increment if available. */
2273 if (!data.autinc_to
2274 && move_by_pieces_ninsns (len, align) > 2)
2275 {
2276 /* Determine the main mode we'll be using. */
2277 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2278 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2279 if (GET_MODE_SIZE (tmode) < max_size)
2280 mode = tmode;
2281
2282 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = -1;
2287 }
2288 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2289 {
2290 data.to_addr = copy_addr_to_reg (to_addr);
2291 data.autinc_to = 1;
2292 data.explicit_inc_to = 1;
2293 }
2294 if (!data.autinc_to && CONSTANT_P (to_addr))
2295 data.to_addr = copy_addr_to_reg (to_addr);
2296 }
2297
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2301
2302 /* First move what we can in the largest integer mode, then go to
2303 successively smaller modes. */
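  /* For example (illustrative, assuming sufficient alignment and the usual
     move patterns), clearing 7 bytes on a 32-bit target would emit one
     SImode, one HImode and one QImode store of zero.  */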
2304
2305 while (max_size > 1)
2306 {
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2310 mode = tmode;
2311
2312 if (mode == VOIDmode)
2313 break;
2314
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2317 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2318
2319 max_size = GET_MODE_SIZE (mode);
2320 }
2321
2322 /* The code above should have handled everything. */
2323 if (data.len != 0)
2324 abort ();
2325 }
2326
2327 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2328 with move instructions for mode MODE. GENFUN is the gen_... function
2329 to make a move insn for that mode. DATA has all the other info. */
2330
2331 static void
2332 clear_by_pieces_1 (genfun, mode, data)
2333 rtx (*genfun) PARAMS ((rtx, ...));
2334 enum machine_mode mode;
2335 struct clear_by_pieces *data;
2336 {
2337 register int size = GET_MODE_SIZE (mode);
2338 register rtx to1;
2339
2340 while (data->len >= size)
2341 {
2342 if (data->reverse) data->offset -= size;
2343
2344 to1 = (data->autinc_to
2345 ? gen_rtx_MEM (mode, data->to_addr)
2346 : copy_rtx (change_address (data->to, mode,
2347 plus_constant (data->to_addr,
2348 data->offset))));
2349 MEM_IN_STRUCT_P (to1) = data->to_struct;
2350
2351 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2352 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2353
2354 emit_insn ((*genfun) (to1, const0_rtx));
2355 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2356 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2357
2358 if (! data->reverse) data->offset += size;
2359
2360 data->len -= size;
2361 }
2362 }
2363 \f
2364 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2365 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2366
2367 If we call a function that returns the length of the block, return it. */
2368
2369 rtx
2370 clear_storage (object, size, align)
2371 rtx object;
2372 rtx size;
2373 unsigned int align;
2374 {
2375 #ifdef TARGET_MEM_FUNCTIONS
2376 static tree fn;
2377 tree call_expr, arg_list;
2378 #endif
2379 rtx retval = 0;
2380
2381 if (GET_MODE (object) == BLKmode)
2382 {
2383 object = protect_from_queue (object, 1);
2384 size = protect_from_queue (size, 0);
2385
2386 if (GET_CODE (size) == CONST_INT
2387 && MOVE_BY_PIECES_P (INTVAL (size), align))
2388 clear_by_pieces (object, INTVAL (size), align);
2389 else
2390 {
2391 /* Try the most limited insn first, because there's no point
2392 including more than one in the machine description unless
2393 the more limited one has some advantage. */
2394
2395 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2396 enum machine_mode mode;
2397
2398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2399 mode = GET_MODE_WIDER_MODE (mode))
2400 {
2401 enum insn_code code = clrstr_optab[(int) mode];
2402 insn_operand_predicate_fn pred;
2403
2404 if (code != CODE_FOR_nothing
2405 /* We don't need MODE to be narrower than
2406 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2407 the mode mask, as it is returned by the macro, it will
2408 definitely be less than the actual mode mask. */
2409 && ((GET_CODE (size) == CONST_INT
2410 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2411 <= (GET_MODE_MASK (mode) >> 1)))
2412 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2413 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2414 || (*pred) (object, BLKmode))
2415 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2416 || (*pred) (opalign, VOIDmode)))
2417 {
2418 rtx op1;
2419 rtx last = get_last_insn ();
2420 rtx pat;
2421
2422 op1 = convert_to_mode (mode, size, 1);
2423 pred = insn_data[(int) code].operand[1].predicate;
2424 if (pred != 0 && ! (*pred) (op1, mode))
2425 op1 = copy_to_mode_reg (mode, op1);
2426
2427 pat = GEN_FCN ((int) code) (object, op1, opalign);
2428 if (pat)
2429 {
2430 emit_insn (pat);
2431 return 0;
2432 }
2433 else
2434 delete_insns_since (last);
2435 }
2436 }
2437
2438 /* OBJECT or SIZE may have been passed through protect_from_queue.
2439
2440 It is unsafe to save the value generated by protect_from_queue
2441 and reuse it later. Consider what happens if emit_queue is
2442 called before the return value from protect_from_queue is used.
2443
2444 Expansion of the CALL_EXPR below will call emit_queue before
2445 we are finished emitting RTL for argument setup. So if we are
2446 not careful we could get the wrong value for an argument.
2447
2448 To avoid this problem we go ahead and emit code to copy OBJECT
2449 and SIZE into new pseudos. We can then place those new pseudos
2450 into an RTL_EXPR and use them later, even after a call to
2451 emit_queue.
2452
2453 Note this is not strictly needed for library calls since they
2454 do not call emit_queue before loading their arguments. However,
2455 we may need to have library calls call emit_queue in the future
2456 since failing to do so could cause problems for targets which
2457 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2458 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2459
2460 #ifdef TARGET_MEM_FUNCTIONS
2461 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2462 #else
2463 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2464 TREE_UNSIGNED (integer_type_node));
2465 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2466 #endif
2467
2468
2469 #ifdef TARGET_MEM_FUNCTIONS
2470 /* It is incorrect to use the libcall calling conventions to call
2471 memset in this context.
2472
2473 This could be a user call to memset and the user may wish to
2474 examine the return value from memset.
2475
2476 For targets where libcalls and normal calls have different
2477 conventions for returning pointers, we could end up generating
2478 incorrect code.
2479
2480 So instead of using a libcall sequence we build up a suitable
2481 CALL_EXPR and expand the call in the normal fashion. */
2482 if (fn == NULL_TREE)
2483 {
2484 tree fntype;
2485
2486 /* This was copied from except.c; I don't know whether all of this is
2487 necessary in this context or not. */
2488 fn = get_identifier ("memset");
2489 push_obstacks_nochange ();
2490 end_temporary_allocation ();
2491 fntype = build_pointer_type (void_type_node);
2492 fntype = build_function_type (fntype, NULL_TREE);
2493 fn = build_decl (FUNCTION_DECL, fn, fntype);
2494 ggc_add_tree_root (&fn, 1);
2495 DECL_EXTERNAL (fn) = 1;
2496 TREE_PUBLIC (fn) = 1;
2497 DECL_ARTIFICIAL (fn) = 1;
2498 make_decl_rtl (fn, NULL_PTR, 1);
2499 assemble_external (fn);
2500 pop_obstacks ();
2501 }
2502
2503 /* We need to make an argument list for the function call.
2504
2505 memset has three arguments: the first is a void * address, the
2506 second an integer with the initialization value, and the last is a
2507 size_t byte count for the copy. */
2508 arg_list
2509 = build_tree_list (NULL_TREE,
2510 make_tree (build_pointer_type (void_type_node),
2511 object));
2512 TREE_CHAIN (arg_list)
2513 = build_tree_list (NULL_TREE,
2514 make_tree (integer_type_node, const0_rtx));
2515 TREE_CHAIN (TREE_CHAIN (arg_list))
2516 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2517 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2518
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR,
2521 build_pointer_type (TREE_TYPE (fn)), fn);
2522 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2523 call_expr, arg_list, NULL_TREE);
2524 TREE_SIDE_EFFECTS (call_expr) = 1;
2525
2526 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2527 #else
2528 emit_library_call (bzero_libfunc, 0,
2529 VOIDmode, 2, object, Pmode, size,
2530 TYPE_MODE (integer_type_node));
2531 #endif
2532 }
2533 }
2534 else
2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2536
2537 return retval;
2538 }
2539
2540 /* Generate code to copy Y into X.
2541 Both Y and X must have the same mode, except that
2542 Y can be a constant with VOIDmode.
2543 This mode cannot be BLKmode; use emit_block_move for that.
2544
2545 Return the last instruction emitted. */
2546
2547 rtx
2548 emit_move_insn (x, y)
2549 rtx x, y;
2550 {
2551 enum machine_mode mode = GET_MODE (x);
2552
2553 x = protect_from_queue (x, 1);
2554 y = protect_from_queue (y, 0);
2555
2556 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2557 abort ();
2558
2559 /* Never force constant_p_rtx to memory. */
2560 if (GET_CODE (y) == CONSTANT_P_RTX)
2561 ;
2562 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2563 y = force_const_mem (mode, y);
2564
2565 /* If X or Y are memory references, verify that their addresses are valid
2566 for the machine. */
2567 if (GET_CODE (x) == MEM
2568 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2569 && ! push_operand (x, GET_MODE (x)))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2572 x = change_address (x, VOIDmode, XEXP (x, 0));
2573
2574 if (GET_CODE (y) == MEM
2575 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2576 || (flag_force_addr
2577 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2578 y = change_address (y, VOIDmode, XEXP (y, 0));
2579
2580 if (mode == BLKmode)
2581 abort ();
2582
2583 return emit_move_insn_1 (x, y);
2584 }
2585
2586 /* Low level part of emit_move_insn.
2587 Called just like emit_move_insn, but assumes X and Y
2588 are basically valid. */
2589
2590 rtx
2591 emit_move_insn_1 (x, y)
2592 rtx x, y;
2593 {
2594 enum machine_mode mode = GET_MODE (x);
2595 enum machine_mode submode;
2596 enum mode_class class = GET_MODE_CLASS (mode);
2597 unsigned int i;
2598
2599 if (mode >= MAX_MACHINE_MODE)
2600 abort ();
2601
2602 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2603 return
2604 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2605
2606 /* Expand complex moves by moving real part and imag part, if possible. */
2607 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2608 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2609 * BITS_PER_UNIT),
2610 (class == MODE_COMPLEX_INT
2611 ? MODE_INT : MODE_FLOAT),
2612 0))
2613 && (mov_optab->handlers[(int) submode].insn_code
2614 != CODE_FOR_nothing))
2615 {
2616 /* Don't split destination if it is a stack push. */
2617 int stack = push_operand (x, GET_MODE (x));
2618
2619 /* If this is a stack push, push the high part first, so it
2620 will be in the argument order.
2621
2622 In that case, change_address is used only to convert
2623 the mode, not to change the address. */
2624 if (stack)
2625 {
2626 /* Note that the real part always precedes the imag part in memory
2627 regardless of machine's endianness. */
2628 #ifdef STACK_GROWS_DOWNWARD
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_imagpart (submode, y)));
2632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2633 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2634 gen_realpart (submode, y)));
2635 #else
2636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2637 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2638 gen_realpart (submode, y)));
2639 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2640 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2641 gen_imagpart (submode, y)));
2642 #endif
2643 }
2644 else
2645 {
2646 rtx realpart_x, realpart_y;
2647 rtx imagpart_x, imagpart_y;
2648
2649 /* If this is a complex value with each part being smaller than a
2650 word, the usual calling sequence will likely pack the pieces into
2651 a single register. Unfortunately, SUBREG of hard registers only
2652 deals in terms of words, so we have a problem converting input
2653 arguments to the CONCAT of two registers that is used elsewhere
2654 for complex values. If this is before reload, we can copy it into
2655 memory and reload. FIXME, we should see about using extract and
2656 insert on integer registers, but complex short and complex char
2657 variables should be rarely used. */
2658 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2659 && (reload_in_progress | reload_completed) == 0)
2660 {
2661 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2662 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2663
2664 if (packed_dest_p || packed_src_p)
2665 {
2666 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2667 ? MODE_FLOAT : MODE_INT);
2668
2669 enum machine_mode reg_mode =
2670 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2671
2672 if (reg_mode != BLKmode)
2673 {
2674 rtx mem = assign_stack_temp (reg_mode,
2675 GET_MODE_SIZE (mode), 0);
2676
2677 rtx cmem = change_address (mem, mode, NULL_RTX);
2678
2679 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2680
2681 if (packed_dest_p)
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2684 emit_move_insn_1 (cmem, y);
2685 return emit_move_insn_1 (sreg, mem);
2686 }
2687 else
2688 {
2689 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2690 emit_move_insn_1 (mem, sreg);
2691 return emit_move_insn_1 (x, cmem);
2692 }
2693 }
2694 }
2695 }
2696
2697 realpart_x = gen_realpart (submode, x);
2698 realpart_y = gen_realpart (submode, y);
2699 imagpart_x = gen_imagpart (submode, x);
2700 imagpart_y = gen_imagpart (submode, y);
2701
2702 /* Show the output dies here. This is necessary for SUBREGs
2703 of pseudos since we cannot track their lifetimes correctly;
2704 hard regs shouldn't appear here except as return values.
2705 We never want to emit such a clobber after reload. */
2706 if (x != y
2707 && ! (reload_in_progress || reload_completed)
2708 && (GET_CODE (realpart_x) == SUBREG
2709 || GET_CODE (imagpart_x) == SUBREG))
2710 {
2711 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2712 }
2713
2714 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2715 (realpart_x, realpart_y));
2716 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2717 (imagpart_x, imagpart_y));
2718 }
2719
2720 return get_last_insn ();
2721 }
2722
2723 /* This will handle any multi-word mode that lacks a move_insn pattern.
2724 However, you will get better code if you define such patterns,
2725 even if they must turn into multiple assembler instructions. */
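  /* For example, a DImode move on a 32-bit target that has no movdi
     pattern is split by the code below into two word-sized moves obtained
     via operand_subword.  */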
2726 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2727 {
2728 rtx last_insn = 0;
2729 rtx seq, inner;
2730 int need_clobber;
2731
2732 #ifdef PUSH_ROUNDING
2733
2734 /* If X is a push on the stack, do the push now and replace
2735 X with a reference to the stack pointer. */
2736 if (push_operand (x, GET_MODE (x)))
2737 {
2738 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2739 x = change_address (x, VOIDmode, stack_pointer_rtx);
2740 }
2741 #endif
2742
2743 /* If we are in reload, see if either operand is a MEM whose address
2744 is scheduled for replacement. */
2745 if (reload_in_progress && GET_CODE (x) == MEM
2746 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2747 {
2748 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2749
2750 MEM_COPY_ATTRIBUTES (new, x);
2751 x = new;
2752 }
2753 if (reload_in_progress && GET_CODE (y) == MEM
2754 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2755 {
2756 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2757
2758 MEM_COPY_ATTRIBUTES (new, y);
2759 y = new;
2760 }
2761
2762 start_sequence ();
2763
2764 need_clobber = 0;
2765 for (i = 0;
2766 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2767 i++)
2768 {
2769 rtx xpart = operand_subword (x, i, 1, mode);
2770 rtx ypart = operand_subword (y, i, 1, mode);
2771
2772 /* If we can't get a part of Y, put Y into memory if it is a
2773 constant. Otherwise, force it into a register. If we still
2774 can't get a part of Y, abort. */
2775 if (ypart == 0 && CONSTANT_P (y))
2776 {
2777 y = force_const_mem (mode, y);
2778 ypart = operand_subword (y, i, 1, mode);
2779 }
2780 else if (ypart == 0)
2781 ypart = operand_subword_force (y, i, mode);
2782
2783 if (xpart == 0 || ypart == 0)
2784 abort ();
2785
2786 need_clobber |= (GET_CODE (xpart) == SUBREG);
2787
2788 last_insn = emit_move_insn (xpart, ypart);
2789 }
2790
2791 seq = gen_sequence ();
2792 end_sequence ();
2793
2794 /* Show the output dies here. This is necessary for SUBREGs
2795 of pseudos since we cannot track their lifetimes correctly;
2796 hard regs shouldn't appear here except as return values.
2797 We never want to emit such a clobber after reload. */
2798 if (x != y
2799 && ! (reload_in_progress || reload_completed)
2800 && need_clobber != 0)
2801 {
2802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2803 }
2804
2805 emit_insn (seq);
2806
2807 return last_insn;
2808 }
2809 else
2810 abort ();
2811 }
2812 \f
2813 /* Pushing data onto the stack. */
2814
2815 /* Push a block of length SIZE (perhaps variable)
2816 and return an rtx to address the beginning of the block.
2817 Note that it is not possible for the value returned to be a QUEUED.
2818 The value may be virtual_outgoing_args_rtx.
2819
2820 EXTRA is the number of bytes of padding to push in addition to SIZE.
2821 BELOW nonzero means this padding comes at low addresses;
2822 otherwise, the padding comes at high addresses. */
2823
2824 rtx
2825 push_block (size, extra, below)
2826 rtx size;
2827 int extra, below;
2828 {
2829 register rtx temp;
2830
2831 size = convert_modes (Pmode, ptr_mode, size, 1);
2832 if (CONSTANT_P (size))
2833 anti_adjust_stack (plus_constant (size, extra));
2834 else if (GET_CODE (size) == REG && extra == 0)
2835 anti_adjust_stack (size);
2836 else
2837 {
2838 temp = copy_to_mode_reg (Pmode, size);
2839 if (extra != 0)
2840 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2841 temp, 0, OPTAB_LIB_WIDEN);
2842 anti_adjust_stack (temp);
2843 }
2844
2845 #ifndef STACK_GROWS_DOWNWARD
2846 #ifdef ARGS_GROW_DOWNWARD
2847 if (!ACCUMULATE_OUTGOING_ARGS)
2848 #else
2849 if (0)
2850 #endif
2851 #else
2852 if (1)
2853 #endif
2854 {
2855 /* Return the lowest stack address when STACK or ARGS grow downward and
2856 we are not accumulating outgoing arguments (the c4x port uses such
2857 conventions). */
2858 temp = virtual_outgoing_args_rtx;
2859 if (extra != 0 && below)
2860 temp = plus_constant (temp, extra);
2861 }
2862 else
2863 {
2864 if (GET_CODE (size) == CONST_INT)
2865 temp = plus_constant (virtual_outgoing_args_rtx,
2866 - INTVAL (size) - (below ? 0 : extra));
2867 else if (extra != 0 && !below)
2868 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2869 negate_rtx (Pmode, plus_constant (size, extra)));
2870 else
2871 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2872 negate_rtx (Pmode, size));
2873 }
2874
2875 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2876 }
2877
2878 rtx
2879 gen_push_operand ()
2880 {
2881 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2882 }
2883
2884 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2885 block of SIZE bytes. */
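/* For example, with a pre-modify push code (the final case below) the stack
   pointer left after the pushes already points at the start of the block,
   so it is simply copied to a register.  */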
2886
2887 static rtx
2888 get_push_address (size)
2889 int size;
2890 {
2891 register rtx temp;
2892
2893 if (STACK_PUSH_CODE == POST_DEC)
2894 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2895 else if (STACK_PUSH_CODE == POST_INC)
2896 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2897 else
2898 temp = stack_pointer_rtx;
2899
2900 return copy_to_reg (temp);
2901 }
2902
2903 /* Generate code to push X onto the stack, assuming it has mode MODE and
2904 type TYPE.
2905 MODE is redundant except when X is a CONST_INT (since they don't
2906 carry mode info).
2907 SIZE is an rtx for the size of data to be copied (in bytes),
2908 needed only if X is BLKmode.
2909
2910 ALIGN is maximum alignment we can assume.
2911
2912 If PARTIAL and REG are both nonzero, then copy that many of the first
2913 words of X into registers starting with REG, and push the rest of X.
2914 The amount of space pushed is decreased by PARTIAL words,
2915 rounded *down* to a multiple of PARM_BOUNDARY.
2916 REG must be a hard register in this case.
2917 If REG is zero but PARTIAL is not, take all other actions for an
2918 argument partially in registers, but do not actually load any
2919 registers.
2920
2921 EXTRA is the amount in bytes of extra space to leave next to this arg.
2922 This is ignored if an argument block has already been allocated.
2923
2924 On a machine that lacks real push insns, ARGS_ADDR is the address of
2925 the bottom of the argument block for this call. We use indexing off there
2926 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2927 argument block has not been preallocated.
2928
2929 ARGS_SO_FAR is the size of args previously pushed for this call.
2930
2931 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2932 for arguments passed in registers. If nonzero, it will be the number
2933 of bytes required. */
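/* For example (illustrative numbers), with UNITS_PER_WORD == 4, PARTIAL == 2
   and a nonzero REG, the first 8 bytes of X end up in REG and REG+1 (loaded
   at the end of this function) and only the remainder of X is pushed.  */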
2934
2935 void
2936 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2937 args_addr, args_so_far, reg_parm_stack_space,
2938 alignment_pad)
2939 register rtx x;
2940 enum machine_mode mode;
2941 tree type;
2942 rtx size;
2943 unsigned int align;
2944 int partial;
2945 rtx reg;
2946 int extra;
2947 rtx args_addr;
2948 rtx args_so_far;
2949 int reg_parm_stack_space;
2950 rtx alignment_pad;
2951 {
2952 rtx xinner;
2953 enum direction stack_direction
2954 #ifdef STACK_GROWS_DOWNWARD
2955 = downward;
2956 #else
2957 = upward;
2958 #endif
2959
2960 /* Decide where to pad the argument: `downward' for below,
2961 `upward' for above, or `none' for don't pad it.
2962 Default is below for small data on big-endian machines; else above. */
2963 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2964
2965 /* Invert direction if stack is post-update. */
2966 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2967 if (where_pad != none)
2968 where_pad = (where_pad == downward ? upward : downward);
2969
2970 xinner = x = protect_from_queue (x, 0);
2971
2972 if (mode == BLKmode)
2973 {
2974 /* Copy a block into the stack, entirely or partially. */
2975
2976 register rtx temp;
2977 int used = partial * UNITS_PER_WORD;
2978 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2979 int skip;
2980
2981 if (size == 0)
2982 abort ();
2983
2984 used -= offset;
2985
2986 /* USED is now the # of bytes we need not copy to the stack
2987 because registers will take care of them. */
2988
2989 if (partial != 0)
2990 xinner = change_address (xinner, BLKmode,
2991 plus_constant (XEXP (xinner, 0), used));
2992
2993 /* If the partial register-part of the arg counts in its stack size,
2994 skip the part of stack space corresponding to the registers.
2995 Otherwise, start copying to the beginning of the stack space,
2996 by setting SKIP to 0. */
2997 skip = (reg_parm_stack_space == 0) ? 0 : used;
2998
2999 #ifdef PUSH_ROUNDING
3000 /* Do it with several push insns if that doesn't take lots of insns
3001 and if there is no difficulty with push insns that skip bytes
3002 on the stack for alignment purposes. */
3003 if (args_addr == 0
3004 && PUSH_ARGS
3005 && GET_CODE (size) == CONST_INT
3006 && skip == 0
3007 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3008 /* Here we avoid the case of a structure whose weak alignment
3009 forces many pushes of a small amount of data,
3010 and such small pushes do rounding that causes trouble. */
3011 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3012 || align >= BIGGEST_ALIGNMENT
3013 || PUSH_ROUNDING (align) == align)
3014 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3015 {
3016 /* Push padding now if padding above and stack grows down,
3017 or if padding below and stack grows up.
3018 But if space already allocated, this has already been done. */
3019 if (extra && args_addr == 0
3020 && where_pad != none && where_pad != stack_direction)
3021 anti_adjust_stack (GEN_INT (extra));
3022
3023 stack_pointer_delta += INTVAL (size) - used;
3024 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3025 INTVAL (size) - used, align);
3026
3027 if (current_function_check_memory_usage && ! in_check_memory_usage)
3028 {
3029 rtx temp;
3030
3031 in_check_memory_usage = 1;
3032 temp = get_push_address (INTVAL(size) - used);
3033 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3034 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3035 temp, Pmode,
3036 XEXP (xinner, 0), Pmode,
3037 GEN_INT (INTVAL(size) - used),
3038 TYPE_MODE (sizetype));
3039 else
3040 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3041 temp, Pmode,
3042 GEN_INT (INTVAL(size) - used),
3043 TYPE_MODE (sizetype),
3044 GEN_INT (MEMORY_USE_RW),
3045 TYPE_MODE (integer_type_node));
3046 in_check_memory_usage = 0;
3047 }
3048 }
3049 else
3050 #endif /* PUSH_ROUNDING */
3051 {
3052 /* Otherwise make space on the stack and copy the data
3053 to the address of that space. */
3054
3055 /* Deduct words put into registers from the size we must copy. */
3056 if (partial != 0)
3057 {
3058 if (GET_CODE (size) == CONST_INT)
3059 size = GEN_INT (INTVAL (size) - used);
3060 else
3061 size = expand_binop (GET_MODE (size), sub_optab, size,
3062 GEN_INT (used), NULL_RTX, 0,
3063 OPTAB_LIB_WIDEN);
3064 }
3065
3066 /* Get the address of the stack space.
3067 In this case, we do not deal with EXTRA separately.
3068 A single stack adjust will do. */
3069 if (! args_addr)
3070 {
3071 temp = push_block (size, extra, where_pad == downward);
3072 extra = 0;
3073 }
3074 else if (GET_CODE (args_so_far) == CONST_INT)
3075 temp = memory_address (BLKmode,
3076 plus_constant (args_addr,
3077 skip + INTVAL (args_so_far)));
3078 else
3079 temp = memory_address (BLKmode,
3080 plus_constant (gen_rtx_PLUS (Pmode,
3081 args_addr,
3082 args_so_far),
3083 skip));
3084 if (current_function_check_memory_usage && ! in_check_memory_usage)
3085 {
3086 rtx target;
3087
3088 in_check_memory_usage = 1;
3089 target = copy_to_reg (temp);
3090 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3091 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3092 target, Pmode,
3093 XEXP (xinner, 0), Pmode,
3094 size, TYPE_MODE (sizetype));
3095 else
3096 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3097 target, Pmode,
3098 size, TYPE_MODE (sizetype),
3099 GEN_INT (MEMORY_USE_RW),
3100 TYPE_MODE (integer_type_node));
3101 in_check_memory_usage = 0;
3102 }
3103
3104 /* TEMP is the address of the block. Copy the data there. */
3105 if (GET_CODE (size) == CONST_INT
3106 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3107 {
3108 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3109 INTVAL (size), align);
3110 goto ret;
3111 }
3112 else
3113 {
3114 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3115 enum machine_mode mode;
3116 rtx target = gen_rtx_MEM (BLKmode, temp);
3117
3118 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3119 mode != VOIDmode;
3120 mode = GET_MODE_WIDER_MODE (mode))
3121 {
3122 enum insn_code code = movstr_optab[(int) mode];
3123 insn_operand_predicate_fn pred;
3124
3125 if (code != CODE_FOR_nothing
3126 && ((GET_CODE (size) == CONST_INT
3127 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3128 <= (GET_MODE_MASK (mode) >> 1)))
3129 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3130 && (!(pred = insn_data[(int) code].operand[0].predicate)
3131 || ((*pred) (target, BLKmode)))
3132 && (!(pred = insn_data[(int) code].operand[1].predicate)
3133 || ((*pred) (xinner, BLKmode)))
3134 && (!(pred = insn_data[(int) code].operand[3].predicate)
3135 || ((*pred) (opalign, VOIDmode))))
3136 {
3137 rtx op2 = convert_to_mode (mode, size, 1);
3138 rtx last = get_last_insn ();
3139 rtx pat;
3140
3141 pred = insn_data[(int) code].operand[2].predicate;
3142 if (pred != 0 && ! (*pred) (op2, mode))
3143 op2 = copy_to_mode_reg (mode, op2);
3144
3145 pat = GEN_FCN ((int) code) (target, xinner,
3146 op2, opalign);
3147 if (pat)
3148 {
3149 emit_insn (pat);
3150 goto ret;
3151 }
3152 else
3153 delete_insns_since (last);
3154 }
3155 }
3156 }
3157
3158 if (!ACCUMULATE_OUTGOING_ARGS)
3159 {
3160 /* If the source is referenced relative to the stack pointer,
3161 copy it to another register to stabilize it. We do not need
3162 to do this if we know that we won't be changing sp. */
3163
3164 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3165 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3166 temp = copy_to_reg (temp);
3167 }
3168
3169 /* Make inhibit_defer_pop nonzero around the library call
3170 to force it to pop the bcopy-arguments right away. */
3171 NO_DEFER_POP;
3172 #ifdef TARGET_MEM_FUNCTIONS
3173 emit_library_call (memcpy_libfunc, 0,
3174 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3175 convert_to_mode (TYPE_MODE (sizetype),
3176 size, TREE_UNSIGNED (sizetype)),
3177 TYPE_MODE (sizetype));
3178 #else
3179 emit_library_call (bcopy_libfunc, 0,
3180 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3181 convert_to_mode (TYPE_MODE (integer_type_node),
3182 size,
3183 TREE_UNSIGNED (integer_type_node)),
3184 TYPE_MODE (integer_type_node));
3185 #endif
3186 OK_DEFER_POP;
3187 }
3188 }
3189 else if (partial > 0)
3190 {
3191 /* Scalar partly in registers. */
3192
3193 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3194 int i;
3195 int not_stack;
3196 /* # words of start of argument
3197 that we must make space for but need not store. */
3198 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3199 int args_offset = INTVAL (args_so_far);
3200 int skip;
3201
3202 /* Push padding now if padding above and stack grows down,
3203 or if padding below and stack grows up.
3204 But if space already allocated, this has already been done. */
3205 if (extra && args_addr == 0
3206 && where_pad != none && where_pad != stack_direction)
3207 anti_adjust_stack (GEN_INT (extra));
3208
3209 /* If we make space by pushing it, we might as well push
3210 the real data. Otherwise, we can leave OFFSET nonzero
3211 and leave the space uninitialized. */
3212 if (args_addr == 0)
3213 offset = 0;
3214
3215 /* Now NOT_STACK gets the number of words that we don't need to
3216 allocate on the stack. */
3217 not_stack = partial - offset;
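      /* For example (illustrative numbers), with BITS_PER_WORD == 32,
         PARM_BOUNDARY == 64 and PARTIAL == 3: when pushing (ARGS_ADDR == 0)
         OFFSET is reset to 0 and the loop below pushes every word from
         index 3 on; with a preallocated argument block OFFSET is 1, so the
         slot for word 2 is left uninitialized and only words 3 and up are
         stored.  */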
3218
3219 /* If the partial register-part of the arg counts in its stack size,
3220 skip the part of stack space corresponding to the registers.
3221 Otherwise, start copying to the beginning of the stack space,
3222 by setting SKIP to 0. */
3223 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3224
3225 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3226 x = validize_mem (force_const_mem (mode, x));
3227
3228 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3229 SUBREGs of such registers are not allowed. */
3230 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3231 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3232 x = copy_to_reg (x);
3233
3234 /* Loop over all the words allocated on the stack for this arg. */
3235 /* We can do it by words, because any scalar bigger than a word
3236 has a size a multiple of a word. */
3237 #ifndef PUSH_ARGS_REVERSED
3238 for (i = not_stack; i < size; i++)
3239 #else
3240 for (i = size - 1; i >= not_stack; i--)
3241 #endif
3242 if (i >= not_stack + offset)
3243 emit_push_insn (operand_subword_force (x, i, mode),
3244 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3245 0, args_addr,
3246 GEN_INT (args_offset + ((i - not_stack + skip)
3247 * UNITS_PER_WORD)),
3248 reg_parm_stack_space, alignment_pad);
3249 }
3250 else
3251 {
3252 rtx addr;
3253 rtx target = NULL_RTX;
3254
3255 /* Push padding now if padding above and stack grows down,
3256 or if padding below and stack grows up.
3257 But if space already allocated, this has already been done. */
3258 if (extra && args_addr == 0
3259 && where_pad != none && where_pad != stack_direction)
3260 anti_adjust_stack (GEN_INT (extra));
3261
3262 #ifdef PUSH_ROUNDING
3263 if (args_addr == 0 && PUSH_ARGS)
3264 {
3265 addr = gen_push_operand ();
3266 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3267 }
3268 else
3269 #endif
3270 {
3271 if (GET_CODE (args_so_far) == CONST_INT)
3272 addr
3273 = memory_address (mode,
3274 plus_constant (args_addr,
3275 INTVAL (args_so_far)));
3276 else
3277 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3278 args_so_far));
3279 target = addr;
3280 }
3281
3282 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3283
3284 if (current_function_check_memory_usage && ! in_check_memory_usage)
3285 {
3286 in_check_memory_usage = 1;
3287 if (target == 0)
3288 target = get_push_address (GET_MODE_SIZE (mode));
3289
3290 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3291 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3292 target, Pmode,
3293 XEXP (x, 0), Pmode,
3294 GEN_INT (GET_MODE_SIZE (mode)),
3295 TYPE_MODE (sizetype));
3296 else
3297 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3298 target, Pmode,
3299 GEN_INT (GET_MODE_SIZE (mode)),
3300 TYPE_MODE (sizetype),
3301 GEN_INT (MEMORY_USE_RW),
3302 TYPE_MODE (integer_type_node));
3303 in_check_memory_usage = 0;
3304 }
3305 }
3306
3307 ret:
3308 /* If part should go in registers, copy that part
3309 into the appropriate registers. Do this now, at the end,
3310 since mem-to-mem copies above may do function calls. */
3311 if (partial > 0 && reg != 0)
3312 {
3313 /* Handle calls that pass values in multiple non-contiguous locations.
3314 The Irix 6 ABI has examples of this. */
3315 if (GET_CODE (reg) == PARALLEL)
3316 emit_group_load (reg, x, -1, align); /* ??? size? */
3317 else
3318 move_block_to_reg (REGNO (reg), x, partial, mode);
3319 }
3320
3321 if (extra && args_addr == 0 && where_pad == stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3323
3324 if (alignment_pad)
3325 anti_adjust_stack (alignment_pad);
3326 }
3327 \f
3328 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3329 operations. */
3330
3331 static rtx
3332 get_subtarget (x)
3333 rtx x;
3334 {
3335 return ((x == 0
3336 /* Only registers can be subtargets. */
3337 || GET_CODE (x) != REG
3338 /* If the register is readonly, it can't be set more than once. */
3339 || RTX_UNCHANGING_P (x)
3340 /* Don't use hard regs to avoid extending their life. */
3341 || REGNO (x) < FIRST_PSEUDO_REGISTER
3342 /* Avoid subtargets inside loops,
3343 since they hide some invariant expressions. */
3344 || preserve_subexpressions_p ())
3345 ? 0 : x);
3346 }
3347
3348 /* Expand an assignment that stores the value of FROM into TO.
3349 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3350 (This may contain a QUEUED rtx;
3351 if the value is constant, this rtx is a constant.)
3352 Otherwise, the returned value is NULL_RTX.
3353
3354 SUGGEST_REG is no longer actually used.
3355 It used to mean, copy the value through a register
3356 and return that register, if that is possible.
3357 We now use WANT_VALUE to decide whether to do this. */
3358
3359 rtx
3360 expand_assignment (to, from, want_value, suggest_reg)
3361 tree to, from;
3362 int want_value;
3363 int suggest_reg ATTRIBUTE_UNUSED;
3364 {
3365 register rtx to_rtx = 0;
3366 rtx result;
3367
3368 /* Don't crash if the lhs of the assignment was erroneous. */
3369
3370 if (TREE_CODE (to) == ERROR_MARK)
3371 {
3372 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3373 return want_value ? result : NULL_RTX;
3374 }
3375
3376 /* Assignment of a structure component needs special treatment
3377 if the structure component's rtx is not simply a MEM.
3378 Assignment of an array element at a constant index, and assignment of
3379 an array element in an unaligned packed structure field, has the same
3380 problem. */
3381
3382 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3383 || TREE_CODE (to) == ARRAY_REF)
3384 {
3385 enum machine_mode mode1;
3386 HOST_WIDE_INT bitsize, bitpos;
3387 tree offset;
3388 int unsignedp;
3389 int volatilep = 0;
3390 tree tem;
3391 unsigned int alignment;
3392
3393 push_temp_slots ();
3394 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3395 &unsignedp, &volatilep, &alignment);
3396
3397 /* If we are going to use store_bit_field and extract_bit_field,
3398 make sure to_rtx will be safe for multiple use. */
3399
3400 if (mode1 == VOIDmode && want_value)
3401 tem = stabilize_reference (tem);
3402
3403 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3404 if (offset != 0)
3405 {
3406 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3407
3408 if (GET_CODE (to_rtx) != MEM)
3409 abort ();
3410
3411 if (GET_MODE (offset_rtx) != ptr_mode)
3412 {
3413 #ifdef POINTERS_EXTEND_UNSIGNED
3414 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3415 #else
3416 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3417 #endif
3418 }
3419
3420 /* A constant address in TO_RTX can have VOIDmode; we must not try
3421 to call force_reg in that case, so avoid it. */
3422 if (GET_CODE (to_rtx) == MEM
3423 && GET_MODE (to_rtx) == BLKmode
3424 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3425 && bitsize
3426 && (bitpos % bitsize) == 0
3427 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3428 && alignment == GET_MODE_ALIGNMENT (mode1))
3429 {
3430 rtx temp = change_address (to_rtx, mode1,
3431 plus_constant (XEXP (to_rtx, 0),
3432 (bitpos /
3433 BITS_PER_UNIT)));
3434 if (GET_CODE (XEXP (temp, 0)) == REG)
3435 to_rtx = temp;
3436 else
3437 to_rtx = change_address (to_rtx, mode1,
3438 force_reg (GET_MODE (XEXP (temp, 0)),
3439 XEXP (temp, 0)));
3440 bitpos = 0;
3441 }
3442
3443 to_rtx = change_address (to_rtx, VOIDmode,
3444 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3445 force_reg (ptr_mode,
3446 offset_rtx)));
3447 }
3448
3449 if (volatilep)
3450 {
3451 if (GET_CODE (to_rtx) == MEM)
3452 {
3453 /* When the offset is zero, to_rtx is the address of the
3454 structure we are storing into, and hence may be shared.
3455 We must make a new MEM before setting the volatile bit. */
3456 if (offset == 0)
3457 to_rtx = copy_rtx (to_rtx);
3458
3459 MEM_VOLATILE_P (to_rtx) = 1;
3460 }
3461 #if 0 /* This was turned off because, when a field is volatile
3462 in an object which is not volatile, the object may be in a register,
3463 and then we would abort over here. */
3464 else
3465 abort ();
3466 #endif
3467 }
3468
3469 if (TREE_CODE (to) == COMPONENT_REF
3470 && TREE_READONLY (TREE_OPERAND (to, 1)))
3471 {
3472 if (offset == 0)
3473 to_rtx = copy_rtx (to_rtx);
3474
3475 RTX_UNCHANGING_P (to_rtx) = 1;
3476 }
3477
3478 /* Check the access. */
3479 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3480 {
3481 rtx to_addr;
3482 int size;
3483 int best_mode_size;
3484 enum machine_mode best_mode;
3485
3486 best_mode = get_best_mode (bitsize, bitpos,
3487 TYPE_ALIGN (TREE_TYPE (tem)),
3488 mode1, volatilep);
3489 if (best_mode == VOIDmode)
3490 best_mode = QImode;
3491
3492 best_mode_size = GET_MODE_BITSIZE (best_mode);
3493 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3494 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3495 size *= GET_MODE_SIZE (best_mode);
3496
3497 /* Check the access right of the pointer. */
3498 if (size)
3499 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3500 to_addr, Pmode,
3501 GEN_INT (size), TYPE_MODE (sizetype),
3502 GEN_INT (MEMORY_USE_WO),
3503 TYPE_MODE (integer_type_node));
3504 }
3505
3506 /* If this is a varying-length object, we must get the address of
3507 the source and do an explicit block move. */
3508 if (bitsize < 0)
3509 {
3510 unsigned int from_align;
3511 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3512 rtx inner_to_rtx
3513 = change_address (to_rtx, VOIDmode,
3514 plus_constant (XEXP (to_rtx, 0),
3515 bitpos / BITS_PER_UNIT));
3516
3517 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3518 MIN (alignment, from_align));
3519 free_temp_slots ();
3520 pop_temp_slots ();
3521 return to_rtx;
3522 }
3523 else
3524 {
3525 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3526 (want_value
3527 /* Spurious cast for HPUX compiler. */
3528 ? ((enum machine_mode)
3529 TYPE_MODE (TREE_TYPE (to)))
3530 : VOIDmode),
3531 unsignedp,
3532 alignment,
3533 int_size_in_bytes (TREE_TYPE (tem)),
3534 get_alias_set (to));
3535
3536 preserve_temp_slots (result);
3537 free_temp_slots ();
3538 pop_temp_slots ();
3539
3540 /* If the value is meaningful, convert RESULT to the proper mode.
3541 Otherwise, return nothing. */
3542 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3543 TYPE_MODE (TREE_TYPE (from)),
3544 result,
3545 TREE_UNSIGNED (TREE_TYPE (to)))
3546 : NULL_RTX);
3547 }
3548 }
3549
3550 /* If the rhs is a function call and its value is not an aggregate,
3551 call the function before we start to compute the lhs.
3552 This is needed for correct code for cases such as
3553 val = setjmp (buf) on machines where reference to val
3554 requires loading up part of an address in a separate insn.
3555
3556 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3557 since it might be a promoted variable where the zero- or sign- extension
3558 needs to be done. Handling this in the normal way is safe because no
3559 computation is done before the call. */
3560 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3562 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3563 && GET_CODE (DECL_RTL (to)) == REG))
3564 {
3565 rtx value;
3566
3567 push_temp_slots ();
3568 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3569 if (to_rtx == 0)
3570 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3571
3572 /* Handle calls that return values in multiple non-contiguous locations.
3573 The Irix 6 ABI has examples of this. */
3574 if (GET_CODE (to_rtx) == PARALLEL)
3575 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3576 TYPE_ALIGN (TREE_TYPE (from)));
3577 else if (GET_MODE (to_rtx) == BLKmode)
3578 emit_block_move (to_rtx, value, expr_size (from),
3579 TYPE_ALIGN (TREE_TYPE (from)));
3580 else
3581 {
3582 #ifdef POINTERS_EXTEND_UNSIGNED
3583 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3584 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3585 value = convert_memory_address (GET_MODE (to_rtx), value);
3586 #endif
3587 emit_move_insn (to_rtx, value);
3588 }
3589 preserve_temp_slots (to_rtx);
3590 free_temp_slots ();
3591 pop_temp_slots ();
3592 return want_value ? to_rtx : NULL_RTX;
3593 }
3594
3595 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3596 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3597
3598 if (to_rtx == 0)
3599 {
3600 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3601 if (GET_CODE (to_rtx) == MEM)
3602 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3603 }
3604
3605 /* Don't move directly into a return register. */
3606 if (TREE_CODE (to) == RESULT_DECL
3607 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3608 {
3609 rtx temp;
3610
3611 push_temp_slots ();
3612 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3613
3614 if (GET_CODE (to_rtx) == PARALLEL)
3615 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3616 TYPE_ALIGN (TREE_TYPE (from)));
3617 else
3618 emit_move_insn (to_rtx, temp);
3619
3620 preserve_temp_slots (to_rtx);
3621 free_temp_slots ();
3622 pop_temp_slots ();
3623 return want_value ? to_rtx : NULL_RTX;
3624 }
3625
3626 /* In case we are returning the contents of an object which overlaps
3627 the place the value is being stored, use a safe function when copying
3628 a value through a pointer into a structure value return block. */
3629 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3630 && current_function_returns_struct
3631 && !current_function_returns_pcc_struct)
3632 {
3633 rtx from_rtx, size;
3634
3635 push_temp_slots ();
3636 size = expr_size (from);
3637 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3638 EXPAND_MEMORY_USE_DONT);
3639
3640 /* Copy the rights of the bitmap. */
3641 if (current_function_check_memory_usage)
3642 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3643 XEXP (to_rtx, 0), Pmode,
3644 XEXP (from_rtx, 0), Pmode,
3645 convert_to_mode (TYPE_MODE (sizetype),
3646 size, TREE_UNSIGNED (sizetype)),
3647 TYPE_MODE (sizetype));
3648
3649 #ifdef TARGET_MEM_FUNCTIONS
3650 emit_library_call (memcpy_libfunc, 0,
3651 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3652 XEXP (from_rtx, 0), Pmode,
3653 convert_to_mode (TYPE_MODE (sizetype),
3654 size, TREE_UNSIGNED (sizetype)),
3655 TYPE_MODE (sizetype));
3656 #else
3657 emit_library_call (bcopy_libfunc, 0,
3658 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3659 XEXP (to_rtx, 0), Pmode,
3660 convert_to_mode (TYPE_MODE (integer_type_node),
3661 size, TREE_UNSIGNED (integer_type_node)),
3662 TYPE_MODE (integer_type_node));
3663 #endif
3664
3665 preserve_temp_slots (to_rtx);
3666 free_temp_slots ();
3667 pop_temp_slots ();
3668 return want_value ? to_rtx : NULL_RTX;
3669 }
3670
3671 /* Compute FROM and store the value in the rtx we got. */
3672
3673 push_temp_slots ();
3674 result = store_expr (from, to_rtx, want_value);
3675 preserve_temp_slots (result);
3676 free_temp_slots ();
3677 pop_temp_slots ();
3678 return want_value ? result : NULL_RTX;
3679 }
3680
3681 /* Generate code for computing expression EXP,
3682 and storing the value into TARGET.
3683 TARGET may contain a QUEUED rtx.
3684
3685 If WANT_VALUE is nonzero, return a copy of the value
3686 not in TARGET, so that we can be sure to use the proper
3687 value in a containing expression even if TARGET has something
3688 else stored in it. If possible, we copy the value through a pseudo
3689 and return that pseudo. Or, if the value is constant, we try to
3690 return the constant. In some cases, we return a pseudo
3691 copied *from* TARGET.
3692
3693 If the mode is BLKmode then we may return TARGET itself.
3694 It turns out that in BLKmode it doesn't cause a problem,
3695 because C has no operators that could combine two different
3696 assignments into the same BLKmode object with different values
3697 with no sequence point. Will other languages need this to
3698 be more thorough?
3699
3700 If WANT_VALUE is 0, we return NULL, to make sure
3701 to catch quickly any cases where the caller uses the value
3702 and fails to set WANT_VALUE. */
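/* For example, in a chained assignment such as

	a = b = c;

   the store into B is expanded with WANT_VALUE nonzero; the value returned
   (typically a pseudo register holding C's value) is then what gets stored
   into A, so the correct value is used even if B's storage changes later.  */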
3703
3704 rtx
3705 store_expr (exp, target, want_value)
3706 register tree exp;
3707 register rtx target;
3708 int want_value;
3709 {
3710 register rtx temp;
3711 int dont_return_target = 0;
3712
3713 if (TREE_CODE (exp) == COMPOUND_EXPR)
3714 {
3715 /* Perform first part of compound expression, then assign from second
3716 part. */
3717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3718 emit_queue ();
3719 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3720 }
3721 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3722 {
3723 /* For conditional expression, get safe form of the target. Then
3724 test the condition, doing the appropriate assignment on either
3725 side. This avoids the creation of unnecessary temporaries.
3726 For non-BLKmode, it is more efficient not to do this. */
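      /* For example, for a structure (BLKmode) assignment such as

		s = flag ? s1 : s2;

	 we jump on FLAG and store either S1 or S2 directly into S, instead
	 of first building the chosen value in a temporary.  */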
3727
3728 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3729
3730 emit_queue ();
3731 target = protect_from_queue (target, 1);
3732
3733 do_pending_stack_adjust ();
3734 NO_DEFER_POP;
3735 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3736 start_cleanup_deferral ();
3737 store_expr (TREE_OPERAND (exp, 1), target, 0);
3738 end_cleanup_deferral ();
3739 emit_queue ();
3740 emit_jump_insn (gen_jump (lab2));
3741 emit_barrier ();
3742 emit_label (lab1);
3743 start_cleanup_deferral ();
3744 store_expr (TREE_OPERAND (exp, 2), target, 0);
3745 end_cleanup_deferral ();
3746 emit_queue ();
3747 emit_label (lab2);
3748 OK_DEFER_POP;
3749
3750 return want_value ? target : NULL_RTX;
3751 }
3752 else if (queued_subexp_p (target))
3753 /* If target contains a postincrement, let's not risk
3754 using it as the place to generate the rhs. */
3755 {
3756 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3757 {
3758 /* Expand EXP into a new pseudo. */
3759 temp = gen_reg_rtx (GET_MODE (target));
3760 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3761 }
3762 else
3763 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3764
3765 /* If target is volatile, ANSI requires accessing the value
3766 *from* the target, if it is accessed. So make that happen.
3767 In no case return the target itself. */
3768 if (! MEM_VOLATILE_P (target) && want_value)
3769 dont_return_target = 1;
3770 }
3771 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3772 && GET_MODE (target) != BLKmode)
3773 /* If target is in memory and caller wants value in a register instead,
3774 arrange that. Pass TARGET as target for expand_expr so that,
3775 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3776 We know expand_expr will not use the target in that case.
3777 Don't do this if TARGET is volatile because we are supposed
3778 to write it and then read it. */
3779 {
3780 temp = expand_expr (exp, target, GET_MODE (target), 0);
3781 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3782 temp = copy_to_reg (temp);
3783 dont_return_target = 1;
3784 }
3785 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3786 /* If this is a scalar in a register that is stored in a wider mode
3787 than the declared mode, compute the result into its declared mode
3788 and then convert to the wider mode. Our value is the computed
3789 expression. */
3790 {
3791 /* If we don't want a value, we can do the conversion inside EXP,
3792 which will often result in some optimizations. Do the conversion
3793 in two steps: first change the signedness, if needed, then
3794 the extend. But don't do this if the type of EXP is a subtype
3795 of something else since then the conversion might involve
3796 more than just converting modes. */
3797 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3798 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3799 {
3800 if (TREE_UNSIGNED (TREE_TYPE (exp))
3801 != SUBREG_PROMOTED_UNSIGNED_P (target))
3802 exp
3803 = convert
3804 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3805 TREE_TYPE (exp)),
3806 exp);
3807
3808 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3809 SUBREG_PROMOTED_UNSIGNED_P (target)),
3810 exp);
3811 }
3812
3813 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3814
3815 /* If TEMP is a volatile MEM and we want a result value, make
3816 the access now so it gets done only once. Likewise if
3817 it contains TARGET. */
3818 if (GET_CODE (temp) == MEM && want_value
3819 && (MEM_VOLATILE_P (temp)
3820 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3821 temp = copy_to_reg (temp);
3822
3823 /* If TEMP is a VOIDmode constant, use convert_modes to make
3824 sure that we properly convert it. */
3825 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3826 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3827 TYPE_MODE (TREE_TYPE (exp)), temp,
3828 SUBREG_PROMOTED_UNSIGNED_P (target));
3829
3830 convert_move (SUBREG_REG (target), temp,
3831 SUBREG_PROMOTED_UNSIGNED_P (target));
3832
3833 /* If we promoted a constant, change the mode back down to match
3834 target. Otherwise, the caller might get confused by a result whose
3835 mode is larger than expected. */
3836
3837 if (want_value && GET_MODE (temp) != GET_MODE (target)
3838 && GET_MODE (temp) != VOIDmode)
3839 {
3840 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3841 SUBREG_PROMOTED_VAR_P (temp) = 1;
3842 SUBREG_PROMOTED_UNSIGNED_P (temp)
3843 = SUBREG_PROMOTED_UNSIGNED_P (target);
3844 }
3845
3846 return want_value ? temp : NULL_RTX;
3847 }
3848 else
3849 {
3850 temp = expand_expr (exp, target, GET_MODE (target), 0);
3851 /* Return TARGET if it's a specified hardware register.
3852 If TARGET is a volatile mem ref, either return TARGET
3853 or return a reg copied *from* TARGET; ANSI requires this.
3854
3855 Otherwise, if TEMP is not TARGET, return TEMP
3856 if it is constant (for efficiency),
3857 or if we really want the correct value. */
3858 if (!(target && GET_CODE (target) == REG
3859 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3860 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3861 && ! rtx_equal_p (temp, target)
3862 && (CONSTANT_P (temp) || want_value))
3863 dont_return_target = 1;
3864 }
3865
3866 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3867 the same as that of TARGET, adjust the constant. This is needed, for
3868 example, in case it is a CONST_DOUBLE and we want only a word-sized
3869 value. */
3870 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3871 && TREE_CODE (exp) != ERROR_MARK
3872 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3873 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3874 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3875
3876 if (current_function_check_memory_usage
3877 && GET_CODE (target) == MEM
3878 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3879 {
3880 if (GET_CODE (temp) == MEM)
3881 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3882 XEXP (target, 0), Pmode,
3883 XEXP (temp, 0), Pmode,
3884 expr_size (exp), TYPE_MODE (sizetype));
3885 else
3886 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3887 XEXP (target, 0), Pmode,
3888 expr_size (exp), TYPE_MODE (sizetype),
3889 GEN_INT (MEMORY_USE_WO),
3890 TYPE_MODE (integer_type_node));
3891 }
3892
3893 /* If value was not generated in the target, store it there.
3894 Convert the value to TARGET's type first if necessary. */
3895 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3896 one or both of them are volatile memory refs, we have to distinguish
3897 two cases:
3898 - expand_expr has used TARGET. In this case, we must not generate
3899 another copy. This can be detected by TARGET being equal according
3900 to == .
3901 - expand_expr has not used TARGET - that means that the source just
3902 happens to have the same RTX form. Since temp will have been created
3903 by expand_expr, it will compare unequal according to == .
3904 We must generate a copy in this case, to reach the correct number
3905 of volatile memory references. */
3906
3907 if ((! rtx_equal_p (temp, target)
3908 || (temp != target && (side_effects_p (temp)
3909 || side_effects_p (target))))
3910 && TREE_CODE (exp) != ERROR_MARK)
3911 {
3912 target = protect_from_queue (target, 1);
3913 if (GET_MODE (temp) != GET_MODE (target)
3914 && GET_MODE (temp) != VOIDmode)
3915 {
3916 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3917 if (dont_return_target)
3918 {
3919 /* In this case, we will return TEMP,
3920 so make sure it has the proper mode.
3921 But don't forget to store the value into TARGET. */
3922 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3923 emit_move_insn (target, temp);
3924 }
3925 else
3926 convert_move (target, temp, unsignedp);
3927 }
3928
3929 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3930 {
3931 /* Handle copying a string constant into an array.
3932 The string constant may be shorter than the array.
3933 So copy just the string's actual length, and clear the rest. */
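	  /* For example, for an automatic array initialized as

		char buf[8] = "abc";

	     we copy the four bytes of the string constant (including the
	     terminating null) into BUF and clear the remaining four bytes.  */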
3934 rtx size;
3935 rtx addr;
3936
3937 /* Get the size of the data type of the string,
3938 which is actually the size of the target. */
3939 size = expr_size (exp);
3940 if (GET_CODE (size) == CONST_INT
3941 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3942 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3943 else
3944 {
3945 /* Compute the size of the data to copy from the string. */
3946 tree copy_size
3947 = size_binop (MIN_EXPR,
3948 make_tree (sizetype, size),
3949 size_int (TREE_STRING_LENGTH (exp)));
3950 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3951 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3952 VOIDmode, 0);
3953 rtx label = 0;
3954
3955 /* Copy that much. */
3956 emit_block_move (target, temp, copy_size_rtx,
3957 TYPE_ALIGN (TREE_TYPE (exp)));
3958
3959 /* Figure out how much is left in TARGET that we have to clear.
3960 Do all calculations in ptr_mode. */
3961
3962 addr = XEXP (target, 0);
3963 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3964
3965 if (GET_CODE (copy_size_rtx) == CONST_INT)
3966 {
3967 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3968 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3969 align = MIN (align, (BITS_PER_UNIT
3970 * (INTVAL (copy_size_rtx)
3971 & - INTVAL (copy_size_rtx))));
3972 }
3973 else
3974 {
3975 addr = force_reg (ptr_mode, addr);
3976 addr = expand_binop (ptr_mode, add_optab, addr,
3977 copy_size_rtx, NULL_RTX, 0,
3978 OPTAB_LIB_WIDEN);
3979
3980 size = expand_binop (ptr_mode, sub_optab, size,
3981 copy_size_rtx, NULL_RTX, 0,
3982 OPTAB_LIB_WIDEN);
3983
3984 align = BITS_PER_UNIT;
3985 label = gen_label_rtx ();
3986 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3987 GET_MODE (size), 0, 0, label);
3988 }
3989 align = MIN (align, expr_align (copy_size));
3990
3991 if (size != const0_rtx)
3992 {
3993 /* Be sure we can write on ADDR. */
3994 if (current_function_check_memory_usage)
3995 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3996 addr, Pmode,
3997 size, TYPE_MODE (sizetype),
3998 GEN_INT (MEMORY_USE_WO),
3999 TYPE_MODE (integer_type_node));
4000 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
4001 }
4002
4003 if (label)
4004 emit_label (label);
4005 }
4006 }
4007 /* Handle calls that return values in multiple non-contiguous locations.
4008 The Irix 6 ABI has examples of this. */
4009 else if (GET_CODE (target) == PARALLEL)
4010 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4011 TYPE_ALIGN (TREE_TYPE (exp)));
4012 else if (GET_MODE (temp) == BLKmode)
4013 emit_block_move (target, temp, expr_size (exp),
4014 TYPE_ALIGN (TREE_TYPE (exp)));
4015 else
4016 emit_move_insn (target, temp);
4017 }
4018
4019 /* If we don't want a value, return NULL_RTX. */
4020 if (! want_value)
4021 return NULL_RTX;
4022
4023 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4024 ??? The latter test doesn't seem to make sense. */
4025 else if (dont_return_target && GET_CODE (temp) != MEM)
4026 return temp;
4027
4028 /* Return TARGET itself if it is a hard register. */
4029 else if (want_value && GET_MODE (target) != BLKmode
4030 && ! (GET_CODE (target) == REG
4031 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4032 return copy_to_reg (target);
4033
4034 else
4035 return target;
4036 }
4037 \f
4038 /* Return 1 if EXP just contains zeros. */
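/* For example, the CONSTRUCTOR for

	struct { int i; double d; int v[2]; } x = { 0, 0.0, { 0, 0 } };

   contains only zero elements, so is_zeros_p returns 1 for it.  */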
4039
4040 static int
4041 is_zeros_p (exp)
4042 tree exp;
4043 {
4044 tree elt;
4045
4046 switch (TREE_CODE (exp))
4047 {
4048 case CONVERT_EXPR:
4049 case NOP_EXPR:
4050 case NON_LVALUE_EXPR:
4051 return is_zeros_p (TREE_OPERAND (exp, 0));
4052
4053 case INTEGER_CST:
4054 return integer_zerop (exp);
4055
4056 case COMPLEX_CST:
4057 return
4058 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4059
4060 case REAL_CST:
4061 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4062
4063 case CONSTRUCTOR:
4064 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4065 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4066 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4067 if (! is_zeros_p (TREE_VALUE (elt)))
4068 return 0;
4069
4070 return 1;
4071
4072 default:
4073 return 0;
4074 }
4075 }
4076
4077 /* Return 1 if EXP contains mostly (3/4) zeros. */
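/* For example, a CONSTRUCTOR whose explicit elements are at least
   three-quarters zero, such as { 9, 0, 0, 0 }, counts as mostly zero;
   missing (implicitly zero) elements are not considered here.  */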
4078
4079 static int
4080 mostly_zeros_p (exp)
4081 tree exp;
4082 {
4083 if (TREE_CODE (exp) == CONSTRUCTOR)
4084 {
4085 int elts = 0, zeros = 0;
4086 tree elt = CONSTRUCTOR_ELTS (exp);
4087 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4088 {
4089 /* If there are no ranges of true bits, it is all zero. */
4090 return elt == NULL_TREE;
4091 }
4092 for (; elt; elt = TREE_CHAIN (elt))
4093 {
4094 /* We do not handle the case where the index is a RANGE_EXPR,
4095 so the statistic will be somewhat inaccurate.
4096 We do make a more accurate count in store_constructor itself,
4097 but since this function is used only for nested array elements,
4098 this should be close enough. */
4099 if (mostly_zeros_p (TREE_VALUE (elt)))
4100 zeros++;
4101 elts++;
4102 }
4103
4104 return 4 * zeros >= 3 * elts;
4105 }
4106
4107 return is_zeros_p (exp);
4108 }
4109 \f
4110 /* Helper function for store_constructor.
4111 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4112 TYPE is the type of the CONSTRUCTOR, not the element type.
4113 ALIGN and CLEARED are as for store_constructor.
4114
4115 This provides a recursive shortcut back to store_constructor when it isn't
4116 necessary to go through store_field. This is so that we can pass through
4117 the cleared field to let store_constructor know that we may not have to
4118 clear a substructure if the outer structure has already been cleared. */
4119
4120 static void
4121 store_constructor_field (target, bitsize, bitpos,
4122 mode, exp, type, align, cleared)
4123 rtx target;
4124 unsigned HOST_WIDE_INT bitsize;
4125 HOST_WIDE_INT bitpos;
4126 enum machine_mode mode;
4127 tree exp, type;
4128 unsigned int align;
4129 int cleared;
4130 {
4131 if (TREE_CODE (exp) == CONSTRUCTOR
4132 && bitpos % BITS_PER_UNIT == 0
4133 /* If we have a non-zero bitpos for a register target, then we just
4134 let store_field do the bitfield handling. This is unlikely to
4135 generate unnecessary clear instructions anyway. */
4136 && (bitpos == 0 || GET_CODE (target) == MEM))
4137 {
4138 if (bitpos != 0)
4139 target
4140 = change_address (target,
4141 GET_MODE (target) == BLKmode
4142 || 0 != (bitpos
4143 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4144 ? BLKmode : VOIDmode,
4145 plus_constant (XEXP (target, 0),
4146 bitpos / BITS_PER_UNIT));
4147 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4148 }
4149 else
4150 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4151 int_size_in_bytes (type), 0);
4152 }
4153
4154 /* Store the value of constructor EXP into the rtx TARGET.
4155 TARGET is either a REG or a MEM.
4156 ALIGN is the maximum known alignment for TARGET.
4157 CLEARED is true if TARGET is known to have been zero'd.
4158 SIZE is the number of bytes of TARGET we are allowed to modify: this
4159 may not be the same as the size of EXP if we are assigning to a field
4160 which has been packed to exclude padding bits. */
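/* For example, for

	struct { int a, b, c; } x = { 1 };

   the CONSTRUCTOR mentions fewer fields than the structure has, so the
   code below clears all of X first and then stores 1 into X.a.  */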
4161
4162 static void
4163 store_constructor (exp, target, align, cleared, size)
4164 tree exp;
4165 rtx target;
4166 unsigned int align;
4167 int cleared;
4168 HOST_WIDE_INT size;
4169 {
4170 tree type = TREE_TYPE (exp);
4171 #ifdef WORD_REGISTER_OPERATIONS
4172 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4173 #endif
4174
4175 /* We know our target cannot conflict, since safe_from_p has been called. */
4176 #if 0
4177 /* Don't try copying piece by piece into a hard register
4178 since that is vulnerable to being clobbered by EXP.
4179 Instead, construct in a pseudo register and then copy it all. */
4180 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4181 {
4182 rtx temp = gen_reg_rtx (GET_MODE (target));
4183 store_constructor (exp, temp, align, cleared, size);
4184 emit_move_insn (target, temp);
4185 return;
4186 }
4187 #endif
4188
4189 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4190 || TREE_CODE (type) == QUAL_UNION_TYPE)
4191 {
4192 register tree elt;
4193
4194 /* Inform later passes that the whole union value is dead. */
4195 if ((TREE_CODE (type) == UNION_TYPE
4196 || TREE_CODE (type) == QUAL_UNION_TYPE)
4197 && ! cleared)
4198 {
4199 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4200
4201 /* If the constructor is empty, clear the union. */
4202 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4203 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4204 }
4205
4206 /* If we are building a static constructor into a register,
4207 set the initial value as zero so we can fold the value into
4208 a constant. But if more than one register is involved,
4209 this probably loses. */
4210 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4211 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4212 {
4213 if (! cleared)
4214 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4215
4216 cleared = 1;
4217 }
4218
4219 /* If the constructor has fewer fields than the structure
4220 or if we are initializing the structure to mostly zeros,
4221 clear the whole structure first. */
4222 else if (size > 0
4223 && ((list_length (CONSTRUCTOR_ELTS (exp))
4224 != fields_length (type))
4225 || mostly_zeros_p (exp)))
4226 {
4227 if (! cleared)
4228 clear_storage (target, GEN_INT (size), align);
4229
4230 cleared = 1;
4231 }
4232 else if (! cleared)
4233 /* Inform later passes that the old value is dead. */
4234 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4235
4236 /* Store each element of the constructor into
4237 the corresponding field of TARGET. */
4238
4239 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4240 {
4241 register tree field = TREE_PURPOSE (elt);
4242 #ifdef WORD_REGISTER_OPERATIONS
4243 tree value = TREE_VALUE (elt);
4244 #endif
4245 register enum machine_mode mode;
4246 HOST_WIDE_INT bitsize;
4247 HOST_WIDE_INT bitpos = 0;
4248 int unsignedp;
4249 tree offset;
4250 rtx to_rtx = target;
4251
4252 /* Just ignore missing fields.
4253 We cleared the whole structure, above,
4254 if any fields are missing. */
4255 if (field == 0)
4256 continue;
4257
4258 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4259 continue;
4260
4261 if (host_integerp (DECL_SIZE (field), 1))
4262 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4263 else
4264 bitsize = -1;
4265
4266 unsignedp = TREE_UNSIGNED (field);
4267 mode = DECL_MODE (field);
4268 if (DECL_BIT_FIELD (field))
4269 mode = VOIDmode;
4270
4271 offset = DECL_FIELD_OFFSET (field);
4272 if (host_integerp (offset, 0)
4273 && host_integerp (bit_position (field), 0))
4274 {
4275 bitpos = int_bit_position (field);
4276 offset = 0;
4277 }
4278 else
4279 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4280
4281 if (offset)
4282 {
4283 rtx offset_rtx;
4284
4285 if (contains_placeholder_p (offset))
4286 offset = build (WITH_RECORD_EXPR, sizetype,
4287 offset, make_tree (TREE_TYPE (exp), target));
4288
4289 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4290 if (GET_CODE (to_rtx) != MEM)
4291 abort ();
4292
4293 if (GET_MODE (offset_rtx) != ptr_mode)
4294 {
4295 #ifdef POINTERS_EXTEND_UNSIGNED
4296 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4297 #else
4298 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4299 #endif
4300 }
4301
4302 to_rtx
4303 = change_address (to_rtx, VOIDmode,
4304 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4305 force_reg (ptr_mode,
4306 offset_rtx)));
4307 align = DECL_OFFSET_ALIGN (field);
4308 }
4309
4310 if (TREE_READONLY (field))
4311 {
4312 if (GET_CODE (to_rtx) == MEM)
4313 to_rtx = copy_rtx (to_rtx);
4314
4315 RTX_UNCHANGING_P (to_rtx) = 1;
4316 }
4317
4318 #ifdef WORD_REGISTER_OPERATIONS
4319 /* If this initializes a field that is smaller than a word, at the
4320 start of a word, try to widen it to a full word.
4321 This special case allows us to output C++ member function
4322 initializations in a form that the optimizers can understand. */
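	  /* For example, for a structure such as struct { short f, g; }
	     built in a register, a 16-bit integer constant stored into F at
	     bit position 0 is widened (and shifted, on big-endian targets)
	     so that it can be stored with a single word_mode move.  */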
4323 if (GET_CODE (target) == REG
4324 && bitsize < BITS_PER_WORD
4325 && bitpos % BITS_PER_WORD == 0
4326 && GET_MODE_CLASS (mode) == MODE_INT
4327 && TREE_CODE (value) == INTEGER_CST
4328 && exp_size >= 0
4329 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4330 {
4331 tree type = TREE_TYPE (value);
4332 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4333 {
4334 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4335 value = convert (type, value);
4336 }
4337 if (BYTES_BIG_ENDIAN)
4338 value
4339 = fold (build (LSHIFT_EXPR, type, value,
4340 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4341 bitsize = BITS_PER_WORD;
4342 mode = word_mode;
4343 }
4344 #endif
4345 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4346 TREE_VALUE (elt), type, align, cleared);
4347 }
4348 }
4349 else if (TREE_CODE (type) == ARRAY_TYPE)
4350 {
4351 register tree elt;
4352 register int i;
4353 int need_to_clear;
4354 tree domain = TYPE_DOMAIN (type);
4355 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4356 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4357 tree elttype = TREE_TYPE (type);
4358
4359 /* If the constructor has fewer elements than the array,
4360 clear the whole array first. Similarly if this is
4361 a static constructor of a non-BLKmode object. */
4362 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4363 need_to_clear = 1;
4364 else
4365 {
4366 HOST_WIDE_INT count = 0, zero_count = 0;
4367 need_to_clear = 0;
4368 /* This loop is a more accurate version of the loop in
4369 mostly_zeros_p (it handles RANGE_EXPR in an index).
4370 It is also needed to check for missing elements. */
4371 for (elt = CONSTRUCTOR_ELTS (exp);
4372 elt != NULL_TREE;
4373 elt = TREE_CHAIN (elt))
4374 {
4375 tree index = TREE_PURPOSE (elt);
4376 HOST_WIDE_INT this_node_count;
4377
4378 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4379 {
4380 tree lo_index = TREE_OPERAND (index, 0);
4381 tree hi_index = TREE_OPERAND (index, 1);
4382
4383 if (! host_integerp (lo_index, 1)
4384 || ! host_integerp (hi_index, 1))
4385 {
4386 need_to_clear = 1;
4387 break;
4388 }
4389
4390 this_node_count = (tree_low_cst (hi_index, 1)
4391 - tree_low_cst (lo_index, 1) + 1);
4392 }
4393 else
4394 this_node_count = 1;
4395 count += this_node_count;
4396 if (mostly_zeros_p (TREE_VALUE (elt)))
4397 zero_count += this_node_count;
4398 }
4399 /* Clear the entire array first if there are any missing elements,
4400 or if the incidence of zero elements is >= 75%. */
4401 if (count < maxelt - minelt + 1
4402 || 4 * zero_count >= 3 * count)
4403 need_to_clear = 1;
4404 }
4405 if (need_to_clear && size > 0)
4406 {
4407 if (! cleared)
4408 clear_storage (target, GEN_INT (size), align);
4409 cleared = 1;
4410 }
4411 else
4412 /* Inform later passes that the old value is dead. */
4413 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4414
4415 /* Store each element of the constructor into
4416 the corresponding element of TARGET, determined
4417 by counting the elements. */
4418 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4419 elt;
4420 elt = TREE_CHAIN (elt), i++)
4421 {
4422 register enum machine_mode mode;
4423 HOST_WIDE_INT bitsize;
4424 HOST_WIDE_INT bitpos;
4425 int unsignedp;
4426 tree value = TREE_VALUE (elt);
4427 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4428 tree index = TREE_PURPOSE (elt);
4429 rtx xtarget = target;
4430
4431 if (cleared && is_zeros_p (value))
4432 continue;
4433
4434 unsignedp = TREE_UNSIGNED (elttype);
4435 mode = TYPE_MODE (elttype);
4436 if (mode == BLKmode)
4437 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4438 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4439 : -1);
4440 else
4441 bitsize = GET_MODE_BITSIZE (mode);
4442
4443 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4444 {
4445 tree lo_index = TREE_OPERAND (index, 0);
4446 tree hi_index = TREE_OPERAND (index, 1);
4447 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4448 struct nesting *loop;
4449 HOST_WIDE_INT lo, hi, count;
4450 tree position;
4451
4452 /* If the range is constant and "small", unroll the loop. */
4453 if (host_integerp (lo_index, 0)
4454 && host_integerp (hi_index, 0)
4455 && (lo = tree_low_cst (lo_index, 0),
4456 hi = tree_low_cst (hi_index, 0),
4457 count = hi - lo + 1,
4458 (GET_CODE (target) != MEM
4459 || count <= 2
4460 || (host_integerp (TYPE_SIZE (elttype), 1)
4461 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4462 <= 40 * 8)))))
4463 {
4464 lo -= minelt; hi -= minelt;
4465 for (; lo <= hi; lo++)
4466 {
4467 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4468 store_constructor_field (target, bitsize, bitpos, mode,
4469 value, type, align, cleared);
4470 }
4471 }
4472 else
4473 {
4474 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4475 loop_top = gen_label_rtx ();
4476 loop_end = gen_label_rtx ();
4477
4478 unsignedp = TREE_UNSIGNED (domain);
4479
4480 index = build_decl (VAR_DECL, NULL_TREE, domain);
4481
4482 DECL_RTL (index) = index_r
4483 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4484 &unsignedp, 0));
4485
4486 if (TREE_CODE (value) == SAVE_EXPR
4487 && SAVE_EXPR_RTL (value) == 0)
4488 {
4489 /* Make sure value gets expanded once before the
4490 loop. */
4491 expand_expr (value, const0_rtx, VOIDmode, 0);
4492 emit_queue ();
4493 }
4494 store_expr (lo_index, index_r, 0);
4495 loop = expand_start_loop (0);
4496
4497 /* Assign value to element index. */
4498 position
4499 = convert (ssizetype,
4500 fold (build (MINUS_EXPR, TREE_TYPE (index),
4501 index, TYPE_MIN_VALUE (domain))));
4502 position = size_binop (MULT_EXPR, position,
4503 convert (ssizetype,
4504 TYPE_SIZE_UNIT (elttype)));
4505
4506 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4507 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4508 xtarget = change_address (target, mode, addr);
4509 if (TREE_CODE (value) == CONSTRUCTOR)
4510 store_constructor (value, xtarget, align, cleared,
4511 bitsize / BITS_PER_UNIT);
4512 else
4513 store_expr (value, xtarget, 0);
4514
4515 expand_exit_loop_if_false (loop,
4516 build (LT_EXPR, integer_type_node,
4517 index, hi_index));
4518
4519 expand_increment (build (PREINCREMENT_EXPR,
4520 TREE_TYPE (index),
4521 index, integer_one_node), 0, 0);
4522 expand_end_loop ();
4523 emit_label (loop_end);
4524 }
4525 }
4526 else if ((index != 0 && ! host_integerp (index, 0))
4527 || ! host_integerp (TYPE_SIZE (elttype), 1))
4528 {
4529 rtx pos_rtx, addr;
4530 tree position;
4531
4532 if (index == 0)
4533 index = ssize_int (1);
4534
4535 if (minelt)
4536 index = convert (ssizetype,
4537 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4538 TYPE_MIN_VALUE (domain))));
4539
4540 position = size_binop (MULT_EXPR, index,
4541 convert (ssizetype,
4542 TYPE_SIZE_UNIT (elttype)));
4543 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4544 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4545 xtarget = change_address (target, mode, addr);
4546 store_expr (value, xtarget, 0);
4547 }
4548 else
4549 {
4550 if (index != 0)
4551 bitpos = ((tree_low_cst (index, 0) - minelt)
4552 * tree_low_cst (TYPE_SIZE (elttype), 1));
4553 else
4554 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4555
4556 store_constructor_field (target, bitsize, bitpos, mode, value,
4557 type, align, cleared);
4558 }
4559 }
4560 }
4561
4562 /* Set constructor assignments.  */
4563 else if (TREE_CODE (type) == SET_TYPE)
4564 {
4565 tree elt = CONSTRUCTOR_ELTS (exp);
4566 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4567 tree domain = TYPE_DOMAIN (type);
4568 tree domain_min, domain_max, bitlength;
4569
4570 /* The default implementation strategy is to extract the constant
4571 parts of the constructor, use that to initialize the target,
4572 and then "or" in whatever non-constant ranges we need in addition.
4573
4574 If a large set is all zero or all ones, it is
4575 probably better to set it using memset (if available) or bzero.
4576 Also, if a large set has just a single range, it may also be
4577 better to first clear the set (using bzero/memset), and then
4578 set the bits we want. */
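      /* For example (assuming a front end with Pascal-style set types), a
	 constant constructor such as [2, 4..7] over a domain of 0..31 is
	 handled by building the word 0xf4 (on a little-endian target) and
	 storing it directly, while a non-constant range would be filled in
	 by calling __setbits.  */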
4579
4580 /* Check for all zeros. */
4581 if (elt == NULL_TREE && size > 0)
4582 {
4583 if (!cleared)
4584 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4585 return;
4586 }
4587
4588 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4589 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4590 bitlength = size_binop (PLUS_EXPR,
4591 size_diffop (domain_max, domain_min),
4592 ssize_int (1));
4593
4594 nbits = tree_low_cst (bitlength, 1);
4595
4596 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4597 are "complicated" (more than one range), initialize (the
4598 constant parts) by copying from a constant. */
4599 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4600 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4601 {
4602 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4603 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4604 char *bit_buffer = (char *) alloca (nbits);
4605 HOST_WIDE_INT word = 0;
4606 unsigned int bit_pos = 0;
4607 unsigned int ibit = 0;
4608 unsigned int offset = 0; /* In bytes from beginning of set. */
4609
4610 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4611 for (;;)
4612 {
4613 if (bit_buffer[ibit])
4614 {
4615 if (BYTES_BIG_ENDIAN)
4616 word |= (1 << (set_word_size - 1 - bit_pos));
4617 else
4618 word |= 1 << bit_pos;
4619 }
4620
4621 bit_pos++; ibit++;
4622 if (bit_pos >= set_word_size || ibit == nbits)
4623 {
4624 if (word != 0 || ! cleared)
4625 {
4626 rtx datum = GEN_INT (word);
4627 rtx to_rtx;
4628
4629 /* The assumption here is that it is safe to use
4630 XEXP if the set is multi-word, but not if
4631 it's single-word. */
4632 if (GET_CODE (target) == MEM)
4633 {
4634 to_rtx = plus_constant (XEXP (target, 0), offset);
4635 to_rtx = change_address (target, mode, to_rtx);
4636 }
4637 else if (offset == 0)
4638 to_rtx = target;
4639 else
4640 abort ();
4641 emit_move_insn (to_rtx, datum);
4642 }
4643
4644 if (ibit == nbits)
4645 break;
4646 word = 0;
4647 bit_pos = 0;
4648 offset += set_word_size / BITS_PER_UNIT;
4649 }
4650 }
4651 }
4652 else if (!cleared)
4653 /* Don't bother clearing storage if the set is all ones. */
4654 if (TREE_CHAIN (elt) != NULL_TREE
4655 || (TREE_PURPOSE (elt) == NULL_TREE
4656 ? nbits != 1
4657 : ( ! host_integerp (TREE_VALUE (elt), 0)
4658 || ! host_integerp (TREE_PURPOSE (elt), 0)
4659 || (tree_low_cst (TREE_VALUE (elt), 0)
4660 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4661 != (HOST_WIDE_INT) nbits))))
4662 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4663
4664 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4665 {
4666 /* start of range of element or NULL */
4667 tree startbit = TREE_PURPOSE (elt);
4668 /* end of range of element, or element value */
4669 tree endbit = TREE_VALUE (elt);
4670 #ifdef TARGET_MEM_FUNCTIONS
4671 HOST_WIDE_INT startb, endb;
4672 #endif
4673 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4674
4675 bitlength_rtx = expand_expr (bitlength,
4676 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4677
4678 /* handle non-range tuple element like [ expr ] */
4679 if (startbit == NULL_TREE)
4680 {
4681 startbit = save_expr (endbit);
4682 endbit = startbit;
4683 }
4684
4685 startbit = convert (sizetype, startbit);
4686 endbit = convert (sizetype, endbit);
4687 if (! integer_zerop (domain_min))
4688 {
4689 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4690 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4691 }
4692 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4693 EXPAND_CONST_ADDRESS);
4694 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4695 EXPAND_CONST_ADDRESS);
4696
4697 if (REG_P (target))
4698 {
4699 targetx = assign_stack_temp (GET_MODE (target),
4700 GET_MODE_SIZE (GET_MODE (target)),
4701 0);
4702 emit_move_insn (targetx, target);
4703 }
4704
4705 else if (GET_CODE (target) == MEM)
4706 targetx = target;
4707 else
4708 abort ();
4709
4710 #ifdef TARGET_MEM_FUNCTIONS
4711 /* Optimization: If startbit and endbit are
4712 constants divisible by BITS_PER_UNIT,
4713 call memset instead. */
4714 if (TREE_CODE (startbit) == INTEGER_CST
4715 && TREE_CODE (endbit) == INTEGER_CST
4716 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4717 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4718 {
4719 emit_library_call (memset_libfunc, 0,
4720 VOIDmode, 3,
4721 plus_constant (XEXP (targetx, 0),
4722 startb / BITS_PER_UNIT),
4723 Pmode,
4724 constm1_rtx, TYPE_MODE (integer_type_node),
4725 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4726 TYPE_MODE (sizetype));
4727 }
4728 else
4729 #endif
4730 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4731 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4732 bitlength_rtx, TYPE_MODE (sizetype),
4733 startbit_rtx, TYPE_MODE (sizetype),
4734 endbit_rtx, TYPE_MODE (sizetype));
4735
4736 if (REG_P (target))
4737 emit_move_insn (target, targetx);
4738 }
4739 }
4740
4741 else
4742 abort ();
4743 }
4744
4745 /* Store the value of EXP (an expression tree)
4746 into a subfield of TARGET which has mode MODE and occupies
4747 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4748 If MODE is VOIDmode, it means that we are storing into a bit-field.
4749
4750 If VALUE_MODE is VOIDmode, return nothing in particular.
4751 UNSIGNEDP is not used in this case.
4752
4753 Otherwise, return an rtx for the value stored. This rtx
4754 has mode VALUE_MODE if that is convenient to do.
4755 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4756
4757 ALIGN is the alignment that TARGET is known to have.
4758 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4759
4760 ALIAS_SET is the alias set for the destination. This value will
4761 (in general) be different from that for TARGET, since TARGET is a
4762 reference to the containing structure. */
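/* For example, for an assignment to a bit-field member,

	struct { unsigned f : 3; } s;
	s.f = x;

   store_field is reached with MODE == VOIDmode and BITSIZE == 3, and the
   value is inserted with store_bit_field.  */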
4763
4764 static rtx
4765 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4766 unsignedp, align, total_size, alias_set)
4767 rtx target;
4768 HOST_WIDE_INT bitsize;
4769 HOST_WIDE_INT bitpos;
4770 enum machine_mode mode;
4771 tree exp;
4772 enum machine_mode value_mode;
4773 int unsignedp;
4774 unsigned int align;
4775 HOST_WIDE_INT total_size;
4776 int alias_set;
4777 {
4778 HOST_WIDE_INT width_mask = 0;
4779
4780 if (TREE_CODE (exp) == ERROR_MARK)
4781 return const0_rtx;
4782
4783 if (bitsize < HOST_BITS_PER_WIDE_INT)
4784 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4785
4786 /* If we are storing into an unaligned field of an aligned union that is
4787 in a register, we may have the mode of TARGET being an integer mode but
4788 MODE == BLKmode. In that case, get an aligned object whose size and
4789 alignment are the same as TARGET and store TARGET into it (we can avoid
4790 the store if the field being stored is the entire width of TARGET). Then
4791 call ourselves recursively to store the field into a BLKmode version of
4792 that object. Finally, load from the object into TARGET. This is not
4793 very efficient in general, but should only be slightly more expensive
4794 than the otherwise-required unaligned accesses. Perhaps this can be
4795 cleaned up later. */
4796
4797 if (mode == BLKmode
4798 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4799 {
4800 rtx object = assign_stack_temp (GET_MODE (target),
4801 GET_MODE_SIZE (GET_MODE (target)), 0);
4802 rtx blk_object = copy_rtx (object);
4803
4804 MEM_SET_IN_STRUCT_P (object, 1);
4805 MEM_SET_IN_STRUCT_P (blk_object, 1);
4806 PUT_MODE (blk_object, BLKmode);
4807
4808 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4809 emit_move_insn (object, target);
4810
4811 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4812 align, total_size, alias_set);
4813
4814 /* Even though we aren't returning target, we need to
4815 give it the updated value. */
4816 emit_move_insn (target, object);
4817
4818 return blk_object;
4819 }
4820
4821 if (GET_CODE (target) == CONCAT)
4822 {
4823 /* We're storing into a struct containing a single __complex. */
4824
4825 if (bitpos != 0)
4826 abort ();
4827 return store_expr (exp, target, 0);
4828 }
4829
4830 /* If the structure is in a register or if the component
4831 is a bit field, we cannot use addressing to access it.
4832 Use bit-field techniques or SUBREG to store in it. */
4833
4834 if (mode == VOIDmode
4835 || (mode != BLKmode && ! direct_store[(int) mode]
4836 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4837 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4838 || GET_CODE (target) == REG
4839 || GET_CODE (target) == SUBREG
4840 /* If the field isn't aligned enough to store as an ordinary memref,
4841 store it as a bit field. */
4842 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4843 && (align < GET_MODE_ALIGNMENT (mode)
4844 || bitpos % GET_MODE_ALIGNMENT (mode)))
4845 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4846 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4847 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4848 /* If the RHS and field are a constant size and the size of the
4849 RHS isn't the same size as the bitfield, we must use bitfield
4850 operations. */
4851 || (bitsize >= 0
4852 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4853 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4854 {
4855 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4856
4857 /* If BITSIZE is narrower than the size of the type of EXP
4858 we will be narrowing TEMP. Normally, what's wanted are the
4859 low-order bits. However, if EXP's type is a record and this is
4860 a big-endian machine, we want the upper BITSIZE bits. */
4861 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4862 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4863 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4864 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4865 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4866 - bitsize),
4867 temp, 1);
4868
4869 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4870 MODE. */
4871 if (mode != VOIDmode && mode != BLKmode
4872 && mode != TYPE_MODE (TREE_TYPE (exp)))
4873 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4874
4875 /* If the modes of TARGET and TEMP are both BLKmode, both
4876 must be in memory and BITPOS must be aligned on a byte
4877 boundary. If so, we simply do a block copy. */
4878 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4879 {
4880 unsigned int exp_align = expr_align (exp);
4881
4882 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4883 || bitpos % BITS_PER_UNIT != 0)
4884 abort ();
4885
4886 target = change_address (target, VOIDmode,
4887 plus_constant (XEXP (target, 0),
4888 bitpos / BITS_PER_UNIT));
4889
4890 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4891 align = MIN (exp_align, align);
4892
4893 /* Find an alignment that is consistent with the bit position. */
4894 while ((bitpos % align) != 0)
4895 align >>= 1;
4896
4897 emit_block_move (target, temp,
4898 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4899 / BITS_PER_UNIT),
4900 align);
4901
4902 return value_mode == VOIDmode ? const0_rtx : target;
4903 }
4904
4905 /* Store the value in the bitfield. */
4906 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4907 if (value_mode != VOIDmode)
4908 {
4909 /* The caller wants an rtx for the value. */
4910 /* If possible, avoid refetching from the bitfield itself. */
4911 if (width_mask != 0
4912 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4913 {
4914 tree count;
4915 enum machine_mode tmode;
4916
4917 if (unsignedp)
4918 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4919 tmode = GET_MODE (temp);
4920 if (tmode == VOIDmode)
4921 tmode = value_mode;
4922 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4923 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4924 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4925 }
4926 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4927 NULL_RTX, value_mode, 0, align,
4928 total_size);
4929 }
4930 return const0_rtx;
4931 }
4932 else
4933 {
4934 rtx addr = XEXP (target, 0);
4935 rtx to_rtx;
4936
4937 /* If a value is wanted, it must be the lhs;
4938 so make the address stable for multiple use. */
4939
4940 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4941 && ! CONSTANT_ADDRESS_P (addr)
4942 /* A frame-pointer reference is already stable. */
4943 && ! (GET_CODE (addr) == PLUS
4944 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4945 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4946 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4947 addr = copy_to_reg (addr);
4948
4949 /* Now build a reference to just the desired component. */
4950
4951 to_rtx = copy_rtx (change_address (target, mode,
4952 plus_constant (addr,
4953 (bitpos
4954 / BITS_PER_UNIT))));
4955 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4956 MEM_ALIAS_SET (to_rtx) = alias_set;
4957
4958 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4959 }
4960 }
4961 \f
4962 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4963 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4964 ARRAY_REFs and find the ultimate containing object, which we return.
4965
4966 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4967 bit position, and *PUNSIGNEDP to the signedness of the field.
4968 If the position of the field is variable, we store a tree
4969 giving the variable offset (in units) in *POFFSET.
4970 This offset is in addition to the bit position.
4971 If the position is not variable, we store 0 in *POFFSET.
4972 We set *PALIGNMENT to the alignment of the address that will be
4973 computed. This is the alignment of the thing we return if *POFFSET
4974 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4975
4976 If any of the extraction expressions is volatile,
4977 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4978
4979 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4980 is a mode that can be used to access the field. In that case, *PBITSIZE
4981 is redundant.
4982
4983 If the field describes a variable-sized object, *PMODE is set to
4984 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4985 this case, but the address of the object can be found. */
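/* For example, for a reference such as S.A[I].F with I not constant, this
   returns the declaration S, sets *PBITSIZE from the size of F, *PBITPOS
   to the constant bit offset within the position, and *POFFSET to a tree
   for the variable byte offset, which includes I times the element size
   of A.  */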
4986
4987 tree
4988 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4989 punsignedp, pvolatilep, palignment)
4990 tree exp;
4991 HOST_WIDE_INT *pbitsize;
4992 HOST_WIDE_INT *pbitpos;
4993 tree *poffset;
4994 enum machine_mode *pmode;
4995 int *punsignedp;
4996 int *pvolatilep;
4997 unsigned int *palignment;
4998 {
4999 tree size_tree = 0;
5000 enum machine_mode mode = VOIDmode;
5001 tree offset = size_zero_node;
5002 tree bit_offset = bitsize_zero_node;
5003 unsigned int alignment = BIGGEST_ALIGNMENT;
5004 tree tem;
5005
5006 /* First get the mode, signedness, and size. We do this from just the
5007 outermost expression. */
5008 if (TREE_CODE (exp) == COMPONENT_REF)
5009 {
5010 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5011 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5012 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5013
5014 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5015 }
5016 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5017 {
5018 size_tree = TREE_OPERAND (exp, 1);
5019 *punsignedp = TREE_UNSIGNED (exp);
5020 }
5021 else
5022 {
5023 mode = TYPE_MODE (TREE_TYPE (exp));
5024 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5025
5026 if (mode == BLKmode)
5027 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5028 else
5029 *pbitsize = GET_MODE_BITSIZE (mode);
5030 }
5031
5032 if (size_tree != 0)
5033 {
5034 if (! host_integerp (size_tree, 1))
5035 mode = BLKmode, *pbitsize = -1;
5036 else
5037 *pbitsize = tree_low_cst (size_tree, 1);
5038 }
5039
5040 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5041 and find the ultimate containing object. */
5042 while (1)
5043 {
5044 if (TREE_CODE (exp) == BIT_FIELD_REF)
5045 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5046 else if (TREE_CODE (exp) == COMPONENT_REF)
5047 {
5048 tree field = TREE_OPERAND (exp, 1);
5049 tree this_offset = DECL_FIELD_OFFSET (field);
5050
5051 /* If this field hasn't been filled in yet, don't go
5052 past it. This should only happen when folding expressions
5053 made during type construction. */
5054 if (this_offset == 0)
5055 break;
5056 else if (! TREE_CONSTANT (this_offset)
5057 && contains_placeholder_p (this_offset))
5058 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5059
5060 offset = size_binop (PLUS_EXPR, offset, this_offset);
5061 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5062 DECL_FIELD_BIT_OFFSET (field));
5063
5064 if (! host_integerp (offset, 0))
5065 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5066 }
5067
5068 else if (TREE_CODE (exp) == ARRAY_REF)
5069 {
5070 tree index = TREE_OPERAND (exp, 1);
5071 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5072 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5073 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5074
5075 /* We assume all arrays have sizes that are a multiple of a byte.
5076 First subtract the lower bound, if any, in the type of the
5077 index, then convert to sizetype and multiply by the size of the
5078 array element. */
5079 if (low_bound != 0 && ! integer_zerop (low_bound))
5080 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5081 index, low_bound));
5082
5083 /* If the index has a self-referential type, pass it to a
5084 WITH_RECORD_EXPR; if the component size does, pass our
5085 component to one. */
5086 if (! TREE_CONSTANT (index)
5087 && contains_placeholder_p (index))
5088 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5089 if (! TREE_CONSTANT (unit_size)
5090 && contains_placeholder_p (unit_size))
5091 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5092 TREE_OPERAND (exp, 0));
5093
5094 offset = size_binop (PLUS_EXPR, offset,
5095 size_binop (MULT_EXPR,
5096 convert (sizetype, index),
5097 unit_size));
5098 }
5099
5100 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5101 && ! ((TREE_CODE (exp) == NOP_EXPR
5102 || TREE_CODE (exp) == CONVERT_EXPR)
5103 && (TYPE_MODE (TREE_TYPE (exp))
5104 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5105 break;
5106
5107 /* If any reference in the chain is volatile, the effect is volatile. */
5108 if (TREE_THIS_VOLATILE (exp))
5109 *pvolatilep = 1;
5110
5111 /* If the offset is non-constant already, then we can't assume any
5112 alignment more than the alignment here. */
5113 if (! TREE_CONSTANT (offset))
5114 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5115
5116 exp = TREE_OPERAND (exp, 0);
5117 }
5118
5119 if (DECL_P (exp))
5120 alignment = MIN (alignment, DECL_ALIGN (exp));
5121 else if (TREE_TYPE (exp) != 0)
5122 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5123
5124 /* If OFFSET is constant, see if we can return the whole thing as a
5125 constant bit position. Otherwise, split it up. */
5126 if (host_integerp (offset, 0)
5127 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5128 bitsize_unit_node))
5129 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5130 && host_integerp (tem, 0))
5131 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5132 else
5133 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5134
5135 *pmode = mode;
5136 *palignment = alignment;
5137 return exp;
5138 }
5139
5140 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5141
5142 static enum memory_use_mode
5143 get_memory_usage_from_modifier (modifier)
5144 enum expand_modifier modifier;
5145 {
5146 switch (modifier)
5147 {
5148 case EXPAND_NORMAL:
5149 case EXPAND_SUM:
5150 return MEMORY_USE_RO;
5151 break;
5152 case EXPAND_MEMORY_USE_WO:
5153 return MEMORY_USE_WO;
5154 break;
5155 case EXPAND_MEMORY_USE_RW:
5156 return MEMORY_USE_RW;
5157 break;
5158 case EXPAND_MEMORY_USE_DONT:
5159 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5160 MEMORY_USE_DONT, because they are modifiers to a call of
5161 expand_expr in the ADDR_EXPR case of expand_expr. */
5162 case EXPAND_CONST_ADDRESS:
5163 case EXPAND_INITIALIZER:
5164 return MEMORY_USE_DONT;
5165 case EXPAND_MEMORY_USE_BAD:
5166 default:
5167 abort ();
5168 }
5169 }
5170 \f
5171 /* Given an rtx VALUE that may contain additions and multiplications,
5172 return an equivalent value that just refers to a register or memory.
5173 This is done by generating instructions to perform the arithmetic
5174 and returning a pseudo-register containing the value.
5175
5176 The returned value may be a REG, SUBREG, MEM or constant. */
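/* For example, given (plus:SI (reg:SI 100) (const_int 8)), we emit an add
   instruction and return a pseudo register (or TARGET, if suitable)
   holding the sum.  */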
5177
5178 rtx
5179 force_operand (value, target)
5180 rtx value, target;
5181 {
5182 register optab binoptab = 0;
5183 /* Use a temporary to force order of execution of calls to
5184 `force_operand'. */
5185 rtx tmp;
5186 register rtx op2;
5187 /* Use subtarget as the target for operand 0 of a binary operation. */
5188 register rtx subtarget = get_subtarget (target);
5189
5190 /* Check for a PIC address load. */
5191 if (flag_pic
5192 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5193 && XEXP (value, 0) == pic_offset_table_rtx
5194 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5195 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5196 || GET_CODE (XEXP (value, 1)) == CONST))
5197 {
5198 if (!subtarget)
5199 subtarget = gen_reg_rtx (GET_MODE (value));
5200 emit_move_insn (subtarget, value);
5201 return subtarget;
5202 }
5203
5204 if (GET_CODE (value) == PLUS)
5205 binoptab = add_optab;
5206 else if (GET_CODE (value) == MINUS)
5207 binoptab = sub_optab;
5208 else if (GET_CODE (value) == MULT)
5209 {
5210 op2 = XEXP (value, 1);
5211 if (!CONSTANT_P (op2)
5212 && !(GET_CODE (op2) == REG && op2 != subtarget))
5213 subtarget = 0;
5214 tmp = force_operand (XEXP (value, 0), subtarget);
5215 return expand_mult (GET_MODE (value), tmp,
5216 force_operand (op2, NULL_RTX),
5217 target, 0);
5218 }
5219
5220 if (binoptab)
5221 {
5222 op2 = XEXP (value, 1);
5223 if (!CONSTANT_P (op2)
5224 && !(GET_CODE (op2) == REG && op2 != subtarget))
5225 subtarget = 0;
5226 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5227 {
5228 binoptab = add_optab;
5229 op2 = negate_rtx (GET_MODE (value), op2);
5230 }
5231
5232 /* Check for an addition with OP2 a constant integer and our first
5233 operand a PLUS of a virtual register and something else. In that
5234 case, we want to emit the sum of the virtual register and the
5235 constant first and then add the other value. This allows virtual
5236 register instantiation to simply modify the constant rather than
5237 creating another one around this addition. */
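      /* For example, for (plus (plus (reg virtual-stack-vars) (reg 66))
	 (const_int 8)), we first form virtual-stack-vars + 8, which
	 instantiation can later rewrite as a plain frame-pointer offset,
	 and only then add (reg 66).  */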
5238 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5239 && GET_CODE (XEXP (value, 0)) == PLUS
5240 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5241 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5242 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5243 {
5244 rtx temp = expand_binop (GET_MODE (value), binoptab,
5245 XEXP (XEXP (value, 0), 0), op2,
5246 subtarget, 0, OPTAB_LIB_WIDEN);
5247 return expand_binop (GET_MODE (value), binoptab, temp,
5248 force_operand (XEXP (XEXP (value, 0), 1), 0),
5249 target, 0, OPTAB_LIB_WIDEN);
5250 }
5251
5252 tmp = force_operand (XEXP (value, 0), subtarget);
5253 return expand_binop (GET_MODE (value), binoptab, tmp,
5254 force_operand (op2, NULL_RTX),
5255 target, 0, OPTAB_LIB_WIDEN);
5256 /* We give UNSIGNEDP = 0 to expand_binop
5257 because the only operations we are expanding here are signed ones. */
5258 }
5259 return value;
5260 }
5261 \f
5262 /* Subroutine of expand_expr:
5263 save the non-copied parts (LIST) of an expr (LHS), and return a list
5264 which can restore these values to their previous values,
5265 should something modify their storage. */
5266
5267 static tree
5268 save_noncopied_parts (lhs, list)
5269 tree lhs;
5270 tree list;
5271 {
5272 tree tail;
5273 tree parts = 0;
5274
5275 for (tail = list; tail; tail = TREE_CHAIN (tail))
5276 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5277 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5278 else
5279 {
5280 tree part = TREE_VALUE (tail);
5281 tree part_type = TREE_TYPE (part);
5282 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5283 rtx target = assign_temp (part_type, 0, 1, 1);
5284 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5285 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5286 parts = tree_cons (to_be_saved,
5287 build (RTL_EXPR, part_type, NULL_TREE,
5288 (tree) target),
5289 parts);
5290 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5291 }
5292 return parts;
5293 }
5294
5295 /* Subroutine of expand_expr:
5296 record the non-copied parts (LIST) of an expr (LHS), and return a list
5297 which specifies the initial values of these parts. */
5298
5299 static tree
5300 init_noncopied_parts (lhs, list)
5301 tree lhs;
5302 tree list;
5303 {
5304 tree tail;
5305 tree parts = 0;
5306
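  /* For each leaf of LIST that carries an initial value in its
     TREE_PURPOSE, pair that value with a COMPONENT_REF of LHS naming
     the part to be initialized.  Nested TREE_LISTs are handled
     recursively.  */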
5307 for (tail = list; tail; tail = TREE_CHAIN (tail))
5308 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5309 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5310 else if (TREE_PURPOSE (tail))
5311 {
5312 tree part = TREE_VALUE (tail);
5313 tree part_type = TREE_TYPE (part);
5314 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5315 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5316 }
5317 return parts;
5318 }
5319
5320 /* Subroutine of expand_expr: return nonzero iff there is no way that
5321 EXP can reference X, which is being modified. TOP_P is nonzero if this
5322 call is going to be used to determine whether we need a temporary
5323 for EXP, as opposed to a recursive call to this function.
5324
5325 It is always safe for this routine to return zero since it merely
5326 searches for optimization opportunities. */
5327
5328 static int
5329 safe_from_p (x, exp, top_p)
5330 rtx x;
5331 tree exp;
5332 int top_p;
5333 {
5334 rtx exp_rtl = 0;
5335 int i, nops;
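  /* These statics record the SAVE_EXPRs that are temporarily rewritten
     to ERROR_MARK while the outermost call walks EXP (see the SAVE_EXPR
     case below), so that they can be restored when the walk is done.  */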
5336 static int save_expr_count;
5337 static int save_expr_size = 0;
5338 static tree *save_expr_rewritten;
5339 static tree save_expr_trees[256];
5340
5341 if (x == 0
5342 /* If EXP has varying size, we MUST use a target since we currently
5343 have no way of allocating temporaries of variable size
5344 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5345 So we assume here that something at a higher level has prevented a
5346 clash. This is somewhat bogus, but the best we can do. Only
5347 do this when X is BLKmode and when we are at the top level. */
5348 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5349 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5350 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5351 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5352 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5353 != INTEGER_CST)
5354 && GET_MODE (x) == BLKmode))
5355 return 1;
5356
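  /* On the outermost call, set up the table of rewritten SAVE_EXPRs,
     do the real walk recursively, and then turn every recorded
     ERROR_MARK back into a SAVE_EXPR.  */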
5357 if (top_p && save_expr_size == 0)
5358 {
5359 int rtn;
5360
5361 save_expr_count = 0;
5362 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5363 save_expr_rewritten = &save_expr_trees[0];
5364
5365 rtn = safe_from_p (x, exp, 1);
5366
5367 for (i = 0; i < save_expr_count; ++i)
5368 {
5369 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5370 abort ();
5371 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5372 }
5373
5374 save_expr_size = 0;
5375
5376 return rtn;
5377 }
5378
5379 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5380 find the underlying pseudo. */
5381 if (GET_CODE (x) == SUBREG)
5382 {
5383 x = SUBREG_REG (x);
5384 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5385 return 0;
5386 }
5387
5388 /* If X is a location in the outgoing argument area, it is always safe. */
5389 if (GET_CODE (x) == MEM
5390 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5391 || (GET_CODE (XEXP (x, 0)) == PLUS
5392 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5393 return 1;
5394
5395 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5396 {
5397 case 'd':
5398 exp_rtl = DECL_RTL (exp);
5399 break;
5400
5401 case 'c':
5402 return 1;
5403
5404 case 'x':
5405 if (TREE_CODE (exp) == TREE_LIST)
5406 return ((TREE_VALUE (exp) == 0
5407 || safe_from_p (x, TREE_VALUE (exp), 0))
5408 && (TREE_CHAIN (exp) == 0
5409 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5410 else if (TREE_CODE (exp) == ERROR_MARK)
5411 return 1; /* An already-visited SAVE_EXPR? */
5412 else
5413 return 0;
5414
5415 case '1':
5416 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5417
5418 case '2':
5419 case '<':
5420 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5421 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5422
5423 case 'e':
5424 case 'r':
5425 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5426 the expression. If it is set, we conflict iff we are that rtx or
5427 both are in memory. Otherwise, we check all operands of the
5428 expression recursively. */
5429
5430 switch (TREE_CODE (exp))
5431 {
5432 case ADDR_EXPR:
5433 return (staticp (TREE_OPERAND (exp, 0))
5434 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5435 || TREE_STATIC (exp));
5436
5437 case INDIRECT_REF:
5438 if (GET_CODE (x) == MEM)
5439 return 0;
5440 break;
5441
5442 case CALL_EXPR:
5443 exp_rtl = CALL_EXPR_RTL (exp);
5444 if (exp_rtl == 0)
5445 {
5446 /* Assume that the call will clobber all hard registers and
5447 all of memory. */
5448 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5449 || GET_CODE (x) == MEM)
5450 return 0;
5451 }
5452
5453 break;
5454
5455 case RTL_EXPR:
5456 /* If a sequence exists, we would have to scan every instruction
5457 in the sequence to see if it was safe. This is probably not
5458 worthwhile. */
5459 if (RTL_EXPR_SEQUENCE (exp))
5460 return 0;
5461
5462 exp_rtl = RTL_EXPR_RTL (exp);
5463 break;
5464
5465 case WITH_CLEANUP_EXPR:
5466 exp_rtl = RTL_EXPR_RTL (exp);
5467 break;
5468
5469 case CLEANUP_POINT_EXPR:
5470 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5471
5472 case SAVE_EXPR:
5473 exp_rtl = SAVE_EXPR_RTL (exp);
5474 if (exp_rtl)
5475 break;
5476
5477 /* This SAVE_EXPR might appear many times in the top-level
5478 safe_from_p() expression, and if it has a complex
5479 subexpression, examining it multiple times could result
5480 in a combinatorial explosion. E.g. on an Alpha
5481 running at least 200MHz, a Fortran test case compiled with
5482 optimization took about 28 minutes to compile -- even though
5483 it was only a few lines long, and the complicated line causing
5484 so much time to be spent in the earlier version of safe_from_p()
5485 had only 293 or so unique nodes.
5486
5487 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5488 where it is so we can turn it back in the top-level safe_from_p()
5489 when we're done. */
5490
5491 /* For now, don't bother re-sizing the array. */
5492 if (save_expr_count >= save_expr_size)
5493 return 0;
5494 save_expr_rewritten[save_expr_count++] = exp;
5495
5496 nops = tree_code_length[(int) SAVE_EXPR];
5497 for (i = 0; i < nops; i++)
5498 {
5499 tree operand = TREE_OPERAND (exp, i);
5500 if (operand == NULL_TREE)
5501 continue;
5502 TREE_SET_CODE (exp, ERROR_MARK);
5503 if (!safe_from_p (x, operand, 0))
5504 return 0;
5505 TREE_SET_CODE (exp, SAVE_EXPR);
5506 }
5507 TREE_SET_CODE (exp, ERROR_MARK);
5508 return 1;
5509
5510 case BIND_EXPR:
5511 /* The only operand we look at is operand 1. The rest aren't
5512 part of the expression. */
5513 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5514
5515 case METHOD_CALL_EXPR:
5516 /* This takes a rtx argument, but shouldn't appear here. */
5517 abort ();
5518
5519 default:
5520 break;
5521 }
5522
5523 /* If we have an rtx, we do not need to scan our operands. */
5524 if (exp_rtl)
5525 break;
5526
5527 nops = tree_code_length[(int) TREE_CODE (exp)];
5528 for (i = 0; i < nops; i++)
5529 if (TREE_OPERAND (exp, i) != 0
5530 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5531 return 0;
5532 }
5533
5534 /* If we have an rtx, find any enclosed object. Then see if we conflict
5535 with it. */
5536 if (exp_rtl)
5537 {
5538 if (GET_CODE (exp_rtl) == SUBREG)
5539 {
5540 exp_rtl = SUBREG_REG (exp_rtl);
5541 if (GET_CODE (exp_rtl) == REG
5542 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5543 return 0;
5544 }
5545
5546 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5547 are memory and EXP is not readonly. */
5548 return ! (rtx_equal_p (x, exp_rtl)
5549 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5550 && ! TREE_READONLY (exp)));
5551 }
5552
5553 /* If we reach here, it is safe. */
5554 return 1;
5555 }
5556
5557 /* Subroutine of expand_expr: return nonzero iff EXP is an
5558 expression whose type is statically determinable. */
5559
5560 static int
5561 fixed_type_p (exp)
5562 tree exp;
5563 {
5564 if (TREE_CODE (exp) == PARM_DECL
5565 || TREE_CODE (exp) == VAR_DECL
5566 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5567 || TREE_CODE (exp) == COMPONENT_REF
5568 || TREE_CODE (exp) == ARRAY_REF)
5569 return 1;
5570 return 0;
5571 }
5572
5573 /* Subroutine of expand_expr: return rtx if EXP is a
5574 variable or parameter; else return 0. */
5575
5576 static rtx
5577 var_rtx (exp)
5578 tree exp;
5579 {
5580 STRIP_NOPS (exp);
5581 switch (TREE_CODE (exp))
5582 {
5583 case PARM_DECL:
5584 case VAR_DECL:
5585 return DECL_RTL (exp);
5586 default:
5587 return 0;
5588 }
5589 }
5590
5591 #ifdef MAX_INTEGER_COMPUTATION_MODE
5592 void
5593 check_max_integer_computation_mode (exp)
5594 tree exp;
5595 {
5596 enum tree_code code;
5597 enum machine_mode mode;
5598
5599 /* Strip any NOPs that don't change the mode. */
5600 STRIP_NOPS (exp);
5601 code = TREE_CODE (exp);
5602
5603 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5604 if (code == NOP_EXPR
5605 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5606 return;
5607
5608 /* First check the type of the overall operation. We need only look at
5609 unary, binary and relational operations. */
5610 if (TREE_CODE_CLASS (code) == '1'
5611 || TREE_CODE_CLASS (code) == '2'
5612 || TREE_CODE_CLASS (code) == '<')
5613 {
5614 mode = TYPE_MODE (TREE_TYPE (exp));
5615 if (GET_MODE_CLASS (mode) == MODE_INT
5616 && mode > MAX_INTEGER_COMPUTATION_MODE)
5617 fatal ("unsupported wide integer operation");
5618 }
5619
5620 /* Check operand of a unary op. */
5621 if (TREE_CODE_CLASS (code) == '1')
5622 {
5623 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5624 if (GET_MODE_CLASS (mode) == MODE_INT
5625 && mode > MAX_INTEGER_COMPUTATION_MODE)
5626 fatal ("unsupported wide integer operation");
5627 }
5628
5629 /* Check operands of a binary/comparison op. */
5630 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5631 {
5632 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5633 if (GET_MODE_CLASS (mode) == MODE_INT
5634 && mode > MAX_INTEGER_COMPUTATION_MODE)
5635 fatal ("unsupported wide integer operation");
5636
5637 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5638 if (GET_MODE_CLASS (mode) == MODE_INT
5639 && mode > MAX_INTEGER_COMPUTATION_MODE)
5640 fatal ("unsupported wide integer operation");
5641 }
5642 }
5643 #endif
5644
5645 \f
5646 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5647 has any readonly fields. If any of the fields have types that
5648 contain readonly fields, return true as well. */
5649
5650 static int
5651 readonly_fields_p (type)
5652 tree type;
5653 {
5654 tree field;
5655
5656 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5657 if (TREE_CODE (field) == FIELD_DECL
5658 && (TREE_READONLY (field)
5659 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5660 && readonly_fields_p (TREE_TYPE (field)))))
5661 return 1;
5662
5663 return 0;
5664 }
5665 \f
5666 /* expand_expr: generate code for computing expression EXP.
5667 An rtx for the computed value is returned. The value is never null.
5668 In the case of a void EXP, const0_rtx is returned.
5669
5670 The value may be stored in TARGET if TARGET is nonzero.
5671 TARGET is just a suggestion; callers must assume that
5672 the rtx returned may not be the same as TARGET.
5673
5674 If TARGET is CONST0_RTX, it means that the value will be ignored.
5675
5676 If TMODE is not VOIDmode, it suggests generating the
5677 result in mode TMODE. But this is done only when convenient.
5678 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5679 TMODE is just a suggestion; callers must assume that
5680 the rtx returned may not have mode TMODE.
5681
5682 Note that TARGET may have neither TMODE nor MODE. In that case, it
5683 probably will not be used.
5684
5685 If MODIFIER is EXPAND_SUM then when EXP is an addition
5686 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5687 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5688 products as above, or REG or MEM, or constant.
5689 Ordinarily in such cases we would output mul or add instructions
5690 and then return a pseudo reg containing the sum.
5691
5692 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5693 it also marks a label as absolutely required (it can't be dead).
5694 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5695 This is used for outputting expressions used in initializers.
5696
5697 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5698 with a constant address even if that address is not normally legitimate.
5699 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5700
5701 rtx
5702 expand_expr (exp, target, tmode, modifier)
5703 register tree exp;
5704 rtx target;
5705 enum machine_mode tmode;
5706 enum expand_modifier modifier;
5707 {
5708 register rtx op0, op1, temp;
5709 tree type = TREE_TYPE (exp);
5710 int unsignedp = TREE_UNSIGNED (type);
5711 register enum machine_mode mode;
5712 register enum tree_code code = TREE_CODE (exp);
5713 optab this_optab;
5714 rtx subtarget, original_target;
5715 int ignore;
5716 tree context;
5717 /* Used by check-memory-usage to make modifier read only. */
5718 enum expand_modifier ro_modifier;
5719
5720 /* Handle ERROR_MARK before anybody tries to access its type. */
5721 if (TREE_CODE (exp) == ERROR_MARK)
5722 {
5723 op0 = CONST0_RTX (tmode);
5724 if (op0 != 0)
5725 return op0;
5726 return const0_rtx;
5727 }
5728
5729 mode = TYPE_MODE (type);
5730 /* Use subtarget as the target for operand 0 of a binary operation. */
5731 subtarget = get_subtarget (target);
5732 original_target = target;
5733 ignore = (target == const0_rtx
5734 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5735 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5736 || code == COND_EXPR)
5737 && TREE_CODE (type) == VOID_TYPE));
5738
5739 /* Make a read-only version of the modifier. */
5740 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5741 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5742 ro_modifier = modifier;
5743 else
5744 ro_modifier = EXPAND_NORMAL;
5745
5746 /* If we are going to ignore this result, we need only do something
5747 if there is a side-effect somewhere in the expression. If there
5748 is, short-circuit the most common cases here. Note that we must
5749 not call expand_expr with anything but const0_rtx in case this
5750 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5751
5752 if (ignore)
5753 {
5754 if (! TREE_SIDE_EFFECTS (exp))
5755 return const0_rtx;
5756
5757 /* Ensure we reference a volatile object even if value is ignored, but
5758 don't do this if all we are doing is taking its address. */
5759 if (TREE_THIS_VOLATILE (exp)
5760 && TREE_CODE (exp) != FUNCTION_DECL
5761 && mode != VOIDmode && mode != BLKmode
5762 && modifier != EXPAND_CONST_ADDRESS)
5763 {
5764 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5765 if (GET_CODE (temp) == MEM)
5766 temp = copy_to_reg (temp);
5767 return const0_rtx;
5768 }
5769
5770 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5771 || code == INDIRECT_REF || code == BUFFER_REF)
5772 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5773 VOIDmode, ro_modifier);
5774 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5775 || code == ARRAY_REF)
5776 {
5777 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5778 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5779 return const0_rtx;
5780 }
5781 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5782 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5783 /* If the second operand has no side effects, just evaluate
5784 the first. */
5785 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5786 VOIDmode, ro_modifier);
5787 else if (code == BIT_FIELD_REF)
5788 {
5789 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5790 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5791 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5792 return const0_rtx;
5793 }
5794 ;
5795 target = 0;
5796 }
5797
5798 #ifdef MAX_INTEGER_COMPUTATION_MODE
5799 /* Only check stuff here if the mode we want is different from the mode
5801 of the expression; if it's the same, check_max_integer_computation_mode
5801 will handle it. Do we really need to check this stuff at all? */
5802
5803 if (target
5804 && GET_MODE (target) != mode
5805 && TREE_CODE (exp) != INTEGER_CST
5806 && TREE_CODE (exp) != PARM_DECL
5807 && TREE_CODE (exp) != ARRAY_REF
5808 && TREE_CODE (exp) != COMPONENT_REF
5809 && TREE_CODE (exp) != BIT_FIELD_REF
5810 && TREE_CODE (exp) != INDIRECT_REF
5811 && TREE_CODE (exp) != CALL_EXPR
5812 && TREE_CODE (exp) != VAR_DECL
5813 && TREE_CODE (exp) != RTL_EXPR)
5814 {
5815 enum machine_mode mode = GET_MODE (target);
5816
5817 if (GET_MODE_CLASS (mode) == MODE_INT
5818 && mode > MAX_INTEGER_COMPUTATION_MODE)
5819 fatal ("unsupported wide integer operation");
5820 }
5821
5822 if (tmode != mode
5823 && TREE_CODE (exp) != INTEGER_CST
5824 && TREE_CODE (exp) != PARM_DECL
5825 && TREE_CODE (exp) != ARRAY_REF
5826 && TREE_CODE (exp) != COMPONENT_REF
5827 && TREE_CODE (exp) != BIT_FIELD_REF
5828 && TREE_CODE (exp) != INDIRECT_REF
5829 && TREE_CODE (exp) != VAR_DECL
5830 && TREE_CODE (exp) != CALL_EXPR
5831 && TREE_CODE (exp) != RTL_EXPR
5832 && GET_MODE_CLASS (tmode) == MODE_INT
5833 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5834 fatal ("unsupported wide integer operation");
5835
5836 check_max_integer_computation_mode (exp);
5837 #endif
5838
5839 /* If will do cse, generate all results into pseudo registers
5840 since 1) that allows cse to find more things
5841 and 2) otherwise cse could produce an insn the machine
5842 cannot support. */
5843
5844 if (! cse_not_expected && mode != BLKmode && target
5845 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5846 target = subtarget;
5847
5848 switch (code)
5849 {
5850 case LABEL_DECL:
5851 {
5852 tree function = decl_function_context (exp);
5853 /* Handle using a label in a containing function. */
5854 if (function != current_function_decl
5855 && function != inline_function_decl && function != 0)
5856 {
5857 struct function *p = find_function_data (function);
5858 /* Allocate in the memory associated with the function
5859 that the label is in. */
5860 push_obstacks (p->function_obstack,
5861 p->function_maybepermanent_obstack);
5862
5863 p->expr->x_forced_labels
5864 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5865 p->expr->x_forced_labels);
5866 pop_obstacks ();
5867 }
5868 else
5869 {
5870 if (modifier == EXPAND_INITIALIZER)
5871 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5872 label_rtx (exp),
5873 forced_labels);
5874 }
5875
5876 temp = gen_rtx_MEM (FUNCTION_MODE,
5877 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5878 if (function != current_function_decl
5879 && function != inline_function_decl && function != 0)
5880 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5881 return temp;
5882 }
5883
5884 case PARM_DECL:
5885 if (DECL_RTL (exp) == 0)
5886 {
5887 error_with_decl (exp, "prior parameter's size depends on `%s'");
5888 return CONST0_RTX (mode);
5889 }
5890
5891 /* ... fall through ... */
5892
5893 case VAR_DECL:
5894 /* If a static var's type was incomplete when the decl was written,
5895 but the type is complete now, lay out the decl now. */
5896 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5897 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5898 {
5899 push_obstacks_nochange ();
5900 end_temporary_allocation ();
5901 layout_decl (exp, 0);
5902 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5903 pop_obstacks ();
5904 }
5905
5906 /* Although static-storage variables start off initialized, according to
5907 ANSI C, a memcpy could overwrite them with uninitialized values. So
5908 we check them too. This also lets us check for read-only variables
5909 accessed via a non-const declaration, in case it won't be detected
5910 any other way (e.g., in an embedded system or OS kernel without
5911 memory protection).
5912
5913 Aggregates are not checked here; they're handled elsewhere. */
5914 if (cfun && current_function_check_memory_usage
5915 && code == VAR_DECL
5916 && GET_CODE (DECL_RTL (exp)) == MEM
5917 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5918 {
5919 enum memory_use_mode memory_usage;
5920 memory_usage = get_memory_usage_from_modifier (modifier);
5921
5922 if (memory_usage != MEMORY_USE_DONT)
5923 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5924 XEXP (DECL_RTL (exp), 0), Pmode,
5925 GEN_INT (int_size_in_bytes (type)),
5926 TYPE_MODE (sizetype),
5927 GEN_INT (memory_usage),
5928 TYPE_MODE (integer_type_node));
5929 }
5930
5931 /* ... fall through ... */
5932
5933 case FUNCTION_DECL:
5934 case RESULT_DECL:
5935 if (DECL_RTL (exp) == 0)
5936 abort ();
5937
5938 /* Ensure the variable is marked as used even if it doesn't go through
5939 a parser. If it hasn't been used yet, write out an external
5940 definition. */
5941 if (! TREE_USED (exp))
5942 {
5943 assemble_external (exp);
5944 TREE_USED (exp) = 1;
5945 }
5946
5947 /* Show we haven't gotten RTL for this yet. */
5948 temp = 0;
5949
5950 /* Handle variables inherited from containing functions. */
5951 context = decl_function_context (exp);
5952
5953 /* We treat inline_function_decl as an alias for the current function
5954 because that is the inline function whose vars, types, etc.
5955 are being merged into the current function.
5956 See expand_inline_function. */
5957
5958 if (context != 0 && context != current_function_decl
5959 && context != inline_function_decl
5960 /* If var is static, we don't need a static chain to access it. */
5961 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5962 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5963 {
5964 rtx addr;
5965
5966 /* Mark as non-local and addressable. */
5967 DECL_NONLOCAL (exp) = 1;
5968 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5969 abort ();
5970 mark_addressable (exp);
5971 if (GET_CODE (DECL_RTL (exp)) != MEM)
5972 abort ();
5973 addr = XEXP (DECL_RTL (exp), 0);
5974 if (GET_CODE (addr) == MEM)
5975 addr = gen_rtx_MEM (Pmode,
5976 fix_lexical_addr (XEXP (addr, 0), exp));
5977 else
5978 addr = fix_lexical_addr (addr, exp);
5979 temp = change_address (DECL_RTL (exp), mode, addr);
5980 }
5981
5982 /* This is the case of an array whose size is to be determined
5983 from its initializer, while the initializer is still being parsed.
5984 See expand_decl. */
5985
5986 else if (GET_CODE (DECL_RTL (exp)) == MEM
5987 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5988 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5989 XEXP (DECL_RTL (exp), 0));
5990
5991 /* If DECL_RTL is memory, we are in the normal case; if either
5992 the address is not valid, or it is not a register and -fforce-addr
5993 is specified, get the address into a register. */
5994
5995 else if (GET_CODE (DECL_RTL (exp)) == MEM
5996 && modifier != EXPAND_CONST_ADDRESS
5997 && modifier != EXPAND_SUM
5998 && modifier != EXPAND_INITIALIZER
5999 && (! memory_address_p (DECL_MODE (exp),
6000 XEXP (DECL_RTL (exp), 0))
6001 || (flag_force_addr
6002 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6003 temp = change_address (DECL_RTL (exp), VOIDmode,
6004 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6005
6006 /* If we got something, return it. But first, set the alignment
6007 if the address is a register. */
6008 if (temp != 0)
6009 {
6010 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6011 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6012
6013 return temp;
6014 }
6015
6016 /* If the mode of DECL_RTL does not match that of the decl, it
6017 must be a promoted value. We return a SUBREG of the wanted mode,
6018 but mark it so that we know that it was already extended. */
6019
6020 if (GET_CODE (DECL_RTL (exp)) == REG
6021 && GET_MODE (DECL_RTL (exp)) != mode)
6022 {
6023 /* Get the signedness used for this variable. Ensure we get the
6024 same mode we got when the variable was declared. */
6025 if (GET_MODE (DECL_RTL (exp))
6026 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6027 abort ();
6028
6029 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6030 SUBREG_PROMOTED_VAR_P (temp) = 1;
6031 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6032 return temp;
6033 }
6034
6035 return DECL_RTL (exp);
6036
6037 case INTEGER_CST:
6038 return immed_double_const (TREE_INT_CST_LOW (exp),
6039 TREE_INT_CST_HIGH (exp), mode);
6040
6041 case CONST_DECL:
6042 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6043 EXPAND_MEMORY_USE_BAD);
6044
6045 case REAL_CST:
6046 /* If optimized, generate immediate CONST_DOUBLE
6047 which will be turned into memory by reload if necessary.
6048
6049 We used to force a register so that loop.c could see it. But
6050 this does not allow gen_* patterns to perform optimizations with
6051 the constants. It also produces two insns in cases like "x = 1.0;".
6052 On most machines, floating-point constants are not permitted in
6053 many insns, so we'd end up copying it to a register in any case.
6054
6055 Now, we do the copying in expand_binop, if appropriate. */
6056 return immed_real_const (exp);
6057
6058 case COMPLEX_CST:
6059 case STRING_CST:
6060 if (! TREE_CST_RTL (exp))
6061 output_constant_def (exp);
6062
6063 /* TREE_CST_RTL probably contains a constant address.
6064 On RISC machines where a constant address isn't valid,
6065 make some insns to get that address into a register. */
6066 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6067 && modifier != EXPAND_CONST_ADDRESS
6068 && modifier != EXPAND_INITIALIZER
6069 && modifier != EXPAND_SUM
6070 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6071 || (flag_force_addr
6072 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6073 return change_address (TREE_CST_RTL (exp), VOIDmode,
6074 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6075 return TREE_CST_RTL (exp);
6076
6077 case EXPR_WITH_FILE_LOCATION:
6078 {
6079 rtx to_return;
6080 const char *saved_input_filename = input_filename;
6081 int saved_lineno = lineno;
6082 input_filename = EXPR_WFL_FILENAME (exp);
6083 lineno = EXPR_WFL_LINENO (exp);
6084 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6085 emit_line_note (input_filename, lineno);
6086 /* Possibly avoid switching back and forth here. */
6087 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6088 input_filename = saved_input_filename;
6089 lineno = saved_lineno;
6090 return to_return;
6091 }
6092
6093 case SAVE_EXPR:
6094 context = decl_function_context (exp);
6095
6096 /* If this SAVE_EXPR was at global context, assume we are an
6097 initialization function and move it into our context. */
6098 if (context == 0)
6099 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6100
6101 /* We treat inline_function_decl as an alias for the current function
6102 because that is the inline function whose vars, types, etc.
6103 are being merged into the current function.
6104 See expand_inline_function. */
6105 if (context == current_function_decl || context == inline_function_decl)
6106 context = 0;
6107
6108 /* If this is non-local, handle it. */
6109 if (context)
6110 {
6111 /* The following call just exists to abort if the context is
6112 not of a containing function. */
6113 find_function_data (context);
6114
6115 temp = SAVE_EXPR_RTL (exp);
6116 if (temp && GET_CODE (temp) == REG)
6117 {
6118 put_var_into_stack (exp);
6119 temp = SAVE_EXPR_RTL (exp);
6120 }
6121 if (temp == 0 || GET_CODE (temp) != MEM)
6122 abort ();
6123 return change_address (temp, mode,
6124 fix_lexical_addr (XEXP (temp, 0), exp));
6125 }
6126 if (SAVE_EXPR_RTL (exp) == 0)
6127 {
6128 if (mode == VOIDmode)
6129 temp = const0_rtx;
6130 else
6131 temp = assign_temp (type, 3, 0, 0);
6132
6133 SAVE_EXPR_RTL (exp) = temp;
6134 if (!optimize && GET_CODE (temp) == REG)
6135 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6136 save_expr_regs);
6137
6138 /* If the mode of TEMP does not match that of the expression, it
6139 must be a promoted value. We pass store_expr a SUBREG of the
6140 wanted mode but mark it so that we know that it was already
6141 extended. Note that `unsignedp' was modified above in
6142 this case. */
6143
6144 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6145 {
6146 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6147 SUBREG_PROMOTED_VAR_P (temp) = 1;
6148 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6149 }
6150
6151 if (temp == const0_rtx)
6152 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6153 EXPAND_MEMORY_USE_BAD);
6154 else
6155 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6156
6157 TREE_USED (exp) = 1;
6158 }
6159
6160 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6161 must be a promoted value. We return a SUBREG of the wanted mode,
6162 but mark it so that we know that it was already extended. */
6163
6164 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6165 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6166 {
6167 /* Compute the signedness and make the proper SUBREG. */
6168 promote_mode (type, mode, &unsignedp, 0);
6169 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6170 SUBREG_PROMOTED_VAR_P (temp) = 1;
6171 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6172 return temp;
6173 }
6174
6175 return SAVE_EXPR_RTL (exp);
6176
6177 case UNSAVE_EXPR:
6178 {
6179 rtx temp;
6180 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6181 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6182 return temp;
6183 }
6184
6185 case PLACEHOLDER_EXPR:
6186 {
6187 tree placeholder_expr;
6188
6189 /* If there is an object on the head of the placeholder list,
6190 see if some object in it is of type TYPE or a pointer to it. For
6191 further information, see tree.def. */
6192 for (placeholder_expr = placeholder_list;
6193 placeholder_expr != 0;
6194 placeholder_expr = TREE_CHAIN (placeholder_expr))
6195 {
6196 tree need_type = TYPE_MAIN_VARIANT (type);
6197 tree object = 0;
6198 tree old_list = placeholder_list;
6199 tree elt;
6200
6201 /* Find the outermost reference that is of the type we want.
6202 If none, see if any object has a type that is a pointer to
6203 the type we want. */
6204 for (elt = TREE_PURPOSE (placeholder_expr);
6205 elt != 0 && object == 0;
6206 elt
6207 = ((TREE_CODE (elt) == COMPOUND_EXPR
6208 || TREE_CODE (elt) == COND_EXPR)
6209 ? TREE_OPERAND (elt, 1)
6210 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6211 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6212 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6213 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6214 ? TREE_OPERAND (elt, 0) : 0))
6215 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6216 object = elt;
6217
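      /* No object of the wanted type was found directly; look for one
         whose type is a pointer to it and refer to the pointed-to
         object through an INDIRECT_REF.  */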
6218 for (elt = TREE_PURPOSE (placeholder_expr);
6219 elt != 0 && object == 0;
6220 elt
6221 = ((TREE_CODE (elt) == COMPOUND_EXPR
6222 || TREE_CODE (elt) == COND_EXPR)
6223 ? TREE_OPERAND (elt, 1)
6224 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6225 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6226 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6227 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6228 ? TREE_OPERAND (elt, 0) : 0))
6229 if (POINTER_TYPE_P (TREE_TYPE (elt))
6230 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6231 == need_type))
6232 object = build1 (INDIRECT_REF, need_type, elt);
6233
6234 if (object != 0)
6235 {
6236 /* Expand this object skipping the list entries before
6237 it was found in case it is also a PLACEHOLDER_EXPR.
6238 In that case, we want to translate it using subsequent
6239 entries. */
6240 placeholder_list = TREE_CHAIN (placeholder_expr);
6241 temp = expand_expr (object, original_target, tmode,
6242 ro_modifier);
6243 placeholder_list = old_list;
6244 return temp;
6245 }
6246 }
6247 }
6248
6249 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6250 abort ();
6251
6252 case WITH_RECORD_EXPR:
6253 /* Put the object on the placeholder list, expand our first operand,
6254 and pop the list. */
6255 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6256 placeholder_list);
6257 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6258 tmode, ro_modifier);
6259 placeholder_list = TREE_CHAIN (placeholder_list);
6260 return target;
6261
6262 case GOTO_EXPR:
6263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6264 expand_goto (TREE_OPERAND (exp, 0));
6265 else
6266 expand_computed_goto (TREE_OPERAND (exp, 0));
6267 return const0_rtx;
6268
6269 case EXIT_EXPR:
6270 expand_exit_loop_if_false (NULL_PTR,
6271 invert_truthvalue (TREE_OPERAND (exp, 0)));
6272 return const0_rtx;
6273
6274 case LABELED_BLOCK_EXPR:
6275 if (LABELED_BLOCK_BODY (exp))
6276 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6277 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6278 return const0_rtx;
6279
6280 case EXIT_BLOCK_EXPR:
6281 if (EXIT_BLOCK_RETURN (exp))
6282 sorry ("returned value in block_exit_expr");
6283 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6284 return const0_rtx;
6285
6286 case LOOP_EXPR:
6287 push_temp_slots ();
6288 expand_start_loop (1);
6289 expand_expr_stmt (TREE_OPERAND (exp, 0));
6290 expand_end_loop ();
6291 pop_temp_slots ();
6292
6293 return const0_rtx;
6294
6295 case BIND_EXPR:
6296 {
6297 tree vars = TREE_OPERAND (exp, 0);
6298 int vars_need_expansion = 0;
6299
6300 /* Need to open a binding contour here because
6301 if there are any cleanups they must be contained here. */
6302 expand_start_bindings (2);
6303
6304 /* Mark the corresponding BLOCK for output in its proper place. */
6305 if (TREE_OPERAND (exp, 2) != 0
6306 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6307 insert_block (TREE_OPERAND (exp, 2));
6308
6309 /* If VARS have not yet been expanded, expand them now. */
6310 while (vars)
6311 {
6312 if (DECL_RTL (vars) == 0)
6313 {
6314 vars_need_expansion = 1;
6315 expand_decl (vars);
6316 }
6317 expand_decl_init (vars);
6318 vars = TREE_CHAIN (vars);
6319 }
6320
6321 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6322
6323 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6324
6325 return temp;
6326 }
6327
6328 case RTL_EXPR:
6329 if (RTL_EXPR_SEQUENCE (exp))
6330 {
6331 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6332 abort ();
6333 emit_insns (RTL_EXPR_SEQUENCE (exp));
6334 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6335 }
6336 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6337 free_temps_for_rtl_expr (exp);
6338 return RTL_EXPR_RTL (exp);
6339
6340 case CONSTRUCTOR:
6341 /* If we don't need the result, just ensure we evaluate any
6342 subexpressions. */
6343 if (ignore)
6344 {
6345 tree elt;
6346 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6347 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6348 EXPAND_MEMORY_USE_BAD);
6349 return const0_rtx;
6350 }
6351
6352 /* All elts simple constants => refer to a constant in memory. But
6353 if this is a non-BLKmode mode, let it store a field at a time
6354 since that should make a CONST_INT or CONST_DOUBLE when we
6355 fold. Likewise, if we have a target we can use, it is best to
6356 store directly into the target unless the type is large enough
6357 that memcpy will be used. If we are making an initializer and
6358 all operands are constant, put it in memory as well. */
6359 else if ((TREE_STATIC (exp)
6360 && ((mode == BLKmode
6361 && ! (target != 0 && safe_from_p (target, exp, 1)))
6362 || TREE_ADDRESSABLE (exp)
6363 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6364 && (! MOVE_BY_PIECES_P
6365 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6366 TYPE_ALIGN (type)))
6367 && ! mostly_zeros_p (exp))))
6368 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6369 {
6370 rtx constructor = output_constant_def (exp);
6371
6372 if (modifier != EXPAND_CONST_ADDRESS
6373 && modifier != EXPAND_INITIALIZER
6374 && modifier != EXPAND_SUM
6375 && (! memory_address_p (GET_MODE (constructor),
6376 XEXP (constructor, 0))
6377 || (flag_force_addr
6378 && GET_CODE (XEXP (constructor, 0)) != REG)))
6379 constructor = change_address (constructor, VOIDmode,
6380 XEXP (constructor, 0));
6381 return constructor;
6382 }
6383
6384 else
6385 {
6386 /* Handle calls that pass values in multiple non-contiguous
6387 locations. The Irix 6 ABI has examples of this. */
6388 if (target == 0 || ! safe_from_p (target, exp, 1)
6389 || GET_CODE (target) == PARALLEL)
6390 {
6391 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6393 else
6394 target = assign_temp (type, 0, 1, 1);
6395 }
6396
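      /* If the constructed object is read-only, mark the target as
         unchanging; copy the MEM first so the flag does not affect a
         shared rtx.  */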
6397 if (TREE_READONLY (exp))
6398 {
6399 if (GET_CODE (target) == MEM)
6400 target = copy_rtx (target);
6401
6402 RTX_UNCHANGING_P (target) = 1;
6403 }
6404
6405 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6406 int_size_in_bytes (TREE_TYPE (exp)));
6407 return target;
6408 }
6409
6410 case INDIRECT_REF:
6411 {
6412 tree exp1 = TREE_OPERAND (exp, 0);
6413 tree exp2;
6414 tree index;
6415 tree string = string_constant (exp1, &index);
6416
6417 /* Try to optimize reads from const strings. */
6418 if (string
6419 && TREE_CODE (string) == STRING_CST
6420 && TREE_CODE (index) == INTEGER_CST
6421 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6422 && GET_MODE_CLASS (mode) == MODE_INT
6423 && GET_MODE_SIZE (mode) == 1
6424 && modifier != EXPAND_MEMORY_USE_WO)
6425 return
6426 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6427
6428 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6429 op0 = memory_address (mode, op0);
6430
6431 if (cfun && current_function_check_memory_usage
6432 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6433 {
6434 enum memory_use_mode memory_usage;
6435 memory_usage = get_memory_usage_from_modifier (modifier);
6436
6437 if (memory_usage != MEMORY_USE_DONT)
6438 {
6439 in_check_memory_usage = 1;
6440 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6441 op0, Pmode,
6442 GEN_INT (int_size_in_bytes (type)),
6443 TYPE_MODE (sizetype),
6444 GEN_INT (memory_usage),
6445 TYPE_MODE (integer_type_node));
6446 in_check_memory_usage = 0;
6447 }
6448 }
6449
6450 temp = gen_rtx_MEM (mode, op0);
6451 /* If address was computed by addition,
6452 mark this as an element of an aggregate. */
6453 if (TREE_CODE (exp1) == PLUS_EXPR
6454 || (TREE_CODE (exp1) == SAVE_EXPR
6455 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6456 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6457 || (TREE_CODE (exp1) == ADDR_EXPR
6458 && (exp2 = TREE_OPERAND (exp1, 0))
6459 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6460 MEM_SET_IN_STRUCT_P (temp, 1);
6461
6462 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6463 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6464
6465 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6466 here, because, in C and C++, the fact that a location is accessed
6467 through a pointer to const does not mean that the value there can
6468 never change. Languages where it can never change should
6469 also set TREE_STATIC. */
6470 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6471
6472 /* If we are writing to this object and its type is a record with
6473 readonly fields, we must mark it as readonly so it will
6474 conflict with readonly references to those fields. */
6475 if (modifier == EXPAND_MEMORY_USE_WO
6476 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6477 RTX_UNCHANGING_P (temp) = 1;
6478
6479 return temp;
6480 }
6481
6482 case ARRAY_REF:
6483 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6484 abort ();
6485
6486 {
6487 tree array = TREE_OPERAND (exp, 0);
6488 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6489 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6490 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6491 HOST_WIDE_INT i;
6492
6493 /* Optimize the special-case of a zero lower bound.
6494
6495 We convert the low_bound to sizetype to avoid some problems
6496 with constant folding. (E.g. suppose the lower bound is 1,
6497 and its mode is QI. Without the conversion, (ARRAY
6498 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6499 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6500
6501 if (! integer_zerop (low_bound))
6502 index = size_diffop (index, convert (sizetype, low_bound));
6503
6504 /* Fold an expression like: "foo"[2].
6505 This is not done in fold so it won't happen inside &.
6506 Don't fold if this is for wide characters since it's too
6507 difficult to do correctly and this is a very rare case. */
6508
6509 if (TREE_CODE (array) == STRING_CST
6510 && TREE_CODE (index) == INTEGER_CST
6511 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6512 && GET_MODE_CLASS (mode) == MODE_INT
6513 && GET_MODE_SIZE (mode) == 1)
6514 return
6515 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6516
6517 /* If this is a constant index into a constant array,
6518 just get the value from the array. Handle both the cases when
6519 we have an explicit constructor and when our operand is a variable
6520 that was declared const. */
6521
6522 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6523 && TREE_CODE (index) == INTEGER_CST
6524 && 0 > compare_tree_int (index,
6525 list_length (CONSTRUCTOR_ELTS
6526 (TREE_OPERAND (exp, 0)))))
6527 {
6528 tree elem;
6529
6530 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6531 i = TREE_INT_CST_LOW (index);
6532 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6533 ;
6534
6535 if (elem)
6536 return expand_expr (fold (TREE_VALUE (elem)), target,
6537 tmode, ro_modifier);
6538 }
6539
6540 else if (optimize >= 1
6541 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6542 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6543 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6544 {
6545 if (TREE_CODE (index) == INTEGER_CST)
6546 {
6547 tree init = DECL_INITIAL (array);
6548
6549 if (TREE_CODE (init) == CONSTRUCTOR)
6550 {
6551 tree elem;
6552
6553 for (elem = CONSTRUCTOR_ELTS (init);
6554 (elem
6555 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6556 elem = TREE_CHAIN (elem))
6557 ;
6558
6559 if (elem)
6560 return expand_expr (fold (TREE_VALUE (elem)), target,
6561 tmode, ro_modifier);
6562 }
6563 else if (TREE_CODE (init) == STRING_CST
6564 && 0 > compare_tree_int (index,
6565 TREE_STRING_LENGTH (init)))
6566 return (GEN_INT
6567 (TREE_STRING_POINTER
6568 (init)[TREE_INT_CST_LOW (index)]));
6569 }
6570 }
6571 }
6572
6573 /* ... fall through ... */
6574
6575 case COMPONENT_REF:
6576 case BIT_FIELD_REF:
6577 /* If the operand is a CONSTRUCTOR, we can just extract the
6578 appropriate field if it is present. Don't do this if we have
6579 already written the data since we want to refer to that copy
6580 and varasm.c assumes that's what we'll do. */
6581 if (code != ARRAY_REF
6582 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6583 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6584 {
6585 tree elt;
6586
6587 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6588 elt = TREE_CHAIN (elt))
6589 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6590 /* We can normally use the value of the field in the
6591 CONSTRUCTOR. However, if this is a bitfield in
6592 an integral mode that we can fit in a HOST_WIDE_INT,
6593 we must mask only the number of bits in the bitfield,
6594 since this is done implicitly by the constructor. If
6595 the bitfield does not meet either of those conditions,
6596 we can't do this optimization. */
6597 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6598 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6599 == MODE_INT)
6600 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6601 <= HOST_BITS_PER_WIDE_INT))))
6602 {
6603 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6604 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6605 {
6606 HOST_WIDE_INT bitsize
6607 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6608
6609 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6610 {
6611 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6612 op0 = expand_and (op0, op1, target);
6613 }
6614 else
6615 {
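              /* The field is signed: shift it up into the high-order
                 bits and arithmetically shift it back down, so the
                 value gets sign-extended.  */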
6616 enum machine_mode imode
6617 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6618 tree count
6619 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6620 0);
6621
6622 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6623 target, 0);
6624 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6625 target, 0);
6626 }
6627 }
6628
6629 return op0;
6630 }
6631 }
6632
6633 {
6634 enum machine_mode mode1;
6635 HOST_WIDE_INT bitsize, bitpos;
6636 tree offset;
6637 int volatilep = 0;
6638 unsigned int alignment;
6639 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6640 &mode1, &unsignedp, &volatilep,
6641 &alignment);
6642
6643 /* If we got back the original object, something is wrong. Perhaps
6644 we are evaluating an expression too early. In any event, don't
6645 infinitely recurse. */
6646 if (tem == exp)
6647 abort ();
6648
6649 /* If TEM's type is a union of variable size, pass TARGET to the inner
6650 computation, since it will need a temporary and TARGET is known
6651 to suffice. This occurs in unchecked conversion in Ada. */
6652
6653 op0 = expand_expr (tem,
6654 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6655 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6656 != INTEGER_CST)
6657 ? target : NULL_RTX),
6658 VOIDmode,
6659 (modifier == EXPAND_INITIALIZER
6660 || modifier == EXPAND_CONST_ADDRESS)
6661 ? modifier : EXPAND_NORMAL);
6662
6663 /* If this is a constant, put it into a register if it is a
6664 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6665 if (CONSTANT_P (op0))
6666 {
6667 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6668 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6669 && offset == 0)
6670 op0 = force_reg (mode, op0);
6671 else
6672 op0 = validize_mem (force_const_mem (mode, op0));
6673 }
6674
6675 if (offset != 0)
6676 {
6677 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6678
6679 /* If this object is in memory, put it into a register.
6680 This case can't occur in C, but can in Ada if we have
6681 unchecked conversion of an expression from a scalar type to
6682 an array or record type. */
6683 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6684 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6685 {
6686 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6687
6688 mark_temp_addr_taken (memloc);
6689 emit_move_insn (memloc, op0);
6690 op0 = memloc;
6691 }
6692
6693 if (GET_CODE (op0) != MEM)
6694 abort ();
6695
6696 if (GET_MODE (offset_rtx) != ptr_mode)
6697 {
6698 #ifdef POINTERS_EXTEND_UNSIGNED
6699 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6700 #else
6701 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6702 #endif
6703 }
6704
6705 /* A constant address in OP0 can have VOIDmode; we must not try
6706 to call force_reg in that case, so avoid it. */
6707 if (GET_CODE (op0) == MEM
6708 && GET_MODE (op0) == BLKmode
6709 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6710 && bitsize != 0
6711 && (bitpos % bitsize) == 0
6712 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6713 && alignment == GET_MODE_ALIGNMENT (mode1))
6714 {
6715 rtx temp = change_address (op0, mode1,
6716 plus_constant (XEXP (op0, 0),
6717 (bitpos /
6718 BITS_PER_UNIT)));
6719 if (GET_CODE (XEXP (temp, 0)) == REG)
6720 op0 = temp;
6721 else
6722 op0 = change_address (op0, mode1,
6723 force_reg (GET_MODE (XEXP (temp, 0)),
6724 XEXP (temp, 0)));
6725 bitpos = 0;
6726 }
6727
6728
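      /* Add the variable offset to the base address, forcing the
         offset into a register so the sum is a valid address.  */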
6729 op0 = change_address (op0, VOIDmode,
6730 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6731 force_reg (ptr_mode,
6732 offset_rtx)));
6733 }
6734
6735 /* Don't forget about volatility even if this is a bitfield. */
6736 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6737 {
6738 op0 = copy_rtx (op0);
6739 MEM_VOLATILE_P (op0) = 1;
6740 }
6741
6742 /* Check the access. */
6743 if (cfun != 0 && current_function_check_memory_usage
6744 && GET_CODE (op0) == MEM)
6745 {
6746 enum memory_use_mode memory_usage;
6747 memory_usage = get_memory_usage_from_modifier (modifier);
6748
6749 if (memory_usage != MEMORY_USE_DONT)
6750 {
6751 rtx to;
6752 int size;
6753
6754 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6755 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6756
6757 /* Check the access right of the pointer. */
6758 if (size > BITS_PER_UNIT)
6759 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6760 to, Pmode,
6761 GEN_INT (size / BITS_PER_UNIT),
6762 TYPE_MODE (sizetype),
6763 GEN_INT (memory_usage),
6764 TYPE_MODE (integer_type_node));
6765 }
6766 }
6767
6768 /* In cases where an aligned union has an unaligned object
6769 as a field, we might be extracting a BLKmode value from
6770 an integer-mode (e.g., SImode) object. Handle this case
6771 by doing the extract into an object as wide as the field
6772 (which we know to be the width of a basic mode), then
6773 storing into memory, and changing the mode to BLKmode.
6774 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6775 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6776 if (mode1 == VOIDmode
6777 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6778 || (modifier != EXPAND_CONST_ADDRESS
6779 && modifier != EXPAND_INITIALIZER
6780 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6781 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6782 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6783 /* If the field isn't aligned enough to fetch as a memref,
6784 fetch it as a bit field. */
6785 || (mode1 != BLKmode
6786 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6787 && ((TYPE_ALIGN (TREE_TYPE (tem))
6788 < GET_MODE_ALIGNMENT (mode))
6789 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6790 /* If the type and the field are a constant size and the
6791 size of the type isn't the same size as the bitfield,
6792 we must use bitfield operations. */
6793 || ((bitsize >= 0
6794 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6795 == INTEGER_CST)
6796 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6797 bitsize)))))
6798 || (modifier != EXPAND_CONST_ADDRESS
6799 && modifier != EXPAND_INITIALIZER
6800 && mode == BLKmode
6801 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6802 && (TYPE_ALIGN (type) > alignment
6803 || bitpos % TYPE_ALIGN (type) != 0)))
6804 {
6805 enum machine_mode ext_mode = mode;
6806
6807 if (ext_mode == BLKmode
6808 && ! (target != 0 && GET_CODE (op0) == MEM
6809 && GET_CODE (target) == MEM
6810 && bitpos % BITS_PER_UNIT == 0))
6811 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6812
6813 if (ext_mode == BLKmode)
6814 {
6815 /* In this case, BITPOS must start at a byte boundary and
6816 TARGET, if specified, must be a MEM. */
6817 if (GET_CODE (op0) != MEM
6818 || (target != 0 && GET_CODE (target) != MEM)
6819 || bitpos % BITS_PER_UNIT != 0)
6820 abort ();
6821
6822 op0 = change_address (op0, VOIDmode,
6823 plus_constant (XEXP (op0, 0),
6824 bitpos / BITS_PER_UNIT));
6825 if (target == 0)
6826 target = assign_temp (type, 0, 1, 1);
6827
6828 emit_block_move (target, op0,
6829 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6830 / BITS_PER_UNIT),
6831 BITS_PER_UNIT);
6832
6833 return target;
6834 }
6835
6836 op0 = validize_mem (op0);
6837
6838 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6839 mark_reg_pointer (XEXP (op0, 0), alignment);
6840
6841 op0 = extract_bit_field (op0, bitsize, bitpos,
6842 unsignedp, target, ext_mode, ext_mode,
6843 alignment,
6844 int_size_in_bytes (TREE_TYPE (tem)));
6845
6846 /* If the result is a record type and BITSIZE is narrower than
6847 the mode of OP0, an integral mode, and this is a big endian
6848 machine, we must put the field into the high-order bits. */
6849 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6850 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6851 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6852 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6853 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6854 - bitsize),
6855 op0, 1);
6856
6857 if (mode == BLKmode)
6858 {
6859 rtx new = assign_stack_temp (ext_mode,
6860 bitsize / BITS_PER_UNIT, 0);
6861
6862 emit_move_insn (new, op0);
6863 op0 = copy_rtx (new);
6864 PUT_MODE (op0, BLKmode);
6865 MEM_SET_IN_STRUCT_P (op0, 1);
6866 }
6867
6868 return op0;
6869 }
6870
6871 /* If the result is BLKmode, use that to access the object
6872 now as well. */
6873 if (mode == BLKmode)
6874 mode1 = BLKmode;
6875
6876 /* Get a reference to just this component. */
6877 if (modifier == EXPAND_CONST_ADDRESS
6878 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6879 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6880 (bitpos / BITS_PER_UNIT)));
6881 else
6882 op0 = change_address (op0, mode1,
6883 plus_constant (XEXP (op0, 0),
6884 (bitpos / BITS_PER_UNIT)));
6885
6886 if (GET_CODE (op0) == MEM)
6887 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6888
6889 if (GET_CODE (XEXP (op0, 0)) == REG)
6890 mark_reg_pointer (XEXP (op0, 0), alignment);
6891
6892 MEM_SET_IN_STRUCT_P (op0, 1);
6893 MEM_VOLATILE_P (op0) |= volatilep;
6894 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6895 || modifier == EXPAND_CONST_ADDRESS
6896 || modifier == EXPAND_INITIALIZER)
6897 return op0;
6898 else if (target == 0)
6899 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6900
6901 convert_move (target, op0, unsignedp);
6902 return target;
6903 }
6904
6905 /* Intended for a reference to a buffer of a file-object in Pascal.
6906 But it's not certain that a special tree code will really be
6907 necessary for these. INDIRECT_REF might work for them. */
6908 case BUFFER_REF:
6909 abort ();
6910
6911 case IN_EXPR:
6912 {
6913 /* Pascal set IN expression.
6914
6915 Algorithm:
6916 rlo = set_low - (set_low%bits_per_word);
6917 the_word = set [ (index - rlo)/bits_per_word ];
6918 bit_index = index % bits_per_word;
6919 bitmask = 1 << bit_index;
6920 return !!(the_word & bitmask); */
6921
6922 tree set = TREE_OPERAND (exp, 0);
6923 tree index = TREE_OPERAND (exp, 1);
6924 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6925 tree set_type = TREE_TYPE (set);
6926 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6927 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6928 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6929 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6930 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6931 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6932 rtx setaddr = XEXP (setval, 0);
6933 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6934 rtx rlow;
6935 rtx diff, quo, rem, addr, bit, result;
6936
6937 preexpand_calls (exp);
6938
6939 /* If domain is empty, answer is no. Likewise if index is constant
6940 and out of bounds. */
6941 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6942 && TREE_CODE (set_low_bound) == INTEGER_CST
6943 && tree_int_cst_lt (set_high_bound, set_low_bound))
6944 || (TREE_CODE (index) == INTEGER_CST
6945 && TREE_CODE (set_low_bound) == INTEGER_CST
6946 && tree_int_cst_lt (index, set_low_bound))
6947 || (TREE_CODE (set_high_bound) == INTEGER_CST
6948 && TREE_CODE (index) == INTEGER_CST
6949 && tree_int_cst_lt (set_high_bound, index))))
6950 return const0_rtx;
6951
6952 if (target == 0)
6953 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6954
6955 /* If we get here, we have to generate the code for both cases
6956 (in range and out of range). */
6957
6958 op0 = gen_label_rtx ();
6959 op1 = gen_label_rtx ();
6960
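      /* When INDEX_VAL and the bound are not both compile-time
         constants, jump to OP1 (the out-of-range case) if INDEX_VAL is
         below LO_R or above HI_R.  */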
6961 if (! (GET_CODE (index_val) == CONST_INT
6962 && GET_CODE (lo_r) == CONST_INT))
6963 {
6964 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6965 GET_MODE (index_val), iunsignedp, 0, op1);
6966 }
6967
6968 if (! (GET_CODE (index_val) == CONST_INT
6969 && GET_CODE (hi_r) == CONST_INT))
6970 {
6971 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6972 GET_MODE (index_val), iunsignedp, 0, op1);
6973 }
6974
6975 /* Calculate the element number of bit zero in the first word
6976 of the set. */
6977 if (GET_CODE (lo_r) == CONST_INT)
6978 rlow = GEN_INT (INTVAL (lo_r)
6979 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6980 else
6981 rlow = expand_binop (index_mode, and_optab, lo_r,
6982 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6983 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6984
6985 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6986 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6987
6988 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6989 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6990 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6991 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6992
6993 addr = memory_address (byte_mode,
6994 expand_binop (index_mode, add_optab, diff,
6995 setaddr, NULL_RTX, iunsignedp,
6996 OPTAB_LIB_WIDEN));
6997
6998 	  /* Extract the bit we want to examine.  */
6999 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7000 gen_rtx_MEM (byte_mode, addr),
7001 make_tree (TREE_TYPE (index), rem),
7002 NULL_RTX, 1);
7003 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7004 GET_MODE (target) == byte_mode ? target : 0,
7005 1, OPTAB_LIB_WIDEN);
7006
7007 if (result != target)
7008 convert_move (target, result, 1);
7009
7010 /* Output the code to handle the out-of-range case. */
7011 emit_jump (op0);
7012 emit_label (op1);
7013 emit_move_insn (target, const0_rtx);
7014 emit_label (op0);
7015 return target;
7016 }
7017
7018 case WITH_CLEANUP_EXPR:
7019 if (RTL_EXPR_RTL (exp) == 0)
7020 {
7021 RTL_EXPR_RTL (exp)
7022 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7023 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7024
7025 /* That's it for this cleanup. */
7026 TREE_OPERAND (exp, 2) = 0;
7027 }
7028 return RTL_EXPR_RTL (exp);
7029
7030 case CLEANUP_POINT_EXPR:
7031 {
7032 /* Start a new binding layer that will keep track of all cleanup
7033 actions to be performed. */
7034 expand_start_bindings (2);
7035
7036 target_temp_slot_level = temp_slot_level;
7037
7038 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7039 /* If we're going to use this value, load it up now. */
7040 if (! ignore)
7041 op0 = force_not_mem (op0);
7042 preserve_temp_slots (op0);
7043 expand_end_bindings (NULL_TREE, 0, 0);
7044 }
7045 return op0;
7046
7047 case CALL_EXPR:
7048 /* Check for a built-in function. */
7049 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7050 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7051 == FUNCTION_DECL)
7052 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7053 return expand_builtin (exp, target, subtarget, tmode, ignore);
7054
7055 /* If this call was expanded already by preexpand_calls,
7056 just return the result we got. */
7057 if (CALL_EXPR_RTL (exp) != 0)
7058 return CALL_EXPR_RTL (exp);
7059
7060 return expand_call (exp, target, ignore);
7061
7062 case NON_LVALUE_EXPR:
7063 case NOP_EXPR:
7064 case CONVERT_EXPR:
7065 case REFERENCE_EXPR:
7066 if (TREE_CODE (type) == UNION_TYPE)
7067 {
7068 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7069
7070 /* If both input and output are BLKmode, this conversion
7071 isn't actually doing anything unless we need to make the
7072 alignment stricter. */
7073 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7074 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7075 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7076 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7077 modifier);
7078
7079 if (target == 0)
7080 {
7081 if (mode != BLKmode)
7082 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7083 else
7084 target = assign_temp (type, 0, 1, 1);
7085 }
7086
7087 if (GET_CODE (target) == MEM)
7088 /* Store data into beginning of memory target. */
7089 store_expr (TREE_OPERAND (exp, 0),
7090 change_address (target, TYPE_MODE (valtype), 0), 0);
7091
7092 else if (GET_CODE (target) == REG)
7093 /* Store this field into a union of the proper type. */
7094 store_field (target,
7095 MIN ((int_size_in_bytes (TREE_TYPE
7096 (TREE_OPERAND (exp, 0)))
7097 * BITS_PER_UNIT),
7098 GET_MODE_BITSIZE (mode)),
7099 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7100 VOIDmode, 0, BITS_PER_UNIT,
7101 int_size_in_bytes (type), 0);
7102 else
7103 abort ();
7104
7105 /* Return the entire union. */
7106 return target;
7107 }
7108
7109 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7110 {
7111 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7112 ro_modifier);
7113
7114 /* If the signedness of the conversion differs and OP0 is
7115 a promoted SUBREG, clear that indication since we now
7116 have to do the proper extension. */
7117 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7118 && GET_CODE (op0) == SUBREG)
7119 SUBREG_PROMOTED_VAR_P (op0) = 0;
7120
7121 return op0;
7122 }
7123
7124 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7125 if (GET_MODE (op0) == mode)
7126 return op0;
7127
7128 /* If OP0 is a constant, just convert it into the proper mode. */
7129 if (CONSTANT_P (op0))
7130 return
7131 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7132 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7133
7134 if (modifier == EXPAND_INITIALIZER)
7135 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7136
7137 if (target == 0)
7138 return
7139 convert_to_mode (mode, op0,
7140 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7141 else
7142 convert_move (target, op0,
7143 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7144 return target;
7145
7146 case PLUS_EXPR:
7147 /* We come here from MINUS_EXPR when the second operand is a
7148 constant. */
7149 plus_expr:
7150 this_optab = add_optab;
7151
7152 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7153 something else, make sure we add the register to the constant and
7154 then to the other thing. This case can occur during strength
7155 reduction and doing it this way will produce better code if the
7156 frame pointer or argument pointer is eliminated.
7157
7158 fold-const.c will ensure that the constant is always in the inner
7159 PLUS_EXPR, so the only case we need to do anything about is if
7160 sp, ap, or fp is our second argument, in which case we must swap
7161 the innermost first argument and our second argument. */
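      /* For instance, (x + 4) + FP is rearranged here so that we end up
	 expanding (FP + 4) + x, which lets the constant be combined with
	 the frame or argument pointer when that register is eliminated.  */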
7162
7163 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7164 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7165 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7166 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7167 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7168 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7169 {
7170 tree t = TREE_OPERAND (exp, 1);
7171
7172 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7173 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7174 }
7175
7176 /* If the result is to be ptr_mode and we are adding an integer to
7177 something, we might be forming a constant. So try to use
7178 plus_constant. If it produces a sum and we can't accept it,
7179 use force_operand. This allows P = &ARR[const] to generate
7180 efficient code on machines where a SYMBOL_REF is not a valid
7181 address.
7182
7183 If this is an EXPAND_SUM call, always return the sum. */
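      /* For instance, for P = &ARR[3] the constant byte offset can be
	 folded into the SYMBOL_REF for ARR with plus_constant, and
	 force_operand is used only if the resulting sum is not an
	 acceptable operand.  */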
7184 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7185 || mode == ptr_mode)
7186 {
7187 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7188 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7189 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7190 {
7191 rtx constant_part;
7192
7193 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7194 EXPAND_SUM);
7195 /* Use immed_double_const to ensure that the constant is
7196 truncated according to the mode of OP1, then sign extended
7197 to a HOST_WIDE_INT. Using the constant directly can result
7198 in non-canonical RTL in a 64x32 cross compile. */
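	    /* (In canonical RTL a CONST_INT is sign extended from its
	       mode, so e.g. a 32-bit value with the sign bit set must
	       appear as a negative HOST_WIDE_INT on a 64-bit host.)  */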
7199 constant_part
7200 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7201 (HOST_WIDE_INT) 0,
7202 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7203 op1 = plus_constant (op1, INTVAL (constant_part));
7204 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7205 op1 = force_operand (op1, target);
7206 return op1;
7207 }
7208
7209 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7210 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7211 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7212 {
7213 rtx constant_part;
7214
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7216 EXPAND_SUM);
7217 if (! CONSTANT_P (op0))
7218 {
7219 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7220 VOIDmode, modifier);
7221 /* Don't go to both_summands if modifier
7222 says it's not right to return a PLUS. */
7223 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7224 goto binop2;
7225 goto both_summands;
7226 }
7227 /* Use immed_double_const to ensure that the constant is
7228 truncated according to the mode of OP0, then sign extended
7229 to a HOST_WIDE_INT. Using the constant directly can result
7230 in non-canonical RTL in a 64x32 cross compile. */
7231 constant_part
7232 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7233 (HOST_WIDE_INT) 0,
7234 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7235 op0 = plus_constant (op0, INTVAL (constant_part));
7236 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7237 op0 = force_operand (op0, target);
7238 return op0;
7239 }
7240 }
7241
7242 /* No sense saving up arithmetic to be done
7243 if it's all in the wrong mode to form part of an address.
7244 And force_operand won't know whether to sign-extend or
7245 zero-extend. */
7246 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7247 || mode != ptr_mode)
7248 goto binop;
7249
7250 preexpand_calls (exp);
7251 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7252 subtarget = 0;
7253
7254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7255 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7256
7257 both_summands:
7258 /* Make sure any term that's a sum with a constant comes last. */
7259 if (GET_CODE (op0) == PLUS
7260 && CONSTANT_P (XEXP (op0, 1)))
7261 {
7262 temp = op0;
7263 op0 = op1;
7264 op1 = temp;
7265 }
7266 /* If adding to a sum including a constant,
7267 associate it to put the constant outside. */
7268 if (GET_CODE (op1) == PLUS
7269 && CONSTANT_P (XEXP (op1, 1)))
7270 {
7271 rtx constant_term = const0_rtx;
7272
7273 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7274 if (temp != 0)
7275 op0 = temp;
7276 /* Ensure that MULT comes first if there is one. */
7277 else if (GET_CODE (op0) == MULT)
7278 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7279 else
7280 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7281
7282 /* Let's also eliminate constants from op0 if possible. */
7283 op0 = eliminate_constant_term (op0, &constant_term);
7284
7285 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7286 their sum should be a constant. Form it into OP1, since the
7287 result we want will then be OP0 + OP1. */
7288
7289 temp = simplify_binary_operation (PLUS, mode, constant_term,
7290 XEXP (op1, 1));
7291 if (temp != 0)
7292 op1 = temp;
7293 else
7294 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7295 }
7296
7297 /* Put a constant term last and put a multiplication first. */
7298 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7299 temp = op1, op1 = op0, op0 = temp;
7300
7301 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7302 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7303
7304 case MINUS_EXPR:
7305 /* For initializers, we are allowed to return a MINUS of two
7306 symbolic constants; handle the difference of two symbolic
7307 constants here, and more generally all cases in which both
7308 operands are constant. */
7310 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7311 && really_constant_p (TREE_OPERAND (exp, 0))
7312 && really_constant_p (TREE_OPERAND (exp, 1)))
7313 {
7314 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7315 VOIDmode, ro_modifier);
7316 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7317 VOIDmode, ro_modifier);
7318
7319 /* If the last operand is a CONST_INT, use plus_constant of
7320 the negated constant. Else make the MINUS. */
7321 if (GET_CODE (op1) == CONST_INT)
7322 return plus_constant (op0, - INTVAL (op1));
7323 else
7324 return gen_rtx_MINUS (mode, op0, op1);
7325 }
7326 /* Convert A - const to A + (-const). */
7327 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7328 {
7329 tree negated = fold (build1 (NEGATE_EXPR, type,
7330 TREE_OPERAND (exp, 1)));
7331
7332 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7333 /* If we can't negate the constant in TYPE, leave it alone and
7334 expand_binop will negate it for us. We used to try to do it
7335 here in the signed version of TYPE, but that doesn't work
7336 on POINTER_TYPEs. */;
7337 else
7338 {
7339 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7340 goto plus_expr;
7341 }
7342 }
7343 this_optab = sub_optab;
7344 goto binop;
7345
7346 case MULT_EXPR:
7347 preexpand_calls (exp);
7348 /* If first operand is constant, swap them.
7349 Thus the following special case checks need only
7350 check the second operand. */
7351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7352 {
7353 register tree t1 = TREE_OPERAND (exp, 0);
7354 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7355 TREE_OPERAND (exp, 1) = t1;
7356 }
7357
7358 /* Attempt to return something suitable for generating an
7359 indexed address, for machines that support that. */
7360
7361 if (modifier == EXPAND_SUM && mode == ptr_mode
7362 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7364 {
7365 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7366 EXPAND_SUM);
7367
7368 /* Apply distributive law if OP0 is x+c. */
7369 if (GET_CODE (op0) == PLUS
7370 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7371 return
7372 gen_rtx_PLUS
7373 (mode,
7374 gen_rtx_MULT
7375 (mode, XEXP (op0, 0),
7376 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7377 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7378 * INTVAL (XEXP (op0, 1))));
7379
7380 if (GET_CODE (op0) != REG)
7381 op0 = force_operand (op0, NULL_RTX);
7382 if (GET_CODE (op0) != REG)
7383 op0 = copy_to_mode_reg (mode, op0);
7384
7385 return
7386 gen_rtx_MULT (mode, op0,
7387 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7388 }
7389
7390 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7391 subtarget = 0;
7392
7393 /* Check for multiplying things that have been extended
7394 from a narrower type. If this machine supports multiplying
7395 in that narrower type with a result in the desired type,
7396 do it that way, and avoid the explicit type-conversion. */
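      /* For instance, (int) (short) a * (int) (short) b can be done with
	 a 16-bit by 16-bit widening multiply when the target provides
	 such an instruction.  */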
7397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7398 && TREE_CODE (type) == INTEGER_TYPE
7399 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7400 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7401 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7402 && int_fits_type_p (TREE_OPERAND (exp, 1),
7403 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7404 /* Don't use a widening multiply if a shift will do. */
7405 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7406 > HOST_BITS_PER_WIDE_INT)
7407 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7408 ||
7409 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7410 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7411 ==
7412 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7413 /* If both operands are extended, they must either both
7414 be zero-extended or both be sign-extended. */
7415 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7416 ==
7417 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7418 {
7419 enum machine_mode innermode
7420 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7421 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7422 ? smul_widen_optab : umul_widen_optab);
7423 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7424 ? umul_widen_optab : smul_widen_optab);
7425 if (mode == GET_MODE_WIDER_MODE (innermode))
7426 {
7427 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7428 {
7429 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7430 NULL_RTX, VOIDmode, 0);
7431 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7432 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7433 VOIDmode, 0);
7434 else
7435 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7436 NULL_RTX, VOIDmode, 0);
7437 goto binop2;
7438 }
7439 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7440 && innermode == word_mode)
7441 {
7442 rtx htem;
7443 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7444 NULL_RTX, VOIDmode, 0);
7445 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7447 VOIDmode, 0);
7448 else
7449 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7450 NULL_RTX, VOIDmode, 0);
7451 temp = expand_binop (mode, other_optab, op0, op1, target,
7452 unsignedp, OPTAB_LIB_WIDEN);
7453 htem = expand_mult_highpart_adjust (innermode,
7454 gen_highpart (innermode, temp),
7455 op0, op1,
7456 gen_highpart (innermode, temp),
7457 unsignedp);
7458 emit_move_insn (gen_highpart (innermode, temp), htem);
7459 return temp;
7460 }
7461 }
7462 }
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7465 return expand_mult (mode, op0, op1, target, unsignedp);
7466
7467 case TRUNC_DIV_EXPR:
7468 case FLOOR_DIV_EXPR:
7469 case CEIL_DIV_EXPR:
7470 case ROUND_DIV_EXPR:
7471 case EXACT_DIV_EXPR:
7472 preexpand_calls (exp);
7473 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7474 subtarget = 0;
7475 /* Possible optimization: compute the dividend with EXPAND_SUM
7476 then, if the divisor is constant, we can optimize the case
7477 where some terms of the dividend have coefficients divisible by it. */
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7479 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7480 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7481
7482 case RDIV_EXPR:
7483 this_optab = flodiv_optab;
7484 goto binop;
7485
7486 case TRUNC_MOD_EXPR:
7487 case FLOOR_MOD_EXPR:
7488 case CEIL_MOD_EXPR:
7489 case ROUND_MOD_EXPR:
7490 preexpand_calls (exp);
7491 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7492 subtarget = 0;
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7494 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7495 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7496
7497 case FIX_ROUND_EXPR:
7498 case FIX_FLOOR_EXPR:
7499 case FIX_CEIL_EXPR:
7500 abort (); /* Not used for C. */
7501
7502 case FIX_TRUNC_EXPR:
7503 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7504 if (target == 0)
7505 target = gen_reg_rtx (mode);
7506 expand_fix (target, op0, unsignedp);
7507 return target;
7508
7509 case FLOAT_EXPR:
7510 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7511 if (target == 0)
7512 target = gen_reg_rtx (mode);
7513 /* expand_float can't figure out what to do if FROM has VOIDmode.
7514 So give it the correct mode. With -O, cse will optimize this. */
7515 if (GET_MODE (op0) == VOIDmode)
7516 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7517 op0);
7518 expand_float (target, op0,
7519 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7520 return target;
7521
7522 case NEGATE_EXPR:
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7524 temp = expand_unop (mode, neg_optab, op0, target, 0);
7525 if (temp == 0)
7526 abort ();
7527 return temp;
7528
7529 case ABS_EXPR:
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7531
7532 /* Handle complex values specially. */
7533 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7534 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7535 return expand_complex_abs (mode, op0, target, unsignedp);
7536
7537 /* Unsigned abs is simply the operand. Testing here means we don't
7538 risk generating incorrect code below. */
7539 if (TREE_UNSIGNED (type))
7540 return op0;
7541
7542 return expand_abs (mode, op0, target,
7543 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7544
7545 case MAX_EXPR:
7546 case MIN_EXPR:
7547 target = original_target;
7548 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7549 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7550 || GET_MODE (target) != mode
7551 || (GET_CODE (target) == REG
7552 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7553 target = gen_reg_rtx (mode);
7554 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7555 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7556
7557 /* First try to do it with a special MIN or MAX instruction.
7558 If that does not win, use a conditional jump to select the proper
7559 value. */
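      /* In the conditional-jump fallback, MAX (A, B) is emitted roughly
	 as: target = A; if (target >= B) goto lab; target = B; lab:  */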
7560 this_optab = (TREE_UNSIGNED (type)
7561 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7562 : (code == MIN_EXPR ? smin_optab : smax_optab));
7563
7564 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7565 OPTAB_WIDEN);
7566 if (temp != 0)
7567 return temp;
7568
7569 /* At this point, a MEM target is no longer useful; we will get better
7570 code without it. */
7571
7572 if (GET_CODE (target) == MEM)
7573 target = gen_reg_rtx (mode);
7574
7575 if (target != op0)
7576 emit_move_insn (target, op0);
7577
7578 op0 = gen_label_rtx ();
7579
7580 /* If this mode is an integer too wide to compare properly,
7581 compare word by word. Rely on cse to optimize constant cases. */
7582 if (GET_MODE_CLASS (mode) == MODE_INT
7583 && ! can_compare_p (GE, mode, ccp_jump))
7584 {
7585 if (code == MAX_EXPR)
7586 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7587 target, op1, NULL_RTX, op0);
7588 else
7589 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7590 op1, target, NULL_RTX, op0);
7591 }
7592 else
7593 {
7594 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7595 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7596 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7597 op0);
7598 }
7599 emit_move_insn (target, op1);
7600 emit_label (op0);
7601 return target;
7602
7603 case BIT_NOT_EXPR:
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7605 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7606 if (temp == 0)
7607 abort ();
7608 return temp;
7609
7610 case FFS_EXPR:
7611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7612 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7613 if (temp == 0)
7614 abort ();
7615 return temp;
7616
7617 /* ??? Can optimize bitwise operations with one arg constant.
7618 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7619 and (a bitwise1 b) bitwise2 b (etc.),
7620 but that is probably not worthwhile. */
7621
7622 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7623 boolean values when we want in all cases to compute both of them. In
7624 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7625 as actual zero-or-1 values and then bitwise anding. In cases where
7626 there cannot be any side effects, better code would be made by
7627 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7628 how to recognize those cases. */
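      /* For instance, A && B as a TRUTH_AND_EXPR evaluates both A and B
	 to 0-or-1 values and bitwise ANDs them, whereas TRUTH_ANDIF_EXPR
	 would skip evaluating B whenever A is zero.  */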
7629
7630 case TRUTH_AND_EXPR:
7631 case BIT_AND_EXPR:
7632 this_optab = and_optab;
7633 goto binop;
7634
7635 case TRUTH_OR_EXPR:
7636 case BIT_IOR_EXPR:
7637 this_optab = ior_optab;
7638 goto binop;
7639
7640 case TRUTH_XOR_EXPR:
7641 case BIT_XOR_EXPR:
7642 this_optab = xor_optab;
7643 goto binop;
7644
7645 case LSHIFT_EXPR:
7646 case RSHIFT_EXPR:
7647 case LROTATE_EXPR:
7648 case RROTATE_EXPR:
7649 preexpand_calls (exp);
7650 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7651 subtarget = 0;
7652 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7653 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7654 unsignedp);
7655
7656 /* Could determine the answer when only additive constants differ. Also,
7657 the addition of one can be handled by changing the condition. */
7658 case LT_EXPR:
7659 case LE_EXPR:
7660 case GT_EXPR:
7661 case GE_EXPR:
7662 case EQ_EXPR:
7663 case NE_EXPR:
7664 case UNORDERED_EXPR:
7665 case ORDERED_EXPR:
7666 case UNLT_EXPR:
7667 case UNLE_EXPR:
7668 case UNGT_EXPR:
7669 case UNGE_EXPR:
7670 case UNEQ_EXPR:
7671 preexpand_calls (exp);
7672 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7673 if (temp != 0)
7674 return temp;
7675
7676 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7677 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7678 && original_target
7679 && GET_CODE (original_target) == REG
7680 && (GET_MODE (original_target)
7681 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7682 {
7683 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7684 VOIDmode, 0);
7685
7686 if (temp != original_target)
7687 temp = copy_to_reg (temp);
7688
7689 op1 = gen_label_rtx ();
7690 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7691 GET_MODE (temp), unsignedp, 0, op1);
7692 emit_move_insn (temp, const1_rtx);
7693 emit_label (op1);
7694 return temp;
7695 }
7696
7697 /* If no set-flag instruction, must generate a conditional
7698 store into a temporary variable. Drop through
7699 and handle this like && and ||. */
7700
7701 case TRUTH_ANDIF_EXPR:
7702 case TRUTH_ORIF_EXPR:
7703 if (! ignore
7704 && (target == 0 || ! safe_from_p (target, exp, 1)
7705 /* Make sure we don't have a hard reg (such as function's return
7706 value) live across basic blocks, if not optimizing. */
7707 || (!optimize && GET_CODE (target) == REG
7708 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7709 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7710
7711 if (target)
7712 emit_clr_insn (target);
7713
7714 op1 = gen_label_rtx ();
7715 jumpifnot (exp, op1);
7716
7717 if (target)
7718 emit_0_to_1_insn (target);
7719
7720 emit_label (op1);
7721 return ignore ? const0_rtx : target;
7722
7723 case TRUTH_NOT_EXPR:
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7725 /* The parser is careful to generate TRUTH_NOT_EXPR
7726 only with operands that are always zero or one. */
7727 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7728 target, 1, OPTAB_LIB_WIDEN);
7729 if (temp == 0)
7730 abort ();
7731 return temp;
7732
7733 case COMPOUND_EXPR:
7734 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7735 emit_queue ();
7736 return expand_expr (TREE_OPERAND (exp, 1),
7737 (ignore ? const0_rtx : target),
7738 VOIDmode, 0);
7739
7740 case COND_EXPR:
7741 /* If we would have a "singleton" (see below) were it not for a
7742 conversion in each arm, bring that conversion back out. */
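      /* For instance, C ? (int) a : (int) (a + 1) is rewritten as
	 (int) (C ? a : a + 1) so that the "singleton" handling below
	 can apply to the inner COND_EXPR.  */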
7743 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7744 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7745 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7746 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7747 {
7748 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7749 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7750
7751 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7752 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7753 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7754 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7755 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7756 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7757 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7758 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7759 return expand_expr (build1 (NOP_EXPR, type,
7760 build (COND_EXPR, TREE_TYPE (true),
7761 TREE_OPERAND (exp, 0),
7762 true, false)),
7763 target, tmode, modifier);
7764 }
7765
7766 {
7767 /* Note that COND_EXPRs whose type is a structure or union
7768 are required to be constructed to contain assignments of
7769 a temporary variable, so that we can evaluate them here
7770 for side effect only. If type is void, we must do likewise. */
7771
7772 /* If an arm of the branch requires a cleanup,
7773 only that cleanup is performed. */
7774
7775 tree singleton = 0;
7776 tree binary_op = 0, unary_op = 0;
7777
7778 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7779 convert it to our mode, if necessary. */
7780 if (integer_onep (TREE_OPERAND (exp, 1))
7781 && integer_zerop (TREE_OPERAND (exp, 2))
7782 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7783 {
7784 if (ignore)
7785 {
7786 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7787 ro_modifier);
7788 return const0_rtx;
7789 }
7790
7791 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7792 if (GET_MODE (op0) == mode)
7793 return op0;
7794
7795 if (target == 0)
7796 target = gen_reg_rtx (mode);
7797 convert_move (target, op0, unsignedp);
7798 return target;
7799 }
7800
7801 /* Check for X ? A + B : A. If we have this, we can copy A to the
7802 output and conditionally add B. Similarly for unary operations.
7803 Don't do this if X has side-effects because those side effects
7804 might affect A or B and the "?" operation is a sequence point in
7805 ANSI. (operand_equal_p tests for side effects.) */
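	/* For instance, in X ? A + B : A the singleton is A and BINARY_OP
	   is A + B; in X ? -A : A the singleton is A and UNARY_OP is -A.  */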
7806
7807 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7808 && operand_equal_p (TREE_OPERAND (exp, 2),
7809 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7810 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7811 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7812 && operand_equal_p (TREE_OPERAND (exp, 1),
7813 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7814 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7815 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7816 && operand_equal_p (TREE_OPERAND (exp, 2),
7817 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7818 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7819 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7820 && operand_equal_p (TREE_OPERAND (exp, 1),
7821 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7822 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7823
7824 /* If we are not to produce a result, we have no target. Otherwise,
7825 if a target was specified use it; it will not be used as an
7826 intermediate target unless it is safe. If no target, use a
7827 temporary. */
7828
7829 if (ignore)
7830 temp = 0;
7831 else if (original_target
7832 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7833 || (singleton && GET_CODE (original_target) == REG
7834 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7835 && original_target == var_rtx (singleton)))
7836 && GET_MODE (original_target) == mode
7837 #ifdef HAVE_conditional_move
7838 && (! can_conditionally_move_p (mode)
7839 || GET_CODE (original_target) == REG
7840 || TREE_ADDRESSABLE (type))
7841 #endif
7842 && ! (GET_CODE (original_target) == MEM
7843 && MEM_VOLATILE_P (original_target)))
7844 temp = original_target;
7845 else if (TREE_ADDRESSABLE (type))
7846 abort ();
7847 else
7848 temp = assign_temp (type, 0, 0, 1);
7849
7850 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7851 do the test of X as a store-flag operation, do this as
7852 A + ((X != 0) << log C). Similarly for other simple binary
7853 operators. Only do for C == 1 if BRANCH_COST is low. */
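	/* For instance, X ? A + 4 : A becomes A + ((X != 0) << 2),
	   avoiding a conditional jump entirely.  */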
7854 if (temp && singleton && binary_op
7855 && (TREE_CODE (binary_op) == PLUS_EXPR
7856 || TREE_CODE (binary_op) == MINUS_EXPR
7857 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7858 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7859 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7860 : integer_onep (TREE_OPERAND (binary_op, 1)))
7861 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7862 {
7863 rtx result;
7864 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7865 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7866 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7867 : xor_optab);
7868
7869 /* If we had X ? A : A + 1, do this as A + (X == 0).
7870
7871 We have to invert the truth value here and then put it
7872 back later if do_store_flag fails. We cannot simply copy
7873 TREE_OPERAND (exp, 0) to another variable and modify that
7874 because invert_truthvalue can modify the tree pointed to
7875 by its argument. */
7876 if (singleton == TREE_OPERAND (exp, 1))
7877 TREE_OPERAND (exp, 0)
7878 = invert_truthvalue (TREE_OPERAND (exp, 0));
7879
7880 result = do_store_flag (TREE_OPERAND (exp, 0),
7881 (safe_from_p (temp, singleton, 1)
7882 ? temp : NULL_RTX),
7883 mode, BRANCH_COST <= 1);
7884
7885 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7886 result = expand_shift (LSHIFT_EXPR, mode, result,
7887 build_int_2 (tree_log2
7888 (TREE_OPERAND
7889 (binary_op, 1)),
7890 0),
7891 (safe_from_p (temp, singleton, 1)
7892 ? temp : NULL_RTX), 0);
7893
7894 if (result)
7895 {
7896 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7897 return expand_binop (mode, boptab, op1, result, temp,
7898 unsignedp, OPTAB_LIB_WIDEN);
7899 }
7900 else if (singleton == TREE_OPERAND (exp, 1))
7901 TREE_OPERAND (exp, 0)
7902 = invert_truthvalue (TREE_OPERAND (exp, 0));
7903 }
7904
7905 do_pending_stack_adjust ();
7906 NO_DEFER_POP;
7907 op0 = gen_label_rtx ();
7908
7909 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7910 {
7911 if (temp != 0)
7912 {
7913 /* If the target conflicts with the other operand of the
7914 binary op, we can't use it. Also, we can't use the target
7915 if it is a hard register, because evaluating the condition
7916 might clobber it. */
7917 if ((binary_op
7918 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7919 || (GET_CODE (temp) == REG
7920 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7921 temp = gen_reg_rtx (mode);
7922 store_expr (singleton, temp, 0);
7923 }
7924 else
7925 expand_expr (singleton,
7926 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7927 if (singleton == TREE_OPERAND (exp, 1))
7928 jumpif (TREE_OPERAND (exp, 0), op0);
7929 else
7930 jumpifnot (TREE_OPERAND (exp, 0), op0);
7931
7932 start_cleanup_deferral ();
7933 if (binary_op && temp == 0)
7934 /* Just touch the other operand. */
7935 expand_expr (TREE_OPERAND (binary_op, 1),
7936 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7937 else if (binary_op)
7938 store_expr (build (TREE_CODE (binary_op), type,
7939 make_tree (type, temp),
7940 TREE_OPERAND (binary_op, 1)),
7941 temp, 0);
7942 else
7943 store_expr (build1 (TREE_CODE (unary_op), type,
7944 make_tree (type, temp)),
7945 temp, 0);
7946 op1 = op0;
7947 }
7948 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7949 comparison operator. If we have one of these cases, set the
7950 output to A, branch on A (cse will merge these two references),
7951 then set the output to FOO. */
7952 else if (temp
7953 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7954 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7955 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7956 TREE_OPERAND (exp, 1), 0)
7957 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7958 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7959 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7960 {
7961 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7962 temp = gen_reg_rtx (mode);
7963 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7964 jumpif (TREE_OPERAND (exp, 0), op0);
7965
7966 start_cleanup_deferral ();
7967 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7968 op1 = op0;
7969 }
7970 else if (temp
7971 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7972 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7974 TREE_OPERAND (exp, 2), 0)
7975 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7976 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7977 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7978 {
7979 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7980 temp = gen_reg_rtx (mode);
7981 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7982 jumpifnot (TREE_OPERAND (exp, 0), op0);
7983
7984 start_cleanup_deferral ();
7985 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7986 op1 = op0;
7987 }
7988 else
7989 {
7990 op1 = gen_label_rtx ();
7991 jumpifnot (TREE_OPERAND (exp, 0), op0);
7992
7993 start_cleanup_deferral ();
7994
7995 /* One branch of the cond can be void, if it never returns. For
7996 example A ? throw : E.  */
7997 if (temp != 0
7998 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7999 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8000 else
8001 expand_expr (TREE_OPERAND (exp, 1),
8002 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8003 end_cleanup_deferral ();
8004 emit_queue ();
8005 emit_jump_insn (gen_jump (op1));
8006 emit_barrier ();
8007 emit_label (op0);
8008 start_cleanup_deferral ();
8009 if (temp != 0
8010 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8011 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8012 else
8013 expand_expr (TREE_OPERAND (exp, 2),
8014 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8015 }
8016
8017 end_cleanup_deferral ();
8018
8019 emit_queue ();
8020 emit_label (op1);
8021 OK_DEFER_POP;
8022
8023 return temp;
8024 }
8025
8026 case TARGET_EXPR:
8027 {
8028 /* Something needs to be initialized, but we didn't know
8029 where that thing was when building the tree. For example,
8030 it could be the return value of a function, or a parameter
8031 to a function which is laid down on the stack, or a temporary
8032 variable which must be passed by reference.
8033
8034 We guarantee that the expression will either be constructed
8035 or copied into our original target. */
8036
8037 tree slot = TREE_OPERAND (exp, 0);
8038 tree cleanups = NULL_TREE;
8039 tree exp1;
8040
8041 if (TREE_CODE (slot) != VAR_DECL)
8042 abort ();
8043
8044 if (! ignore)
8045 target = original_target;
8046
8047 /* Set this here so that if we get a target that refers to a
8048 register variable that's already been used, put_reg_into_stack
8049 knows that it should fix up those uses. */
8050 TREE_USED (slot) = 1;
8051
8052 if (target == 0)
8053 {
8054 if (DECL_RTL (slot) != 0)
8055 {
8056 target = DECL_RTL (slot);
8057 /* If we have already expanded the slot, don't do
8058 it again. (mrs) */
8059 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8060 return target;
8061 }
8062 else
8063 {
8064 target = assign_temp (type, 2, 0, 1);
8065 /* All temp slots at this level must not conflict. */
8066 preserve_temp_slots (target);
8067 DECL_RTL (slot) = target;
8068 if (TREE_ADDRESSABLE (slot))
8069 {
8070 TREE_ADDRESSABLE (slot) = 0;
8071 mark_addressable (slot);
8072 }
8073
8074 /* Since SLOT is not known to the called function
8075 to belong to its stack frame, we must build an explicit
8076 cleanup. This case occurs when we must build up a reference
8077 to pass the reference as an argument. In this case,
8078 it is very likely that such a reference need not be
8079 built here. */
8080
8081 if (TREE_OPERAND (exp, 2) == 0)
8082 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8083 cleanups = TREE_OPERAND (exp, 2);
8084 }
8085 }
8086 else
8087 {
8088 /* This case does occur when expanding a parameter which
8089 needs to be constructed on the stack. The target
8090 is the actual stack address that we want to initialize.
8091 The function we call will perform the cleanup in this case. */
8092
8093 /* If we have already assigned it space, use that space,
8094 not the target that we were passed in, as our target
8095 parameter is only a hint. */
8096 if (DECL_RTL (slot) != 0)
8097 {
8098 target = DECL_RTL (slot);
8099 /* If we have already expanded the slot, don't do
8100 it again. (mrs) */
8101 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8102 return target;
8103 }
8104 else
8105 {
8106 DECL_RTL (slot) = target;
8107 /* If we must have an addressable slot, then make sure that
8108 the RTL that we just stored in slot is OK. */
8109 if (TREE_ADDRESSABLE (slot))
8110 {
8111 TREE_ADDRESSABLE (slot) = 0;
8112 mark_addressable (slot);
8113 }
8114 }
8115 }
8116
8117 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8118 /* Mark it as expanded. */
8119 TREE_OPERAND (exp, 1) = NULL_TREE;
8120
8121 store_expr (exp1, target, 0);
8122
8123 expand_decl_cleanup (NULL_TREE, cleanups);
8124
8125 return target;
8126 }
8127
8128 case INIT_EXPR:
8129 {
8130 tree lhs = TREE_OPERAND (exp, 0);
8131 tree rhs = TREE_OPERAND (exp, 1);
8132 tree noncopied_parts = 0;
8133 tree lhs_type = TREE_TYPE (lhs);
8134
8135 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8136 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8137 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8138 TYPE_NONCOPIED_PARTS (lhs_type));
8139 while (noncopied_parts != 0)
8140 {
8141 expand_assignment (TREE_VALUE (noncopied_parts),
8142 TREE_PURPOSE (noncopied_parts), 0, 0);
8143 noncopied_parts = TREE_CHAIN (noncopied_parts);
8144 }
8145 return temp;
8146 }
8147
8148 case MODIFY_EXPR:
8149 {
8150 /* If lhs is complex, expand calls in rhs before computing it.
8151 That's so we don't compute a pointer and save it over a call.
8152 If lhs is simple, compute it first so we can give it as a
8153 target if the rhs is just a call. This avoids an extra temp and copy
8154 and that prevents a partial-subsumption which makes bad code.
8155 Actually we could treat component_ref's of vars like vars. */
8156
8157 tree lhs = TREE_OPERAND (exp, 0);
8158 tree rhs = TREE_OPERAND (exp, 1);
8159 tree noncopied_parts = 0;
8160 tree lhs_type = TREE_TYPE (lhs);
8161
8162 temp = 0;
8163
8164 if (TREE_CODE (lhs) != VAR_DECL
8165 && TREE_CODE (lhs) != RESULT_DECL
8166 && TREE_CODE (lhs) != PARM_DECL
8167 && ! (TREE_CODE (lhs) == INDIRECT_REF
8168 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8169 preexpand_calls (exp);
8170
8171 /* Check for |= or &= of a bitfield of size one into another bitfield
8172 of size 1. In this case, (unless we need the result of the
8173 assignment) we can do this more efficiently with a
8174 test followed by an assignment, if necessary.
8175
8176 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8177 things change so we do, this code should be enhanced to
8178 support it. */
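	/* For instance, A.B |= C.D (both one-bit fields) is emitted
	   roughly as: if (C.D) A.B = 1; and A.B &= C.D roughly as:
	   if (! C.D) A.B = 0;  */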
8179 if (ignore
8180 && TREE_CODE (lhs) == COMPONENT_REF
8181 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8182 || TREE_CODE (rhs) == BIT_AND_EXPR)
8183 && TREE_OPERAND (rhs, 0) == lhs
8184 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8185 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8186 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8187 {
8188 rtx label = gen_label_rtx ();
8189
8190 do_jump (TREE_OPERAND (rhs, 1),
8191 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8192 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8193 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8194 (TREE_CODE (rhs) == BIT_IOR_EXPR
8195 ? integer_one_node
8196 : integer_zero_node)),
8197 0, 0);
8198 do_pending_stack_adjust ();
8199 emit_label (label);
8200 return const0_rtx;
8201 }
8202
8203 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8204 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8205 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8206 TYPE_NONCOPIED_PARTS (lhs_type));
8207
8208 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8209 while (noncopied_parts != 0)
8210 {
8211 expand_assignment (TREE_PURPOSE (noncopied_parts),
8212 TREE_VALUE (noncopied_parts), 0, 0);
8213 noncopied_parts = TREE_CHAIN (noncopied_parts);
8214 }
8215 return temp;
8216 }
8217
8218 case RETURN_EXPR:
8219 if (!TREE_OPERAND (exp, 0))
8220 expand_null_return ();
8221 else
8222 expand_return (TREE_OPERAND (exp, 0));
8223 return const0_rtx;
8224
8225 case PREINCREMENT_EXPR:
8226 case PREDECREMENT_EXPR:
8227 return expand_increment (exp, 0, ignore);
8228
8229 case POSTINCREMENT_EXPR:
8230 case POSTDECREMENT_EXPR:
8231 /* Faster to treat as pre-increment if result is not used. */
8232 return expand_increment (exp, ! ignore, ignore);
8233
8234 case ADDR_EXPR:
8235 /* If nonzero, TEMP will be set to the address of something that might
8236 be a MEM corresponding to a stack slot. */
8237 temp = 0;
8238
8239 /* Are we taking the address of a nested function? */
8240 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8241 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8242 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8243 && ! TREE_STATIC (exp))
8244 {
8245 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8246 op0 = force_operand (op0, target);
8247 }
8248 /* If we are taking the address of something erroneous, just
8249 return a zero. */
8250 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8251 return const0_rtx;
8252 else
8253 {
8254 /* We make sure to pass const0_rtx down if we came in with
8255 ignore set, to avoid doing the cleanups twice for something. */
8256 op0 = expand_expr (TREE_OPERAND (exp, 0),
8257 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8258 (modifier == EXPAND_INITIALIZER
8259 ? modifier : EXPAND_CONST_ADDRESS));
8260
8261 /* If we are going to ignore the result, OP0 will have been set
8262 to const0_rtx, so just return it. Don't get confused and
8263 think we are taking the address of the constant. */
8264 if (ignore)
8265 return op0;
8266
8267 op0 = protect_from_queue (op0, 0);
8268
8269 /* We would like the object in memory. If it is a constant, we can
8270 have it be statically allocated into memory. For a non-constant,
8271 we need to allocate some memory and store the value into it. */
8272
8273 if (CONSTANT_P (op0))
8274 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8275 op0);
8276 else if (GET_CODE (op0) == MEM)
8277 {
8278 mark_temp_addr_taken (op0);
8279 temp = XEXP (op0, 0);
8280 }
8281
8282 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8283 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8284 {
8285 /* If this object is in a register, it must not
8286 be BLKmode. */
8287 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8288 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8289
8290 mark_temp_addr_taken (memloc);
8291 emit_move_insn (memloc, op0);
8292 op0 = memloc;
8293 }
8294
8295 if (GET_CODE (op0) != MEM)
8296 abort ();
8297
8298 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8299 {
8300 temp = XEXP (op0, 0);
8301 #ifdef POINTERS_EXTEND_UNSIGNED
8302 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8303 && mode == ptr_mode)
8304 temp = convert_memory_address (ptr_mode, temp);
8305 #endif
8306 return temp;
8307 }
8308
8309 op0 = force_operand (XEXP (op0, 0), target);
8310 }
8311
8312 if (flag_force_addr && GET_CODE (op0) != REG)
8313 op0 = force_reg (Pmode, op0);
8314
8315 if (GET_CODE (op0) == REG
8316 && ! REG_USERVAR_P (op0))
8317 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8318
8319 /* If we might have had a temp slot, add an equivalent address
8320 for it. */
8321 if (temp != 0)
8322 update_temp_slot_address (temp, op0);
8323
8324 #ifdef POINTERS_EXTEND_UNSIGNED
8325 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8326 && mode == ptr_mode)
8327 op0 = convert_memory_address (ptr_mode, op0);
8328 #endif
8329
8330 return op0;
8331
8332 case ENTRY_VALUE_EXPR:
8333 abort ();
8334
8335 /* COMPLEX type for Extended Pascal & Fortran */
8336 case COMPLEX_EXPR:
8337 {
8338 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8339 rtx insns;
8340
8341 /* Expand the two operands (real and imaginary parts) to rtx. */
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8343 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8344
8345 if (! target)
8346 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8347
8348 start_sequence ();
8349
8350 /* Move the real (op0) and imaginary (op1) parts to their location. */
8351 emit_move_insn (gen_realpart (mode, target), op0);
8352 emit_move_insn (gen_imagpart (mode, target), op1);
8353
8354 insns = get_insns ();
8355 end_sequence ();
8356
8357 /* Complex construction should appear as a single unit. */
8358 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8359 each with a separate pseudo as destination.
8360 It's not correct for flow to treat them as a unit. */
8361 if (GET_CODE (target) != CONCAT)
8362 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8363 else
8364 emit_insns (insns);
8365
8366 return target;
8367 }
8368
8369 case REALPART_EXPR:
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8371 return gen_realpart (mode, op0);
8372
8373 case IMAGPART_EXPR:
8374 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8375 return gen_imagpart (mode, op0);
8376
8377 case CONJ_EXPR:
8378 {
8379 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8380 rtx imag_t;
8381 rtx insns;
8382
8383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8384
8385 if (! target)
8386 target = gen_reg_rtx (mode);
8387
8388 start_sequence ();
8389
8390 /* Store the realpart and the negated imagpart to target. */
8391 emit_move_insn (gen_realpart (partmode, target),
8392 gen_realpart (partmode, op0));
8393
8394 imag_t = gen_imagpart (partmode, target);
8395 temp = expand_unop (partmode, neg_optab,
8396 gen_imagpart (partmode, op0), imag_t, 0);
8397 if (temp != imag_t)
8398 emit_move_insn (imag_t, temp);
8399
8400 insns = get_insns ();
8401 end_sequence ();
8402
8403 /* Conjugate should appear as a single unit.
8404 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8405 each with a separate pseudo as destination.
8406 It's not correct for flow to treat them as a unit. */
8407 if (GET_CODE (target) != CONCAT)
8408 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8409 else
8410 emit_insns (insns);
8411
8412 return target;
8413 }
8414
8415 case TRY_CATCH_EXPR:
8416 {
8417 tree handler = TREE_OPERAND (exp, 1);
8418
8419 expand_eh_region_start ();
8420
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8422
8423 expand_eh_region_end (handler);
8424
8425 return op0;
8426 }
8427
8428 case TRY_FINALLY_EXPR:
8429 {
8430 tree try_block = TREE_OPERAND (exp, 0);
8431 tree finally_block = TREE_OPERAND (exp, 1);
8432 rtx finally_label = gen_label_rtx ();
8433 rtx done_label = gen_label_rtx ();
8434 rtx return_link = gen_reg_rtx (Pmode);
8435 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8436 (tree) finally_label, (tree) return_link);
8437 TREE_SIDE_EFFECTS (cleanup) = 1;
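	/* The normal path through the code emitted below is: expand the
	   try block, then (via the cleanup just built) call the finally
	   block as a mini-subroutine with RETURN_LINK as its return
	   address, and finally jump to DONE_LABEL.  Abnormal exits from
	   the try block reach the finally block the same way.  */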
8438
8439 /* Start a new binding layer that will keep track of all cleanup
8440 actions to be performed. */
8441 expand_start_bindings (2);
8442
8443 target_temp_slot_level = temp_slot_level;
8444
8445 expand_decl_cleanup (NULL_TREE, cleanup);
8446 op0 = expand_expr (try_block, target, tmode, modifier);
8447
8448 preserve_temp_slots (op0);
8449 expand_end_bindings (NULL_TREE, 0, 0);
8450 emit_jump (done_label);
8451 emit_label (finally_label);
8452 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8453 emit_indirect_jump (return_link);
8454 emit_label (done_label);
8455 return op0;
8456 }
8457
8458 case GOTO_SUBROUTINE_EXPR:
8459 {
8460 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8461 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8462 rtx return_address = gen_label_rtx ();
8463 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8464 emit_jump (subr);
8465 emit_label (return_address);
8466 return const0_rtx;
8467 }
8468
8469 case POPDCC_EXPR:
8470 {
8471 rtx dcc = get_dynamic_cleanup_chain ();
8472 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8473 return const0_rtx;
8474 }
8475
8476 case POPDHC_EXPR:
8477 {
8478 rtx dhc = get_dynamic_handler_chain ();
8479 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8480 return const0_rtx;
8481 }
8482
8483 case VA_ARG_EXPR:
8484 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8485
8486 default:
8487 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8488 }
8489
8490 /* Here to do an ordinary binary operator, generating an instruction
8491 from the optab already placed in `this_optab'. */
8492 binop:
8493 preexpand_calls (exp);
8494 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8495 subtarget = 0;
8496 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8497 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8498 binop2:
8499 temp = expand_binop (mode, this_optab, op0, op1, target,
8500 unsignedp, OPTAB_LIB_WIDEN);
8501 if (temp == 0)
8502 abort ();
8503 return temp;
8504 }
8505 \f
8506 /* Similar to expand_expr, except that we don't specify a target, target
8507 mode, or modifier and we return the alignment of the inner type. This is
8508 used in cases where it is not necessary to align the result to the
8509 alignment of its type as long as we know the alignment of the result, for
8510 example for comparisons of BLKmode values. */
8511
8512 static rtx
8513 expand_expr_unaligned (exp, palign)
8514 register tree exp;
8515 unsigned int *palign;
8516 {
8517 register rtx op0;
8518 tree type = TREE_TYPE (exp);
8519 register enum machine_mode mode = TYPE_MODE (type);
8520
8521 /* Default the alignment we return to that of the type. */
8522 *palign = TYPE_ALIGN (type);
8523
8524 /* The only case in which we do anything special is if the resulting mode
8525 is BLKmode. */
8526 if (mode != BLKmode)
8527 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8528
8529 switch (TREE_CODE (exp))
8530 {
8531 case CONVERT_EXPR:
8532 case NOP_EXPR:
8533 case NON_LVALUE_EXPR:
8534 /* Conversions between BLKmode values don't change the underlying
8535 alignment or value. */
8536 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8537 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8538 break;
8539
8540 case ARRAY_REF:
8541 /* Much of the code for this case is copied directly from expand_expr.
8542 We need to duplicate it here because we will do something different
8543 in the fall-through case, so we need to handle the same exceptions
8544 it does. */
8545 {
8546 tree array = TREE_OPERAND (exp, 0);
8547 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8548 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8549 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8550 HOST_WIDE_INT i;
8551
8552 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8553 abort ();
8554
8555 /* Optimize the special case of a zero lower bound.
8556
8557 We convert the low_bound to sizetype to avoid some problems
8558 with constant folding. (E.g. suppose the lower bound is 1,
8559 and its mode is QI. Without the conversion, (ARRAY
8560 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8561 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8562
8563 if (! integer_zerop (low_bound))
8564 index = size_diffop (index, convert (sizetype, low_bound));
8565
8566 /* If this is a constant index into a constant array,
8567 just get the value from the array. Handle both the cases when
8568 we have an explicit constructor and when our operand is a variable
8569 that was declared const. */
8570
8571 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8572 && 0 > compare_tree_int (index,
8573 list_length (CONSTRUCTOR_ELTS
8574 (TREE_OPERAND (exp, 0)))))
8575 {
8576 tree elem;
8577
8578 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8579 i = TREE_INT_CST_LOW (index);
8580 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8581 ;
8582
8583 if (elem)
8584 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8585 }
8586
8587 else if (optimize >= 1
8588 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8589 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8590 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8591 {
8592 if (TREE_CODE (index) == INTEGER_CST)
8593 {
8594 tree init = DECL_INITIAL (array);
8595
8596 if (TREE_CODE (init) == CONSTRUCTOR)
8597 {
8598 tree elem;
8599
8600 for (elem = CONSTRUCTOR_ELTS (init);
8601 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8602 elem = TREE_CHAIN (elem))
8603 ;
8604
8605 if (elem)
8606 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8607 palign);
8608 }
8609 }
8610 }
8611 }
8612
8613 /* ... fall through ... */
8614
8615 case COMPONENT_REF:
8616 case BIT_FIELD_REF:
8617 /* If the operand is a CONSTRUCTOR, we can just extract the
8618 appropriate field if it is present. Don't do this if we have
8619 already written the data since we want to refer to that copy
8620 and varasm.c assumes that's what we'll do. */
8621 if (TREE_CODE (exp) != ARRAY_REF
8622 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8623 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8624 {
8625 tree elt;
8626
8627 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8628 elt = TREE_CHAIN (elt))
8629 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8630 /* Note that unlike the case in expand_expr, we know this is
8631 BLKmode and hence not an integer. */
8632 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8633 }
8634
8635 {
8636 enum machine_mode mode1;
8637 HOST_WIDE_INT bitsize, bitpos;
8638 tree offset;
8639 int volatilep = 0;
8640 unsigned int alignment;
8641 int unsignedp;
8642 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8643 &mode1, &unsignedp, &volatilep,
8644 &alignment);
8645
8646 /* If we got back the original object, something is wrong. Perhaps
8647 we are evaluating an expression too early. In any event, don't
8648 infinitely recurse. */
8649 if (tem == exp)
8650 abort ();
8651
8652 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8653
8654 /* If this is a constant, put it into a register if it is a legitimate
8655 constant and OFFSET is 0; otherwise put it into memory. */
8656 if (CONSTANT_P (op0))
8657 {
8658 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8659
8660 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8661 && offset == 0)
8662 op0 = force_reg (inner_mode, op0);
8663 else
8664 op0 = validize_mem (force_const_mem (inner_mode, op0));
8665 }
8666
8667 if (offset != 0)
8668 {
8669 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8670
8671 /* If this object is in a register, put it into memory.
8672 This case can't occur in C, but can in Ada if we have
8673 unchecked conversion of an expression from a scalar type to
8674 an array or record type. */
8675 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8676 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8677 {
8678 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8679
8680 mark_temp_addr_taken (memloc);
8681 emit_move_insn (memloc, op0);
8682 op0 = memloc;
8683 }
8684
8685 if (GET_CODE (op0) != MEM)
8686 abort ();
8687
8688 if (GET_MODE (offset_rtx) != ptr_mode)
8689 {
8690 #ifdef POINTERS_EXTEND_UNSIGNED
8691 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8692 #else
8693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8694 #endif
8695 }
8696
8697 op0 = change_address (op0, VOIDmode,
8698 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8699 force_reg (ptr_mode,
8700 offset_rtx)));
8701 }
8702
8703 /* Don't forget about volatility even if this is a bitfield. */
8704 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8705 {
8706 op0 = copy_rtx (op0);
8707 MEM_VOLATILE_P (op0) = 1;
8708 }
8709
8710 /* Check the access. */
8711 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8712 {
8713 rtx to;
8714 int size;
8715
8716 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8717 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8718
8719 /* Check the access rights of the pointer. */
8720 if (size > BITS_PER_UNIT)
8721 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8722 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8723 TYPE_MODE (sizetype),
8724 GEN_INT (MEMORY_USE_RO),
8725 TYPE_MODE (integer_type_node));
8726 }
8727
8728 /* In cases where an aligned union has an unaligned object
8729 as a field, we might be extracting a BLKmode value from
8730 an integer-mode (e.g., SImode) object. Handle this case
8731 by doing the extract into an object as wide as the field
8732 (which we know to be the width of a basic mode), then
8733 storing into memory, and changing the mode to BLKmode.
8734 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8735 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8736 if (mode1 == VOIDmode
8737 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8738 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8739 && (TYPE_ALIGN (type) > alignment
8740 || bitpos % TYPE_ALIGN (type) != 0)))
8741 {
8742 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8743
8744 if (ext_mode == BLKmode)
8745 {
8746 /* In this case, BITPOS must start at a byte boundary. */
8747 if (GET_CODE (op0) != MEM
8748 || bitpos % BITS_PER_UNIT != 0)
8749 abort ();
8750
8751 op0 = change_address (op0, VOIDmode,
8752 plus_constant (XEXP (op0, 0),
8753 bitpos / BITS_PER_UNIT));
8754 }
8755 else
8756 {
8757 rtx new = assign_stack_temp (ext_mode,
8758 bitsize / BITS_PER_UNIT, 0);
8759
8760 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8761 unsignedp, NULL_RTX, ext_mode,
8762 ext_mode, alignment,
8763 int_size_in_bytes (TREE_TYPE (tem)));
8764
8765 /* If the result is a record type and BITSIZE is narrower than
8766 the mode of OP0, an integral mode, and this is a big endian
8767 machine, we must put the field into the high-order bits. */
8768 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8769 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8770 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8771 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8772 size_int (GET_MODE_BITSIZE
8773 (GET_MODE (op0))
8774 - bitsize),
8775 op0, 1);
8776
8777
8778 emit_move_insn (new, op0);
8779 op0 = copy_rtx (new);
8780 PUT_MODE (op0, BLKmode);
8781 }
8782 }
8783 else
8784 /* Get a reference to just this component. */
8785 op0 = change_address (op0, mode1,
8786 plus_constant (XEXP (op0, 0),
8787 (bitpos / BITS_PER_UNIT)));
8788
8789 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8790
8791 /* Adjust the alignment in case the bit position is not
8792 a multiple of the alignment of the inner object. */
8793 while (bitpos % alignment != 0)
8794 alignment >>= 1;
8795
8796 if (GET_CODE (XEXP (op0, 0)) == REG)
8797 mark_reg_pointer (XEXP (op0, 0), alignment);
8798
8799 MEM_IN_STRUCT_P (op0) = 1;
8800 MEM_VOLATILE_P (op0) |= volatilep;
8801
8802 *palign = alignment;
8803 return op0;
8804 }
8805
8806 default:
8807 break;
8808
8809 }
8810
8811 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8812 }
8813 \f
8814 /* Return the tree node if ARG corresponds to a string constant or zero
8815 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8816 in bytes within the string that ARG is accessing. The type of the
8817 offset will be `sizetype'. */
8818
8819 tree
8820 string_constant (arg, ptr_offset)
8821 tree arg;
8822 tree *ptr_offset;
8823 {
8824 STRIP_NOPS (arg);
8825
8826 if (TREE_CODE (arg) == ADDR_EXPR
8827 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8828 {
8829 *ptr_offset = size_zero_node;
8830 return TREE_OPERAND (arg, 0);
8831 }
8832 else if (TREE_CODE (arg) == PLUS_EXPR)
8833 {
8834 tree arg0 = TREE_OPERAND (arg, 0);
8835 tree arg1 = TREE_OPERAND (arg, 1);
8836
8837 STRIP_NOPS (arg0);
8838 STRIP_NOPS (arg1);
8839
8840 if (TREE_CODE (arg0) == ADDR_EXPR
8841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8842 {
8843 *ptr_offset = convert (sizetype, arg1);
8844 return TREE_OPERAND (arg0, 0);
8845 }
8846 else if (TREE_CODE (arg1) == ADDR_EXPR
8847 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8848 {
8849 *ptr_offset = convert (sizetype, arg0);
8850 return TREE_OPERAND (arg1, 0);
8851 }
8852 }
8853
8854 return 0;
8855 }
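/* Editorial note, not part of the original source: an illustrative sketch of
   source-level expressions whose trees string_constant recognizes, assuming
   the usual ADDR_EXPR/PLUS_EXPR forms built by the C front end.  The variable
   names are hypothetical.  */
#if 0
const char *p = "hello";       /* ADDR_EXPR <STRING_CST "hello">; offset 0.  */
const char *q = "hello" + 2;   /* PLUS_EXPR of the ADDR_EXPR and 2; offset 2.  */
const char *r = 2 + "hello";   /* Operands swapped; handled by the second arm.  */
#endif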
8856 \f
8857 /* Expand code for a post- or pre- increment or decrement
8858 and return the RTX for the result.
8859 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8860
8861 static rtx
8862 expand_increment (exp, post, ignore)
8863 register tree exp;
8864 int post, ignore;
8865 {
8866 register rtx op0, op1;
8867 register rtx temp, value;
8868 register tree incremented = TREE_OPERAND (exp, 0);
8869 optab this_optab = add_optab;
8870 int icode;
8871 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8872 int op0_is_copy = 0;
8873 int single_insn = 0;
8874 /* 1 means we can't store into OP0 directly,
8875 because it is a subreg narrower than a word,
8876 and we don't dare clobber the rest of the word. */
8877 int bad_subreg = 0;
8878
8879 /* Stabilize any component ref that might need to be
8880 evaluated more than once below. */
8881 if (!post
8882 || TREE_CODE (incremented) == BIT_FIELD_REF
8883 || (TREE_CODE (incremented) == COMPONENT_REF
8884 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8885 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8886 incremented = stabilize_reference (incremented);
8887 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8888 ones into save exprs so that they don't accidentally get evaluated
8889 more than once by the code below. */
8890 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8891 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8892 incremented = save_expr (incremented);
8893
8894 /* Compute the operands as RTX.
8895 Note whether OP0 is the actual lvalue or a copy of it:
8896 I believe it is a copy iff it is a register or subreg
8897 and insns were generated in computing it. */
8898
8899 temp = get_last_insn ();
8900 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8901
8902 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8903 in place but instead must do sign- or zero-extension during assignment,
8904 so we copy it into a new register and let the code below use it as
8905 a copy.
8906
8907 Note that we can safely modify this SUBREG since it is known not to be
8908 shared (it was made by the expand_expr call above). */
8909
8910 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8911 {
8912 if (post)
8913 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8914 else
8915 bad_subreg = 1;
8916 }
8917 else if (GET_CODE (op0) == SUBREG
8918 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8919 {
8920 /* We cannot increment this SUBREG in place. If we are
8921 post-incrementing, get a copy of the old value. Otherwise,
8922 just mark that we cannot increment in place. */
8923 if (post)
8924 op0 = copy_to_reg (op0);
8925 else
8926 bad_subreg = 1;
8927 }
8928
8929 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8930 && temp != get_last_insn ());
8931 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8932 EXPAND_MEMORY_USE_BAD);
8933
8934 /* Decide whether incrementing or decrementing. */
8935 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8936 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8937 this_optab = sub_optab;
8938
8939 /* Convert decrement by a constant into a negative increment. */
8940 if (this_optab == sub_optab
8941 && GET_CODE (op1) == CONST_INT)
8942 {
8943 op1 = GEN_INT (- INTVAL (op1));
8944 this_optab = add_optab;
8945 }
8946
8947 /* For a preincrement, see if we can do this with a single instruction. */
8948 if (!post)
8949 {
8950 icode = (int) this_optab->handlers[(int) mode].insn_code;
8951 if (icode != (int) CODE_FOR_nothing
8952 /* Make sure that OP0 is valid for operands 0 and 1
8953 of the insn we want to queue. */
8954 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8955 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8956 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8957 single_insn = 1;
8958 }
8959
8960 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8961 then we cannot just increment OP0. We must therefore contrive to
8962 increment the original value. Then, for postincrement, we can return
8963 OP0 since it is a copy of the old value. For preincrement, expand here
8964 unless we can do it with a single insn.
8965
8966 Likewise if storing directly into OP0 would clobber high bits
8967 we need to preserve (bad_subreg). */
8968 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8969 {
8970 /* This is the easiest way to increment the value wherever it is.
8971 Problems with multiple evaluation of INCREMENTED are prevented
8972 because either (1) it is a component_ref or preincrement,
8973 in which case it was stabilized above, or (2) it is an array_ref
8974 with constant index in an array in a register, which is
8975 safe to reevaluate. */
8976 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8977 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8978 ? MINUS_EXPR : PLUS_EXPR),
8979 TREE_TYPE (exp),
8980 incremented,
8981 TREE_OPERAND (exp, 1));
8982
8983 while (TREE_CODE (incremented) == NOP_EXPR
8984 || TREE_CODE (incremented) == CONVERT_EXPR)
8985 {
8986 newexp = convert (TREE_TYPE (incremented), newexp);
8987 incremented = TREE_OPERAND (incremented, 0);
8988 }
8989
8990 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8991 return post ? op0 : temp;
8992 }
8993
8994 if (post)
8995 {
8996 /* We have a true reference to the value in OP0.
8997 If there is an insn to add or subtract in this mode, queue it.
8998 Queueing the increment insn avoids the register shuffling
8999 that often results if we must increment now and first save
9000 the old value for subsequent use. */
9001
9002 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9003 op0 = stabilize (op0);
9004 #endif
9005
9006 icode = (int) this_optab->handlers[(int) mode].insn_code;
9007 if (icode != (int) CODE_FOR_nothing
9008 /* Make sure that OP0 is valid for operands 0 and 1
9009 of the insn we want to queue. */
9010 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9011 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9012 {
9013 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9014 op1 = force_reg (mode, op1);
9015
9016 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9017 }
9018 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9019 {
9020 rtx addr = (general_operand (XEXP (op0, 0), mode)
9021 ? force_reg (Pmode, XEXP (op0, 0))
9022 : copy_to_reg (XEXP (op0, 0)));
9023 rtx temp, result;
9024
9025 op0 = change_address (op0, VOIDmode, addr);
9026 temp = force_reg (GET_MODE (op0), op0);
9027 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9028 op1 = force_reg (mode, op1);
9029
9030 /* The increment queue is LIFO, thus we have to `queue'
9031 the instructions in reverse order. */
9032 enqueue_insn (op0, gen_move_insn (op0, temp));
9033 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9034 return result;
9035 }
9036 }
9037
9038 /* Preincrement, or we can't increment with one simple insn. */
9039 if (post)
9040 /* Save a copy of the value before inc or dec, to return it later. */
9041 temp = value = copy_to_reg (op0);
9042 else
9043 /* Arrange to return the incremented value. */
9044 /* Copy the rtx because expand_binop will protect from the queue,
9045 and the results of that would be invalid for us to return
9046 if our caller does emit_queue before using our result. */
9047 temp = copy_rtx (value = op0);
9048
9049 /* Increment however we can. */
9050 op1 = expand_binop (mode, this_optab, value, op1,
9051 current_function_check_memory_usage ? NULL_RTX : op0,
9052 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9053 /* Make sure the value is stored into OP0. */
9054 if (op1 != op0)
9055 emit_move_insn (op0, op1);
9056
9057 return temp;
9058 }
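/* Editorial note, not part of the original source: a small sketch of the
   source-level behaviour expand_increment has to preserve.  For a
   postincrement the add is queued so the old value can be used first; for a
   preincrement the add happens before the value is used.  The helper name is
   hypothetical.  */
#if 0
static int
increment_flavours (void)
{
  int a[4] = { 10, 11, 12, 13 };
  int i = 0;
  int x = a[i++];   /* x == 10; the add to i is logically deferred until after the use.  */
  int y = a[++i];   /* i becomes 2 first, so y == 12.  */
  return x + y;     /* 22.  */
}
#endif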
9059 \f
9060 /* Expand all function calls contained within EXP, innermost ones first.
9061 But don't look within expressions that have sequence points.
9062 For each CALL_EXPR, record the rtx for its value
9063 in the CALL_EXPR_RTL field. */
9064
9065 static void
9066 preexpand_calls (exp)
9067 tree exp;
9068 {
9069 register int nops, i;
9070 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9071
9072 if (! do_preexpand_calls)
9073 return;
9074
9075 /* Only expressions and references can contain calls. */
9076
9077 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9078 return;
9079
9080 switch (TREE_CODE (exp))
9081 {
9082 case CALL_EXPR:
9083 /* Do nothing if already expanded. */
9084 if (CALL_EXPR_RTL (exp) != 0
9085 /* Do nothing if the call returns a variable-sized object. */
9086 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9088 /* Do nothing to built-in functions. */
9089 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9090 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9091 == FUNCTION_DECL)
9092 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9093 return;
9094
9095 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9096 return;
9097
9098 case COMPOUND_EXPR:
9099 case COND_EXPR:
9100 case TRUTH_ANDIF_EXPR:
9101 case TRUTH_ORIF_EXPR:
9102 /* If we find one of these, then we can be sure
9103 the adjust will be done for it (since it makes jumps).
9104 Do it now, so that if this is inside an argument
9105 of a function, we don't get the stack adjustment
9106 after some other args have already been pushed. */
9107 do_pending_stack_adjust ();
9108 return;
9109
9110 case BLOCK:
9111 case RTL_EXPR:
9112 case WITH_CLEANUP_EXPR:
9113 case CLEANUP_POINT_EXPR:
9114 case TRY_CATCH_EXPR:
9115 return;
9116
9117 case SAVE_EXPR:
9118 if (SAVE_EXPR_RTL (exp) != 0)
9119 return;
9120
9121 default:
9122 break;
9123 }
9124
9125 nops = tree_code_length[(int) TREE_CODE (exp)];
9126 for (i = 0; i < nops; i++)
9127 if (TREE_OPERAND (exp, i) != 0)
9128 {
9129 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9130 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9131 It doesn't happen before the call is made. */
9132 ;
9133 else
9134 {
9135 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9136 if (type == 'e' || type == '<' || type == '1' || type == '2'
9137 || type == 'r')
9138 preexpand_calls (TREE_OPERAND (exp, i));
9139 }
9140 }
9141 }
9142 \f
9143 /* At the start of a function, record that we have no previously-pushed
9144 arguments waiting to be popped. */
9145
9146 void
9147 init_pending_stack_adjust ()
9148 {
9149 pending_stack_adjust = 0;
9150 }
9151
9152 /* When exiting from a function, if safe, clear out any pending stack adjust
9153 so the adjustment won't get done.
9154
9155 Note, if the current function calls alloca, then it must have a
9156 frame pointer regardless of the value of flag_omit_frame_pointer. */
9157
9158 void
9159 clear_pending_stack_adjust ()
9160 {
9161 #ifdef EXIT_IGNORE_STACK
9162 if (optimize > 0
9163 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9164 && EXIT_IGNORE_STACK
9165 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9166 && ! flag_inline_functions)
9167 {
9168 stack_pointer_delta -= pending_stack_adjust;
9169 pending_stack_adjust = 0;
9170 }
9171 #endif
9172 }
9173
9174 /* Pop any previously-pushed arguments that have not been popped yet. */
9175
9176 void
9177 do_pending_stack_adjust ()
9178 {
9179 if (inhibit_defer_pop == 0)
9180 {
9181 if (pending_stack_adjust != 0)
9182 adjust_stack (GEN_INT (pending_stack_adjust));
9183 pending_stack_adjust = 0;
9184 }
9185 }
9186 \f
9187 /* Expand conditional expressions. */
9188
9189 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9190 LABEL is an rtx of code CODE_LABEL, in this function and all the
9191 functions here. */
9192
9193 void
9194 jumpifnot (exp, label)
9195 tree exp;
9196 rtx label;
9197 {
9198 do_jump (exp, label, NULL_RTX);
9199 }
9200
9201 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9202
9203 void
9204 jumpif (exp, label)
9205 tree exp;
9206 rtx label;
9207 {
9208 do_jump (exp, NULL_RTX, label);
9209 }
9210
9211 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9212 the result is zero, or IF_TRUE_LABEL if the result is one.
9213 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9214 meaning fall through in that case.
9215
9216 do_jump always does any pending stack adjust except when it does not
9217 actually perform a jump. An example where there is no jump
9218 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9219
9220 This function is responsible for optimizing cases such as
9221 &&, || and comparison operators in EXP. */
9222
9223 void
9224 do_jump (exp, if_false_label, if_true_label)
9225 tree exp;
9226 rtx if_false_label, if_true_label;
9227 {
9228 register enum tree_code code = TREE_CODE (exp);
9229 /* Some cases need to create a label to jump to
9230 in order to properly fall through.
9231 These cases set DROP_THROUGH_LABEL nonzero. */
9232 rtx drop_through_label = 0;
9233 rtx temp;
9234 int i;
9235 tree type;
9236 enum machine_mode mode;
9237
9238 #ifdef MAX_INTEGER_COMPUTATION_MODE
9239 check_max_integer_computation_mode (exp);
9240 #endif
9241
9242 emit_queue ();
9243
9244 switch (code)
9245 {
9246 case ERROR_MARK:
9247 break;
9248
9249 case INTEGER_CST:
9250 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9251 if (temp)
9252 emit_jump (temp);
9253 break;
9254
9255 #if 0
9256 /* This is not true with #pragma weak */
9257 case ADDR_EXPR:
9258 /* The address of something can never be zero. */
9259 if (if_true_label)
9260 emit_jump (if_true_label);
9261 break;
9262 #endif
9263
9264 case NOP_EXPR:
9265 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9266 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9267 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9268 goto normal;
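/* ... fall through ... */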
9269 case CONVERT_EXPR:
9270 /* If we are narrowing the operand, we have to do the compare in the
9271 narrower mode. */
9272 if ((TYPE_PRECISION (TREE_TYPE (exp))
9273 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9274 goto normal;
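/* ... fall through ... */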
9275 case NON_LVALUE_EXPR:
9276 case REFERENCE_EXPR:
9277 case ABS_EXPR:
9278 case NEGATE_EXPR:
9279 case LROTATE_EXPR:
9280 case RROTATE_EXPR:
9281 /* These cannot change zero->non-zero or vice versa. */
9282 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9283 break;
9284
9285 case WITH_RECORD_EXPR:
9286 /* Put the object on the placeholder list, recurse through our first
9287 operand, and pop the list. */
9288 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9289 placeholder_list);
9290 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9291 placeholder_list = TREE_CHAIN (placeholder_list);
9292 break;
9293
9294 #if 0
9295 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9296 a test, and can take more if the test is eliminated. */
9297 case PLUS_EXPR:
9298 /* Reduce to minus. */
9299 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9300 TREE_OPERAND (exp, 0),
9301 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9302 TREE_OPERAND (exp, 1))));
9303 /* Process as MINUS. */
9304 #endif
9305
9306 case MINUS_EXPR:
9307 /* Non-zero iff operands of minus differ. */
9308 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9309 TREE_OPERAND (exp, 0),
9310 TREE_OPERAND (exp, 1)),
9311 NE, NE, if_false_label, if_true_label);
9312 break;
9313
9314 case BIT_AND_EXPR:
9315 /* If we are AND'ing with a small constant, do this comparison in the
9316 smallest type that fits. If the machine doesn't have comparisons
9317 that small, it will be converted back to the wider comparison.
9318 This helps if we are testing the sign bit of a narrower object.
9319 combine can't do this for us because it can't know whether a
9320 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9321
9322 if (! SLOW_BYTE_ACCESS
9323 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9324 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9325 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9326 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9327 && (type = type_for_mode (mode, 1)) != 0
9328 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9329 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9330 != CODE_FOR_nothing))
9331 {
9332 do_jump (convert (type, exp), if_false_label, if_true_label);
9333 break;
9334 }
9335 goto normal;
9336
9337 case TRUTH_NOT_EXPR:
9338 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9339 break;
9340
9341 case TRUTH_ANDIF_EXPR:
9342 if (if_false_label == 0)
9343 if_false_label = drop_through_label = gen_label_rtx ();
9344 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9345 start_cleanup_deferral ();
9346 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9347 end_cleanup_deferral ();
9348 break;
9349
9350 case TRUTH_ORIF_EXPR:
9351 if (if_true_label == 0)
9352 if_true_label = drop_through_label = gen_label_rtx ();
9353 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9354 start_cleanup_deferral ();
9355 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9356 end_cleanup_deferral ();
9357 break;
9358
9359 case COMPOUND_EXPR:
9360 push_temp_slots ();
9361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9362 preserve_temp_slots (NULL_RTX);
9363 free_temp_slots ();
9364 pop_temp_slots ();
9365 emit_queue ();
9366 do_pending_stack_adjust ();
9367 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9368 break;
9369
9370 case COMPONENT_REF:
9371 case BIT_FIELD_REF:
9372 case ARRAY_REF:
9373 {
9374 HOST_WIDE_INT bitsize, bitpos;
9375 int unsignedp;
9376 enum machine_mode mode;
9377 tree type;
9378 tree offset;
9379 int volatilep = 0;
9380 unsigned int alignment;
9381
9382 /* Get description of this reference. We don't actually care
9383 about the underlying object here. */
9384 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9385 &unsignedp, &volatilep, &alignment);
9386
9387 type = type_for_size (bitsize, unsignedp);
9388 if (! SLOW_BYTE_ACCESS
9389 && type != 0 && bitsize >= 0
9390 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9391 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9392 != CODE_FOR_nothing))
9393 {
9394 do_jump (convert (type, exp), if_false_label, if_true_label);
9395 break;
9396 }
9397 goto normal;
9398 }
9399
9400 case COND_EXPR:
9401 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9402 if (integer_onep (TREE_OPERAND (exp, 1))
9403 && integer_zerop (TREE_OPERAND (exp, 2)))
9404 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9405
9406 else if (integer_zerop (TREE_OPERAND (exp, 1))
9407 && integer_onep (TREE_OPERAND (exp, 2)))
9408 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9409
9410 else
9411 {
9412 register rtx label1 = gen_label_rtx ();
9413 drop_through_label = gen_label_rtx ();
9414
9415 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9416
9417 start_cleanup_deferral ();
9418 /* Now the THEN-expression. */
9419 do_jump (TREE_OPERAND (exp, 1),
9420 if_false_label ? if_false_label : drop_through_label,
9421 if_true_label ? if_true_label : drop_through_label);
9422 /* In case the do_jump just above never jumps. */
9423 do_pending_stack_adjust ();
9424 emit_label (label1);
9425
9426 /* Now the ELSE-expression. */
9427 do_jump (TREE_OPERAND (exp, 2),
9428 if_false_label ? if_false_label : drop_through_label,
9429 if_true_label ? if_true_label : drop_through_label);
9430 end_cleanup_deferral ();
9431 }
9432 break;
9433
9434 case EQ_EXPR:
9435 {
9436 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9437
9438 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9439 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9440 {
9441 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9442 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9443 do_jump
9444 (fold
9445 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9446 fold (build (EQ_EXPR, TREE_TYPE (exp),
9447 fold (build1 (REALPART_EXPR,
9448 TREE_TYPE (inner_type),
9449 exp0)),
9450 fold (build1 (REALPART_EXPR,
9451 TREE_TYPE (inner_type),
9452 exp1)))),
9453 fold (build (EQ_EXPR, TREE_TYPE (exp),
9454 fold (build1 (IMAGPART_EXPR,
9455 TREE_TYPE (inner_type),
9456 exp0)),
9457 fold (build1 (IMAGPART_EXPR,
9458 TREE_TYPE (inner_type),
9459 exp1)))))),
9460 if_false_label, if_true_label);
9461 }
9462
9463 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9464 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9465
9466 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9467 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9468 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9469 else
9470 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9471 break;
9472 }
9473
9474 case NE_EXPR:
9475 {
9476 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9477
9478 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9479 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9480 {
9481 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9482 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9483 do_jump
9484 (fold
9485 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9486 fold (build (NE_EXPR, TREE_TYPE (exp),
9487 fold (build1 (REALPART_EXPR,
9488 TREE_TYPE (inner_type),
9489 exp0)),
9490 fold (build1 (REALPART_EXPR,
9491 TREE_TYPE (inner_type),
9492 exp1)))),
9493 fold (build (NE_EXPR, TREE_TYPE (exp),
9494 fold (build1 (IMAGPART_EXPR,
9495 TREE_TYPE (inner_type),
9496 exp0)),
9497 fold (build1 (IMAGPART_EXPR,
9498 TREE_TYPE (inner_type),
9499 exp1)))))),
9500 if_false_label, if_true_label);
9501 }
9502
9503 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9504 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9505
9506 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9507 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9508 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9509 else
9510 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9511 break;
9512 }
9513
9514 case LT_EXPR:
9515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9516 if (GET_MODE_CLASS (mode) == MODE_INT
9517 && ! can_compare_p (LT, mode, ccp_jump))
9518 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9519 else
9520 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9521 break;
9522
9523 case LE_EXPR:
9524 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9525 if (GET_MODE_CLASS (mode) == MODE_INT
9526 && ! can_compare_p (LE, mode, ccp_jump))
9527 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9528 else
9529 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9530 break;
9531
9532 case GT_EXPR:
9533 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9534 if (GET_MODE_CLASS (mode) == MODE_INT
9535 && ! can_compare_p (GT, mode, ccp_jump))
9536 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9537 else
9538 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9539 break;
9540
9541 case GE_EXPR:
9542 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9543 if (GET_MODE_CLASS (mode) == MODE_INT
9544 && ! can_compare_p (GE, mode, ccp_jump))
9545 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9546 else
9547 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9548 break;
9549
9550 case UNORDERED_EXPR:
9551 case ORDERED_EXPR:
9552 {
9553 enum rtx_code cmp, rcmp;
9554 int do_rev;
9555
9556 if (code == UNORDERED_EXPR)
9557 cmp = UNORDERED, rcmp = ORDERED;
9558 else
9559 cmp = ORDERED, rcmp = UNORDERED;
9560 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9561
9562 do_rev = 0;
9563 if (! can_compare_p (cmp, mode, ccp_jump)
9564 && (can_compare_p (rcmp, mode, ccp_jump)
9565 /* If the target doesn't provide either UNORDERED or ORDERED
9566 comparisons, canonicalize on UNORDERED for the library. */
9567 || rcmp == UNORDERED))
9568 do_rev = 1;
9569
9570 if (! do_rev)
9571 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9572 else
9573 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9574 }
9575 break;
9576
9577 {
9578 enum rtx_code rcode1;
9579 enum tree_code tcode2;
9580
9581 case UNLT_EXPR:
9582 rcode1 = UNLT;
9583 tcode2 = LT_EXPR;
9584 goto unordered_bcc;
9585 case UNLE_EXPR:
9586 rcode1 = UNLE;
9587 tcode2 = LE_EXPR;
9588 goto unordered_bcc;
9589 case UNGT_EXPR:
9590 rcode1 = UNGT;
9591 tcode2 = GT_EXPR;
9592 goto unordered_bcc;
9593 case UNGE_EXPR:
9594 rcode1 = UNGE;
9595 tcode2 = GE_EXPR;
9596 goto unordered_bcc;
9597 case UNEQ_EXPR:
9598 rcode1 = UNEQ;
9599 tcode2 = EQ_EXPR;
9600 goto unordered_bcc;
9601
9602 unordered_bcc:
9603 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9604 if (can_compare_p (rcode1, mode, ccp_jump))
9605 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9606 if_true_label);
9607 else
9608 {
9609 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9610 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9611 tree cmp0, cmp1;
9612
9613 /* If the target doesn't support combined unordered
9614 compares, decompose into UNORDERED + comparison. */
9615 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9616 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9617 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9618 do_jump (exp, if_false_label, if_true_label);
9619 }
9620 }
9621 break;
9622
9623 default:
9624 normal:
9625 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9626 #if 0
9627 /* This is not needed any more and causes poor code since it causes
9628 comparisons and tests from non-SI objects to have different code
9629 sequences. */
9630 /* Copy to register to avoid generating bad insns by cse
9631 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9632 if (!cse_not_expected && GET_CODE (temp) == MEM)
9633 temp = copy_to_reg (temp);
9634 #endif
9635 do_pending_stack_adjust ();
9636 /* Do any postincrements in the expression that was tested. */
9637 emit_queue ();
9638
9639 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9640 {
9641 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9642 if (target)
9643 emit_jump (target);
9644 }
9645 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9646 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9647 /* Note swapping the labels gives us not-equal. */
9648 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9649 else if (GET_MODE (temp) != VOIDmode)
9650 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9651 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9652 GET_MODE (temp), NULL_RTX, 0,
9653 if_false_label, if_true_label);
9654 else
9655 abort ();
9656 }
9657
9658 if (drop_through_label)
9659 {
9660 /* If do_jump produces code that might be jumped around,
9661 do any stack adjusts from that code, before the place
9662 where control merges in. */
9663 do_pending_stack_adjust ();
9664 emit_label (drop_through_label);
9665 }
9666 }
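/* Editorial note, not part of the original source: a sketch of the control
   flow do_jump produces for a TRUTH_ANDIF_EXPR condition such as `a && b';
   the first operand jumps straight to the false label, so the second is
   evaluated only when the first was nonzero.  The helper name is
   hypothetical.  */
#if 0
static int
andif_as_jumps (int a, int b)
{
  if (a == 0) goto false_label;   /* Operand 0: jump to the false label.  */
  if (b == 0) goto false_label;   /* Operand 1: only reached when a != 0.  */
  return 1;
 false_label:
  return 0;
}
#endif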
9667 \f
9668 /* Given a comparison expression EXP for values too wide to be compared
9669 with one insn, test the comparison and jump to the appropriate label.
9670 The code of EXP is ignored; we always test GT if SWAP is 0,
9671 and LT if SWAP is 1. */
9672
9673 static void
9674 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9675 tree exp;
9676 int swap;
9677 rtx if_false_label, if_true_label;
9678 {
9679 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9680 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9681 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9683
9684 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9685 }
9686
9687 /* Compare OP0 with OP1, word at a time, in mode MODE.
9688 UNSIGNEDP says to do unsigned comparison.
9689 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9690
9691 void
9692 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9693 enum machine_mode mode;
9694 int unsignedp;
9695 rtx op0, op1;
9696 rtx if_false_label, if_true_label;
9697 {
9698 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9699 rtx drop_through_label = 0;
9700 int i;
9701
9702 if (! if_true_label || ! if_false_label)
9703 drop_through_label = gen_label_rtx ();
9704 if (! if_true_label)
9705 if_true_label = drop_through_label;
9706 if (! if_false_label)
9707 if_false_label = drop_through_label;
9708
9709 /* Compare a word at a time, high order first. */
9710 for (i = 0; i < nwords; i++)
9711 {
9712 rtx op0_word, op1_word;
9713
9714 if (WORDS_BIG_ENDIAN)
9715 {
9716 op0_word = operand_subword_force (op0, i, mode);
9717 op1_word = operand_subword_force (op1, i, mode);
9718 }
9719 else
9720 {
9721 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9722 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9723 }
9724
9725 /* All but high-order word must be compared as unsigned. */
9726 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9727 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9728 NULL_RTX, if_true_label);
9729
9730 /* Consider lower words only if these are equal. */
9731 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9732 NULL_RTX, 0, NULL_RTX, if_false_label);
9733 }
9734
9735 if (if_false_label)
9736 emit_jump (if_false_label);
9737 if (drop_through_label)
9738 emit_label (drop_through_label);
9739 }
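/* Editorial note, not part of the original source: the same high-word-first
   comparison written out for a two-word unsigned operand, as a plain-C
   sketch.  The helper name and word type are hypothetical.  */
#if 0
static int
two_word_gtu (unsigned int a_hi, unsigned int a_lo,
	      unsigned int b_hi, unsigned int b_lo)
{
  if (a_hi > b_hi)      /* High-order words decide when they differ...  */
    return 1;
  if (a_hi != b_hi)
    return 0;
  return a_lo > b_lo;   /* ...lower words matter only when they are equal.  */
}
#endif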
9740
9741 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9742 with one insn, test the comparison and jump to the appropriate label. */
9743
9744 static void
9745 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9746 tree exp;
9747 rtx if_false_label, if_true_label;
9748 {
9749 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9750 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9751 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9752 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9753 int i;
9754 rtx drop_through_label = 0;
9755
9756 if (! if_false_label)
9757 drop_through_label = if_false_label = gen_label_rtx ();
9758
9759 for (i = 0; i < nwords; i++)
9760 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9761 operand_subword_force (op1, i, mode),
9762 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9763 word_mode, NULL_RTX, 0, if_false_label,
9764 NULL_RTX);
9765
9766 if (if_true_label)
9767 emit_jump (if_true_label);
9768 if (drop_through_label)
9769 emit_label (drop_through_label);
9770 }
9771 \f
9772 /* Jump according to whether OP0 is 0.
9773 We assume that OP0 has an integer mode that is too wide
9774 for the available compare insns. */
9775
9776 void
9777 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9778 rtx op0;
9779 rtx if_false_label, if_true_label;
9780 {
9781 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9782 rtx part;
9783 int i;
9784 rtx drop_through_label = 0;
9785
9786 /* The fastest way of doing this comparison on almost any machine is to
9787 "or" all the words and compare the result. If all have to be loaded
9788 from memory and this is a very wide item, it's possible this may
9789 be slower, but that's highly unlikely. */
9790
9791 part = gen_reg_rtx (word_mode);
9792 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9793 for (i = 1; i < nwords && part != 0; i++)
9794 part = expand_binop (word_mode, ior_optab, part,
9795 operand_subword_force (op0, i, GET_MODE (op0)),
9796 part, 1, OPTAB_WIDEN);
9797
9798 if (part != 0)
9799 {
9800 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9801 NULL_RTX, 0, if_false_label, if_true_label);
9802
9803 return;
9804 }
9805
9806 /* If we couldn't do the "or" simply, do this with a series of compares. */
9807 if (! if_false_label)
9808 drop_through_label = if_false_label = gen_label_rtx ();
9809
9810 for (i = 0; i < nwords; i++)
9811 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9812 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9813 if_false_label, NULL_RTX);
9814
9815 if (if_true_label)
9816 emit_jump (if_true_label);
9817
9818 if (drop_through_label)
9819 emit_label (drop_through_label);
9820 }
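/* Editorial note, not part of the original source: the "or all the words"
   zero test described above, written as a plain-C sketch for a hypothetical
   four-word operand.  */
#if 0
static int
four_word_is_zero (const unsigned int w[4])
{
  unsigned int acc = w[0] | w[1] | w[2] | w[3];   /* One compare instead of four.  */
  return acc == 0;
}
#endif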
9821 \f
9822 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9823 (including code to compute the values to be compared)
9824 and set (CC0) according to the result.
9825 The decision as to signed or unsigned comparison must be made by the caller.
9826
9827 We force a stack adjustment unless there are currently
9828 things pushed on the stack that aren't yet used.
9829
9830 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9831 compared.
9832
9833 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9834 size of MODE should be used. */
9835
9836 rtx
9837 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9838 register rtx op0, op1;
9839 enum rtx_code code;
9840 int unsignedp;
9841 enum machine_mode mode;
9842 rtx size;
9843 unsigned int align;
9844 {
9845 rtx tem;
9846
9847 /* If one operand is constant, make it the second one. Only do this
9848 if the other operand is not constant as well. */
9849
9850 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9851 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9852 {
9853 tem = op0;
9854 op0 = op1;
9855 op1 = tem;
9856 code = swap_condition (code);
9857 }
9858
9859 if (flag_force_mem)
9860 {
9861 op0 = force_not_mem (op0);
9862 op1 = force_not_mem (op1);
9863 }
9864
9865 do_pending_stack_adjust ();
9866
9867 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9868 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9869 return tem;
9870
9871 #if 0
9872 /* There's no need to do this now that combine.c can eliminate lots of
9873 sign extensions. This can be less efficient in certain cases on other
9874 machines. */
9875
9876 /* If this is a signed equality comparison, we can do it as an
9877 unsigned comparison since zero-extension is cheaper than sign
9878 extension and comparisons with zero are done as unsigned. This is
9879 the case even on machines that can do fast sign extension, since
9880 zero-extension is easier to combine with other operations than
9881 sign-extension is. If we are comparing against a constant, we must
9882 convert it to what it would look like unsigned. */
9883 if ((code == EQ || code == NE) && ! unsignedp
9884 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9885 {
9886 if (GET_CODE (op1) == CONST_INT
9887 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9888 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9889 unsignedp = 1;
9890 }
9891 #endif
9892
9893 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9894
9895 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9896 }
9897
9898 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9899 The decision as to signed or unsigned comparison must be made by the caller.
9900
9901 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9902 compared.
9903
9904 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9905 size of MODE should be used. */
9906
9907 void
9908 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9909 if_false_label, if_true_label)
9910 register rtx op0, op1;
9911 enum rtx_code code;
9912 int unsignedp;
9913 enum machine_mode mode;
9914 rtx size;
9915 unsigned int align;
9916 rtx if_false_label, if_true_label;
9917 {
9918 rtx tem;
9919 int dummy_true_label = 0;
9920
9921 /* Reverse the comparison if that is safe and we want to jump if it is
9922 false. */
9923 if (! if_true_label && ! FLOAT_MODE_P (mode))
9924 {
9925 if_true_label = if_false_label;
9926 if_false_label = 0;
9927 code = reverse_condition (code);
9928 }
9929
9930 /* If one operand is constant, make it the second one. Only do this
9931 if the other operand is not constant as well. */
9932
9933 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9934 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9935 {
9936 tem = op0;
9937 op0 = op1;
9938 op1 = tem;
9939 code = swap_condition (code);
9940 }
9941
9942 if (flag_force_mem)
9943 {
9944 op0 = force_not_mem (op0);
9945 op1 = force_not_mem (op1);
9946 }
9947
9948 do_pending_stack_adjust ();
9949
9950 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9951 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9952 {
9953 if (tem == const_true_rtx)
9954 {
9955 if (if_true_label)
9956 emit_jump (if_true_label);
9957 }
9958 else
9959 {
9960 if (if_false_label)
9961 emit_jump (if_false_label);
9962 }
9963 return;
9964 }
9965
9966 #if 0
9967 /* There's no need to do this now that combine.c can eliminate lots of
9968 sign extensions. This can be less efficient in certain cases on other
9969 machines. */
9970
9971 /* If this is a signed equality comparison, we can do it as an
9972 unsigned comparison since zero-extension is cheaper than sign
9973 extension and comparisons with zero are done as unsigned. This is
9974 the case even on machines that can do fast sign extension, since
9975 zero-extension is easier to combine with other operations than
9976 sign-extension is. If we are comparing against a constant, we must
9977 convert it to what it would look like unsigned. */
9978 if ((code == EQ || code == NE) && ! unsignedp
9979 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9980 {
9981 if (GET_CODE (op1) == CONST_INT
9982 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9983 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9984 unsignedp = 1;
9985 }
9986 #endif
9987
9988 if (! if_true_label)
9989 {
9990 dummy_true_label = 1;
9991 if_true_label = gen_label_rtx ();
9992 }
9993
9994 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9995 if_true_label);
9996
9997 if (if_false_label)
9998 emit_jump (if_false_label);
9999 if (dummy_true_label)
10000 emit_label (if_true_label);
10001 }
10002
10003 /* Generate code for a comparison expression EXP (including code to compute
10004 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10005 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10006 generated code will drop through.
10007 SIGNED_CODE should be the rtx operation for this comparison for
10008 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10009
10010 We force a stack adjustment unless there are currently
10011 things pushed on the stack that aren't yet used. */
10012
10013 static void
10014 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10015 if_true_label)
10016 register tree exp;
10017 enum rtx_code signed_code, unsigned_code;
10018 rtx if_false_label, if_true_label;
10019 {
10020 unsigned int align0, align1;
10021 register rtx op0, op1;
10022 register tree type;
10023 register enum machine_mode mode;
10024 int unsignedp;
10025 enum rtx_code code;
10026
10027 /* Don't crash if the comparison was erroneous. */
10028 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10030 return;
10031
10032 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10033 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10034 mode = TYPE_MODE (type);
10035 unsignedp = TREE_UNSIGNED (type);
10036 code = unsignedp ? unsigned_code : signed_code;
10037
10038 #ifdef HAVE_canonicalize_funcptr_for_compare
10039 /* If function pointers need to be "canonicalized" before they can
10040 be reliably compared, then canonicalize them. */
10041 if (HAVE_canonicalize_funcptr_for_compare
10042 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10043 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10044 == FUNCTION_TYPE))
10045 {
10046 rtx new_op0 = gen_reg_rtx (mode);
10047
10048 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10049 op0 = new_op0;
10050 }
10051
10052 if (HAVE_canonicalize_funcptr_for_compare
10053 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10054 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10055 == FUNCTION_TYPE))
10056 {
10057 rtx new_op1 = gen_reg_rtx (mode);
10058
10059 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10060 op1 = new_op1;
10061 }
10062 #endif
10063
10064 /* Do any postincrements in the expression that was tested. */
10065 emit_queue ();
10066
10067 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10068 ((mode == BLKmode)
10069 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10070 MIN (align0, align1),
10071 if_false_label, if_true_label);
10072 }
10073 \f
10074 /* Generate code to calculate EXP using a store-flag instruction
10075 and return an rtx for the result. EXP is either a comparison
10076 or a TRUTH_NOT_EXPR whose operand is a comparison.
10077
10078 If TARGET is nonzero, store the result there if convenient.
10079
10080 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10081 cheap.
10082
10083 Return zero if there is no suitable set-flag instruction
10084 available on this machine.
10085
10086 Once expand_expr has been called on the arguments of the comparison,
10087 we are committed to doing the store flag, since it is not safe to
10088 re-evaluate the expression. We emit the store-flag insn by calling
10089 emit_store_flag, but only expand the arguments if we have a reason
10090 to believe that emit_store_flag will be successful. If we think that
10091 it will, but it isn't, we have to simulate the store-flag with a
10092 set/jump/set sequence. */
10093
10094 static rtx
10095 do_store_flag (exp, target, mode, only_cheap)
10096 tree exp;
10097 rtx target;
10098 enum machine_mode mode;
10099 int only_cheap;
10100 {
10101 enum rtx_code code;
10102 tree arg0, arg1, type;
10103 tree tem;
10104 enum machine_mode operand_mode;
10105 int invert = 0;
10106 int unsignedp;
10107 rtx op0, op1;
10108 enum insn_code icode;
10109 rtx subtarget = target;
10110 rtx result, label;
10111
10112 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10113 result at the end. We can't simply invert the test since it would
10114 have already been inverted if it were valid. This case occurs for
10115 some floating-point comparisons. */
10116
10117 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10118 invert = 1, exp = TREE_OPERAND (exp, 0);
10119
10120 arg0 = TREE_OPERAND (exp, 0);
10121 arg1 = TREE_OPERAND (exp, 1);
10122 type = TREE_TYPE (arg0);
10123 operand_mode = TYPE_MODE (type);
10124 unsignedp = TREE_UNSIGNED (type);
10125
10126 /* We won't bother with BLKmode store-flag operations because it would mean
10127 passing a lot of information to emit_store_flag. */
10128 if (operand_mode == BLKmode)
10129 return 0;
10130
10131 /* We won't bother with store-flag operations involving function pointers
10132 when function pointers must be canonicalized before comparisons. */
10133 #ifdef HAVE_canonicalize_funcptr_for_compare
10134 if (HAVE_canonicalize_funcptr_for_compare
10135 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10136 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10137 == FUNCTION_TYPE))
10138 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10139 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10140 == FUNCTION_TYPE))))
10141 return 0;
10142 #endif
10143
10144 STRIP_NOPS (arg0);
10145 STRIP_NOPS (arg1);
10146
10147 /* Get the rtx comparison code to use. We know that EXP is a comparison
10148 operation of some type. Some comparisons against 1 and -1 can be
10149 converted to comparisons with zero. Do so here so that the tests
10150 below will be aware that we have a comparison with zero. These
10151 tests will not catch constants in the first operand, but constants
10152 are rarely passed as the first operand. */
10153
10154 switch (TREE_CODE (exp))
10155 {
10156 case EQ_EXPR:
10157 code = EQ;
10158 break;
10159 case NE_EXPR:
10160 code = NE;
10161 break;
10162 case LT_EXPR:
10163 if (integer_onep (arg1))
10164 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10165 else
10166 code = unsignedp ? LTU : LT;
10167 break;
10168 case LE_EXPR:
10169 if (! unsignedp && integer_all_onesp (arg1))
10170 arg1 = integer_zero_node, code = LT;
10171 else
10172 code = unsignedp ? LEU : LE;
10173 break;
10174 case GT_EXPR:
10175 if (! unsignedp && integer_all_onesp (arg1))
10176 arg1 = integer_zero_node, code = GE;
10177 else
10178 code = unsignedp ? GTU : GT;
10179 break;
10180 case GE_EXPR:
10181 if (integer_onep (arg1))
10182 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10183 else
10184 code = unsignedp ? GEU : GE;
10185 break;
10186
10187 case UNORDERED_EXPR:
10188 code = UNORDERED;
10189 break;
10190 case ORDERED_EXPR:
10191 code = ORDERED;
10192 break;
10193 case UNLT_EXPR:
10194 code = UNLT;
10195 break;
10196 case UNLE_EXPR:
10197 code = UNLE;
10198 break;
10199 case UNGT_EXPR:
10200 code = UNGT;
10201 break;
10202 case UNGE_EXPR:
10203 code = UNGE;
10204 break;
10205 case UNEQ_EXPR:
10206 code = UNEQ;
10207 break;
10208
10209 default:
10210 abort ();
10211 }
10212
10213 /* Put a constant second. */
10214 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10215 {
10216 tem = arg0; arg0 = arg1; arg1 = tem;
10217 code = swap_condition (code);
10218 }
10219
10220 /* If this is an equality or inequality test of a single bit, we can
10221 do this by shifting the bit being tested to the low-order bit and
10222 masking the result with the constant 1. If the condition was EQ,
10223 we xor it with 1. This does not require an scc insn and is faster
10224 than an scc insn even if we have it. */
10225
10226 if ((code == NE || code == EQ)
10227 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10228 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10229 {
10230 tree inner = TREE_OPERAND (arg0, 0);
10231 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10232 int ops_unsignedp;
10233
10234 /* If INNER is a right shift of a constant and it plus BITNUM does
10235 not overflow, adjust BITNUM and INNER. */
10236
10237 if (TREE_CODE (inner) == RSHIFT_EXPR
10238 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10239 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10240 && bitnum < TYPE_PRECISION (type)
10241 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10242 bitnum - TYPE_PRECISION (type)))
10243 {
10244 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10245 inner = TREE_OPERAND (inner, 0);
10246 }
10247
10248 /* If we are going to be able to omit the AND below, we must do our
10249 operations as unsigned. If we must use the AND, we have a choice.
10250 Normally unsigned is faster, but for some machines signed is. */
10251 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10252 #ifdef LOAD_EXTEND_OP
10253 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10254 #else
10255 : 1
10256 #endif
10257 );
10258
10259 if (! get_subtarget (subtarget)
10260 || GET_MODE (subtarget) != operand_mode
10261 || ! safe_from_p (subtarget, inner, 1))
10262 subtarget = 0;
10263
10264 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10265
10266 if (bitnum != 0)
10267 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10268 size_int (bitnum), subtarget, ops_unsignedp);
10269
10270 if (GET_MODE (op0) != mode)
10271 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10272
10273 if ((code == EQ && ! invert) || (code == NE && invert))
10274 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10275 ops_unsignedp, OPTAB_LIB_WIDEN);
10276
10277 /* Put the AND last so it can combine with more things. */
10278 if (bitnum != TYPE_PRECISION (type) - 1)
10279 op0 = expand_and (op0, const1_rtx, subtarget);
10280
10281 return op0;
10282 }
10283
10284 /* Now see if we are likely to be able to do this. Return if not. */
10285 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10286 return 0;
10287
10288 icode = setcc_gen_code[(int) code];
10289 if (icode == CODE_FOR_nothing
10290 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10291 {
10292 /* We can only do this if it is one of the special cases that
10293 can be handled without an scc insn. */
10294 if ((code == LT && integer_zerop (arg1))
10295 || (! only_cheap && code == GE && integer_zerop (arg1)))
10296 ;
10297 else if (BRANCH_COST >= 0
10298 && ! only_cheap && (code == NE || code == EQ)
10299 && TREE_CODE (type) != REAL_TYPE
10300 && ((abs_optab->handlers[(int) operand_mode].insn_code
10301 != CODE_FOR_nothing)
10302 || (ffs_optab->handlers[(int) operand_mode].insn_code
10303 != CODE_FOR_nothing)))
10304 ;
10305 else
10306 return 0;
10307 }
10308
10309 preexpand_calls (exp);
10310 if (! get_subtarget (target)
10311 || GET_MODE (subtarget) != operand_mode
10312 || ! safe_from_p (subtarget, arg1, 1))
10313 subtarget = 0;
10314
10315 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10316 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10317
10318 if (target == 0)
10319 target = gen_reg_rtx (mode);
10320
10321 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10322 because, if emit_store_flag does anything, it will succeed and
10323 OP0 and OP1 will not be used subsequently. */
10324
10325 result = emit_store_flag (target, code,
10326 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10327 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10328 operand_mode, unsignedp, 1);
10329
10330 if (result)
10331 {
10332 if (invert)
10333 result = expand_binop (mode, xor_optab, result, const1_rtx,
10334 result, 0, OPTAB_LIB_WIDEN);
10335 return result;
10336 }
10337
10338 /* If that failed, we have to do it with set/compare/jump/set code. */
10339 if (GET_CODE (target) != REG
10340 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10341 target = gen_reg_rtx (GET_MODE (target));
10342
10343 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10344 result = compare_from_rtx (op0, op1, code, unsignedp,
10345 operand_mode, NULL_RTX, 0);
10346 if (GET_CODE (result) == CONST_INT)
10347 return (((result == const0_rtx && ! invert)
10348 || (result != const0_rtx && invert))
10349 ? const0_rtx : const1_rtx);
10350
10351 label = gen_label_rtx ();
10352 if (bcc_gen_fctn[(int) code] == 0)
10353 abort ();
10354
10355 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10356 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10357 emit_label (label);
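/* In outline, the fallback sequence emitted above is:
target = 1; if (op0 CODE op1) goto label; target = 0; label: ;
with the two constants swapped when INVERT is set. */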
10358
10359 return target;
10360 }
10361 \f
10362 /* Generate a tablejump instruction (used for switch statements). */
10363
10364 #ifdef HAVE_tablejump
10365
10366 /* INDEX is the value being switched on, with the lowest value
10367 in the table already subtracted.
10368 MODE is its expected mode (needed if INDEX is constant).
10369 RANGE is the length of the jump table.
10370 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10371
10372 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10373 index value is out of range. */
10374
10375 void
10376 do_tablejump (index, mode, range, table_label, default_label)
10377 rtx index, range, table_label, default_label;
10378 enum machine_mode mode;
10379 {
10380 register rtx temp, vector;
10381
10382 /* Do an unsigned comparison (in the proper mode) between the index
10383 expression and the value which represents the length of the range.
10384 Since we just finished subtracting the lower bound of the range
10385 from the index expression, this comparison allows us to simultaneously
10386 check that the original index expression value is both greater than
10387 or equal to the minimum value of the range and less than or equal to
10388 the maximum value of the range. */
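/* For instance, if the original index was below the lower bound, the
earlier subtraction wrapped around, so as an unsigned value it is
larger than RANGE and the branch below goes to DEFAULT_LABEL. */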
10389
10390 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10391 0, default_label);
10392
10393 /* If index is in range, it must fit in Pmode.
10394 Convert to Pmode so we can index with it. */
10395 if (mode != Pmode)
10396 index = convert_to_mode (Pmode, index, 1);
10397
10398 /* Don't let a MEM slip through, because then the INDEX that comes
10399 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10400 and break_out_memory_refs will go to work on it and mess it up. */
10401 #ifdef PIC_CASE_VECTOR_ADDRESS
10402 if (flag_pic && GET_CODE (index) != REG)
10403 index = copy_to_mode_reg (Pmode, index);
10404 #endif
10405
10406 /* If flag_force_addr were to affect this address
10407 it could interfere with the tricky assumptions made
10408 about addresses that contain label-refs,
10409 which may be valid only very near the tablejump itself. */
10410 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10411 GET_MODE_SIZE, because this indicates how large insns are. The other
10412 uses should all be Pmode, because they are addresses. This code
10413 could fail if addresses and insns are not the same size. */
10414 index = gen_rtx_PLUS (Pmode,
10415 gen_rtx_MULT (Pmode, index,
10416 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10417 gen_rtx_LABEL_REF (Pmode, table_label));
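/* The address built above has the form
(plus (mult INDEX (const_int S)) (label_ref TABLE_LABEL)),
where S is GET_MODE_SIZE (CASE_VECTOR_MODE), typically 4 for an SImode
vector, so each dispatch-table entry is S bytes past the previous one. */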
10418 #ifdef PIC_CASE_VECTOR_ADDRESS
10419 if (flag_pic)
10420 index = PIC_CASE_VECTOR_ADDRESS (index);
10421 else
10422 #endif
10423 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10424 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10425 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10426 RTX_UNCHANGING_P (vector) = 1;
10427 convert_move (temp, vector, 0);
10428
10429 emit_jump_insn (gen_tablejump (temp, table_label));
10430
10431 /* If we are generating PIC code or if the table is PC-relative, the
10432 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10433 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10434 emit_barrier ();
10435 }
10436
10437 #endif /* HAVE_tablejump */