1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PROTO ((int));
135
136 static rtx enqueue_insn PROTO((rtx, rtx));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PROTO((rtx, int, int));
141 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PROTO((tree));
145 static int mostly_zeros_p PROTO((tree));
146 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
147 tree, tree, int, int));
148 static void store_constructor PROTO((tree, rtx, int, int));
149 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
150 enum machine_mode, int, int,
151 int, int));
152 static enum memory_use_mode
153 get_memory_usage_from_modifier PROTO((enum expand_modifier));
154 static tree save_noncopied_parts PROTO((tree, tree));
155 static tree init_noncopied_parts PROTO((tree, tree));
156 static int safe_from_p PROTO((rtx, tree, int));
157 static int fixed_type_p PROTO((tree));
158 static rtx var_rtx PROTO((tree));
159 static int readonly_fields_p PROTO((tree));
160 static rtx expand_expr_unaligned PROTO((tree, int *));
161 static rtx expand_increment PROTO((tree, int, int));
162 static void preexpand_calls PROTO((tree));
163 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
164 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
165 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
166 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
167
168 /* Record for each mode whether we can move a register directly to or
169 from an object of that mode in memory. If we can't, we won't try
170 to use that mode directly when accessing a field of that mode. */
171
172 static char direct_load[NUM_MACHINE_MODES];
173 static char direct_store[NUM_MACHINE_MODES];
174
175 /* If a memory-to-memory move would take MOVE_RATIO or more simple
176 move-instruction sequences, we will do a movstr or libcall instead. */
177
178 #ifndef MOVE_RATIO
179 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
180 #define MOVE_RATIO 2
181 #else
182 /* If we are optimizing for space (-Os), cut down the default move ratio.  */
183 #define MOVE_RATIO (optimize_size ? 3 : 15)
184 #endif
185 #endif
186
187 /* This macro is used to determine whether move_by_pieces should be called
188 to perform a structure copy. */
189 #ifndef MOVE_BY_PIECES_P
190 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
191 (SIZE, ALIGN) < MOVE_RATIO)
192 #endif
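/* Illustrative example (editorial addition, not part of the original
   source): assume a hypothetical 32-bit target with MOVE_MAX == 4 and
   no movstrM patterns, so MOVE_RATIO defaults to 15 when not
   optimizing for size.  Copying a 16-byte block known to be 4-byte
   aligned gives move_by_pieces_ninsns (16, 4) == 4, since the copy
   decomposes into four SImode moves, and MOVE_BY_PIECES_P (16, 4) is
   true because 4 < 15; emit_block_move will then copy the block with
   inline moves rather than a block-move pattern or a library call.  */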
193
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199
200 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201
202 #ifndef SLOW_UNALIGNED_ACCESS
203 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
204 #endif
205 \f
206 /* This is run once per compilation to set up which modes can be used
207 directly in memory and to initialize the block move optab. */
208
209 void
210 init_expr_once ()
211 {
212 rtx insn, pat;
213 enum machine_mode mode;
214 int num_clobbers;
215 rtx mem, mem1;
216 char *free_point;
217
218 start_sequence ();
219
220 /* Since we are on the permanent obstack, we must be sure we save this
221 spot AFTER we call start_sequence, since it will reuse the rtl it
222 makes. */
223 free_point = (char *) oballoc (0);
224
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230
231 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
232 pat = PATTERN (insn);
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238 rtx reg;
239
240 direct_load[(int) mode] = direct_store[(int) mode] = 0;
241 PUT_MODE (mem, mode);
242 PUT_MODE (mem1, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 reg = gen_rtx_REG (mode, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 end_sequence ();
280 obfree (free_point);
281 }
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr ()
287 {
288 current_function->expr
289 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
290
291 pending_chain = 0;
292 pending_stack_adjust = 0;
293 inhibit_defer_pop = 0;
294 saveregs_value = 0;
295 apply_args_value = 0;
296 forced_labels = 0;
297 }
298
299 void
300 mark_expr_status (p)
301 struct expr_status *p;
302 {
303 if (p == NULL)
304 return;
305
306 ggc_mark_rtx (p->x_saveregs_value);
307 ggc_mark_rtx (p->x_apply_args_value);
308 ggc_mark_rtx (p->x_forced_labels);
309 }
310
311 void
312 free_expr_status (f)
313 struct function *f;
314 {
315 free (f->expr);
316 f->expr = NULL;
317 }
318
319 /* Small sanity check that the queue is empty at the end of a function. */
320 void
321 finish_expr_for_function ()
322 {
323 if (pending_chain)
324 abort ();
325 }
326 \f
327 /* Manage the queue of increment instructions to be output
328 for POSTINCREMENT_EXPR expressions, etc. */
329
330 /* Queue up to increment (or change) VAR later. BODY says how:
331 BODY should be the same thing you would pass to emit_insn
332 to increment right away. It will go to emit_insn later on.
333
334 The value is a QUEUED expression to be used in place of VAR
335 where you want to guarantee the pre-incrementation value of VAR. */
336
337 static rtx
338 enqueue_insn (var, body)
339 rtx var, body;
340 {
341 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
342 body, pending_chain);
343 return pending_chain;
344 }
345
346 /* Use protect_from_queue to convert a QUEUED expression
347 into something that you can put immediately into an instruction.
348 If the queued incrementation has not happened yet,
349 protect_from_queue returns the variable itself.
350 If the incrementation has happened, protect_from_queue returns a temp
351 that contains a copy of the old value of the variable.
352
353 Any time an rtx which might possibly be a QUEUED is to be put
354 into an instruction, it must be passed through protect_from_queue first.
355 QUEUED expressions are not meaningful in instructions.
356
357 Do not pass a value through protect_from_queue and then hold
358 on to it for a while before putting it in an instruction!
359 If the queue is flushed in between, incorrect code will result. */
360
361 rtx
362 protect_from_queue (x, modify)
363 register rtx x;
364 int modify;
365 {
366 register RTX_CODE code = GET_CODE (x);
367
368 #if 0 /* A QUEUED can hang around after the queue is forced out. */
369 /* Shortcut for most common case. */
370 if (pending_chain == 0)
371 return x;
372 #endif
373
374 if (code != QUEUED)
375 {
376 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
377 use of autoincrement. Make a copy of the contents of the memory
378 location rather than a copy of the address, but not if the value is
379 of mode BLKmode. Don't modify X in place since it might be
380 shared. */
381 if (code == MEM && GET_MODE (x) != BLKmode
382 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
383 {
384 register rtx y = XEXP (x, 0);
385 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
386
387 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
388 MEM_COPY_ATTRIBUTES (new, x);
389 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
390
391 if (QUEUED_INSN (y))
392 {
393 register rtx temp = gen_reg_rtx (GET_MODE (new));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
397 }
398 return new;
399 }
400 /* Otherwise, recursively protect the subexpressions of all
401 the kinds of rtx's that can contain a QUEUED. */
402 if (code == MEM)
403 {
404 rtx tem = protect_from_queue (XEXP (x, 0), 0);
405 if (tem != XEXP (x, 0))
406 {
407 x = copy_rtx (x);
408 XEXP (x, 0) = tem;
409 }
410 }
411 else if (code == PLUS || code == MULT)
412 {
413 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
414 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
415 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
416 {
417 x = copy_rtx (x);
418 XEXP (x, 0) = new0;
419 XEXP (x, 1) = new1;
420 }
421 }
422 return x;
423 }
424 /* If the increment has not happened, use the variable itself. */
425 if (QUEUED_INSN (x) == 0)
426 return QUEUED_VAR (x);
427 /* If the increment has happened and a pre-increment copy exists,
428 use that copy. */
429 if (QUEUED_COPY (x) != 0)
430 return QUEUED_COPY (x);
431 /* The increment has happened but we haven't set up a pre-increment copy.
432 Set one up now, and use it. */
433 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
434 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
435 QUEUED_INSN (x));
436 return QUEUED_COPY (x);
437 }
438
439 /* Return nonzero if X contains a QUEUED expression:
440 if it contains anything that will be altered by a queued increment.
441 We handle only combinations of MEM, PLUS, MINUS and MULT operators
442 since memory addresses generally contain only those. */
443
444 int
445 queued_subexp_p (x)
446 rtx x;
447 {
448 register enum rtx_code code = GET_CODE (x);
449 switch (code)
450 {
451 case QUEUED:
452 return 1;
453 case MEM:
454 return queued_subexp_p (XEXP (x, 0));
455 case MULT:
456 case PLUS:
457 case MINUS:
458 return (queued_subexp_p (XEXP (x, 0))
459 || queued_subexp_p (XEXP (x, 1)));
460 default:
461 return 0;
462 }
463 }
464
465 /* Perform all the pending incrementations. */
466
467 void
468 emit_queue ()
469 {
470 register rtx p;
471 while ((p = pending_chain))
472 {
473 rtx body = QUEUED_BODY (p);
474
475 if (GET_CODE (body) == SEQUENCE)
476 {
477 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
478 emit_insn (QUEUED_BODY (p));
479 }
480 else
481 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
482 pending_chain = QUEUED_NEXT (p);
483 }
484 }
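/* Illustrative sketch (editorial addition, not part of the original
   source): the intended lifecycle of the queue, assuming a caller that
   already has a variable VAR and an increment body INC built as rtl.
   The local names below are hypothetical, not existing code.

       rtx q   = enqueue_insn (var, inc);     -- defer the increment
       rtx old = protect_from_queue (q, 0);   -- pre-increment value
       ... emit insns that use OLD right away ...
       emit_queue ();                         -- the increment is emitted here

   As the comments above warn, the value returned by protect_from_queue
   must be used immediately; if emit_queue runs in between, the emitted
   rtl can end up seeing the post-increment value.  */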
485 \f
486 /* Copy data from FROM to TO, where the machine modes are not the same.
487 Both modes may be integer, or both may be floating.
488 UNSIGNEDP should be nonzero if FROM is an unsigned type.
489 This causes zero-extension instead of sign-extension. */
490
491 void
492 convert_move (to, from, unsignedp)
493 register rtx to, from;
494 int unsignedp;
495 {
496 enum machine_mode to_mode = GET_MODE (to);
497 enum machine_mode from_mode = GET_MODE (from);
498 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
499 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
500 enum insn_code code;
501 rtx libcall;
502
503 /* rtx code for making an equivalent value. */
504 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
505
506 to = protect_from_queue (to, 1);
507 from = protect_from_queue (from, 0);
508
509 if (to_real != from_real)
510 abort ();
511
512 /* If FROM is a SUBREG that indicates that we have already done at least
513 the required extension, strip it. We don't handle such SUBREGs as
514 TO here. */
515
516 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
517 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
518 >= GET_MODE_SIZE (to_mode))
519 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
520 from = gen_lowpart (to_mode, from), from_mode = to_mode;
521
522 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
523 abort ();
524
525 if (to_mode == from_mode
526 || (from_mode == VOIDmode && CONSTANT_P (from)))
527 {
528 emit_move_insn (to, from);
529 return;
530 }
531
532 if (to_real)
533 {
534 rtx value;
535
536 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
537 {
538 /* Try converting directly if the insn is supported. */
539 if ((code = can_extend_p (to_mode, from_mode, 0))
540 != CODE_FOR_nothing)
541 {
542 emit_unop_insn (code, to, from, UNKNOWN);
543 return;
544 }
545 }
546
547 #ifdef HAVE_trunchfqf2
548 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
549 {
550 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
551 return;
552 }
553 #endif
554 #ifdef HAVE_trunctqfqf2
555 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_truncsfqf2
562 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncdfqf2
569 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncxfqf2
576 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_trunctfqf2
583 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589
590 #ifdef HAVE_trunctqfhf2
591 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
592 {
593 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
594 return;
595 }
596 #endif
597 #ifdef HAVE_truncsfhf2
598 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncdfhf2
605 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncxfhf2
612 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_trunctfhf2
619 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625
626 #ifdef HAVE_truncsftqf2
627 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
628 {
629 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncdftqf2
634 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncxftqf2
641 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_trunctftqf2
648 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654
655 #ifdef HAVE_truncdfsf2
656 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662 #ifdef HAVE_truncxfsf2
663 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_trunctfsf2
670 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncxfdf2
677 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_trunctfdf2
684 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690
691 libcall = (rtx) 0;
692 switch (from_mode)
693 {
694 case SFmode:
695 switch (to_mode)
696 {
697 case DFmode:
698 libcall = extendsfdf2_libfunc;
699 break;
700
701 case XFmode:
702 libcall = extendsfxf2_libfunc;
703 break;
704
705 case TFmode:
706 libcall = extendsftf2_libfunc;
707 break;
708
709 default:
710 break;
711 }
712 break;
713
714 case DFmode:
715 switch (to_mode)
716 {
717 case SFmode:
718 libcall = truncdfsf2_libfunc;
719 break;
720
721 case XFmode:
722 libcall = extenddfxf2_libfunc;
723 break;
724
725 case TFmode:
726 libcall = extenddftf2_libfunc;
727 break;
728
729 default:
730 break;
731 }
732 break;
733
734 case XFmode:
735 switch (to_mode)
736 {
737 case SFmode:
738 libcall = truncxfsf2_libfunc;
739 break;
740
741 case DFmode:
742 libcall = truncxfdf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case TFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = trunctfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = trunctfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 if (libcall == (rtx) 0)
771 /* This conversion is not implemented yet. */
772 abort ();
773
774 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
775 1, from, from_mode);
776 emit_move_insn (to, value);
777 return;
778 }
779
780 /* Now both modes are integers. */
781
782 /* Handle expanding beyond a word. */
783 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
784 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
785 {
786 rtx insns;
787 rtx lowpart;
788 rtx fill_value;
789 rtx lowfrom;
790 int i;
791 enum machine_mode lowpart_mode;
792 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
793
794 /* Try converting directly if the insn is supported. */
795 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
796 != CODE_FOR_nothing)
797 {
798 /* If FROM is a SUBREG, put it into a register. Do this
799 so that we always generate the same set of insns for
800 better cse'ing; if an intermediate assignment occurred,
801 we won't be doing the operation directly on the SUBREG. */
802 if (optimize > 0 && GET_CODE (from) == SUBREG)
803 from = force_reg (from_mode, from);
804 emit_unop_insn (code, to, from, equiv_code);
805 return;
806 }
807 /* Next, try converting via full word. */
808 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
809 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
810 != CODE_FOR_nothing))
811 {
812 if (GET_CODE (to) == REG)
813 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
814 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
815 emit_unop_insn (code, to,
816 gen_lowpart (word_mode, to), equiv_code);
817 return;
818 }
819
820 /* No special multiword conversion insn; do it by hand. */
821 start_sequence ();
822
823 /* Since we will turn this into a no conflict block, we must ensure
824 that the source does not overlap the target. */
825
826 if (reg_overlap_mentioned_p (to, from))
827 from = force_reg (from_mode, from);
828
829 /* Get a copy of FROM widened to a word, if necessary. */
830 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
831 lowpart_mode = word_mode;
832 else
833 lowpart_mode = from_mode;
834
835 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
836
837 lowpart = gen_lowpart (lowpart_mode, to);
838 emit_move_insn (lowpart, lowfrom);
839
840 /* Compute the value to put in each remaining word. */
841 if (unsignedp)
842 fill_value = const0_rtx;
843 else
844 {
845 #ifdef HAVE_slt
846 if (HAVE_slt
847 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
848 && STORE_FLAG_VALUE == -1)
849 {
850 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
851 lowpart_mode, 0, 0);
852 fill_value = gen_reg_rtx (word_mode);
853 emit_insn (gen_slt (fill_value));
854 }
855 else
856 #endif
857 {
858 fill_value
859 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
860 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
861 NULL_RTX, 0);
862 fill_value = convert_to_mode (word_mode, fill_value, 1);
863 }
864 }
865
866 /* Fill the remaining words. */
867 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
868 {
869 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
870 rtx subword = operand_subword (to, index, 1, to_mode);
871
872 if (subword == 0)
873 abort ();
874
875 if (fill_value != subword)
876 emit_move_insn (subword, fill_value);
877 }
878
879 insns = get_insns ();
880 end_sequence ();
881
882 emit_no_conflict_block (insns, to, from, NULL_RTX,
883 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
884 return;
885 }
886
887 /* Truncating multi-word to a word or less. */
888 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
889 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
890 {
891 if (!((GET_CODE (from) == MEM
892 && ! MEM_VOLATILE_P (from)
893 && direct_load[(int) to_mode]
894 && ! mode_dependent_address_p (XEXP (from, 0)))
895 || GET_CODE (from) == REG
896 || GET_CODE (from) == SUBREG))
897 from = force_reg (from_mode, from);
898 convert_move (to, gen_lowpart (word_mode, from), 0);
899 return;
900 }
901
902 /* Handle pointer conversion */ /* SPEE 900220 */
903 if (to_mode == PQImode)
904 {
905 if (from_mode != QImode)
906 from = convert_to_mode (QImode, from, unsignedp);
907
908 #ifdef HAVE_truncqipqi2
909 if (HAVE_truncqipqi2)
910 {
911 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
912 return;
913 }
914 #endif /* HAVE_truncqipqi2 */
915 abort ();
916 }
917
918 if (from_mode == PQImode)
919 {
920 if (to_mode != QImode)
921 {
922 from = convert_to_mode (QImode, from, unsignedp);
923 from_mode = QImode;
924 }
925 else
926 {
927 #ifdef HAVE_extendpqiqi2
928 if (HAVE_extendpqiqi2)
929 {
930 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
931 return;
932 }
933 #endif /* HAVE_extendpqiqi2 */
934 abort ();
935 }
936 }
937
938 if (to_mode == PSImode)
939 {
940 if (from_mode != SImode)
941 from = convert_to_mode (SImode, from, unsignedp);
942
943 #ifdef HAVE_truncsipsi2
944 if (HAVE_truncsipsi2)
945 {
946 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
947 return;
948 }
949 #endif /* HAVE_truncsipsi2 */
950 abort ();
951 }
952
953 if (from_mode == PSImode)
954 {
955 if (to_mode != SImode)
956 {
957 from = convert_to_mode (SImode, from, unsignedp);
958 from_mode = SImode;
959 }
960 else
961 {
962 #ifdef HAVE_extendpsisi2
963 if (HAVE_extendpsisi2)
964 {
965 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
966 return;
967 }
968 #endif /* HAVE_extendpsisi2 */
969 abort ();
970 }
971 }
972
973 if (to_mode == PDImode)
974 {
975 if (from_mode != DImode)
976 from = convert_to_mode (DImode, from, unsignedp);
977
978 #ifdef HAVE_truncdipdi2
979 if (HAVE_truncdipdi2)
980 {
981 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
982 return;
983 }
984 #endif /* HAVE_truncdipdi2 */
985 abort ();
986 }
987
988 if (from_mode == PDImode)
989 {
990 if (to_mode != DImode)
991 {
992 from = convert_to_mode (DImode, from, unsignedp);
993 from_mode = DImode;
994 }
995 else
996 {
997 #ifdef HAVE_extendpdidi2
998 if (HAVE_extendpdidi2)
999 {
1000 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1001 return;
1002 }
1003 #endif /* HAVE_extendpdidi2 */
1004 abort ();
1005 }
1006 }
1007
1008 /* Now follow all the conversions between integers
1009 no more than a word long. */
1010
1011 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1012 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1013 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1014 GET_MODE_BITSIZE (from_mode)))
1015 {
1016 if (!((GET_CODE (from) == MEM
1017 && ! MEM_VOLATILE_P (from)
1018 && direct_load[(int) to_mode]
1019 && ! mode_dependent_address_p (XEXP (from, 0)))
1020 || GET_CODE (from) == REG
1021 || GET_CODE (from) == SUBREG))
1022 from = force_reg (from_mode, from);
1023 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1024 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1025 from = copy_to_reg (from);
1026 emit_move_insn (to, gen_lowpart (to_mode, from));
1027 return;
1028 }
1029
1030 /* Handle extension. */
1031 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1032 {
1033 /* Convert directly if that works. */
1034 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1035 != CODE_FOR_nothing)
1036 {
1037 emit_unop_insn (code, to, from, equiv_code);
1038 return;
1039 }
1040 else
1041 {
1042 enum machine_mode intermediate;
1043 rtx tmp;
1044 tree shift_amount;
1045
1046 /* Search for a mode to convert via. */
1047 for (intermediate = from_mode; intermediate != VOIDmode;
1048 intermediate = GET_MODE_WIDER_MODE (intermediate))
1049 if (((can_extend_p (to_mode, intermediate, unsignedp)
1050 != CODE_FOR_nothing)
1051 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (intermediate))))
1054 && (can_extend_p (intermediate, from_mode, unsignedp)
1055 != CODE_FOR_nothing))
1056 {
1057 convert_move (to, convert_to_mode (intermediate, from,
1058 unsignedp), unsignedp);
1059 return;
1060 }
1061
1062 /* No suitable intermediate mode.
1063 Generate what we need with shifts. */
1064 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1065 - GET_MODE_BITSIZE (from_mode), 0);
1066 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1067 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1068 to, unsignedp);
1069 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1070 to, unsignedp);
1071 if (tmp != to)
1072 emit_move_insn (to, tmp);
1073 return;
1074 }
1075 }
1076
1077 /* Support special truncate insns for certain modes. */
1078
1079 if (from_mode == DImode && to_mode == SImode)
1080 {
1081 #ifdef HAVE_truncdisi2
1082 if (HAVE_truncdisi2)
1083 {
1084 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1085 return;
1086 }
1087 #endif
1088 convert_move (to, force_reg (from_mode, from), unsignedp);
1089 return;
1090 }
1091
1092 if (from_mode == DImode && to_mode == HImode)
1093 {
1094 #ifdef HAVE_truncdihi2
1095 if (HAVE_truncdihi2)
1096 {
1097 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1098 return;
1099 }
1100 #endif
1101 convert_move (to, force_reg (from_mode, from), unsignedp);
1102 return;
1103 }
1104
1105 if (from_mode == DImode && to_mode == QImode)
1106 {
1107 #ifdef HAVE_truncdiqi2
1108 if (HAVE_truncdiqi2)
1109 {
1110 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1111 return;
1112 }
1113 #endif
1114 convert_move (to, force_reg (from_mode, from), unsignedp);
1115 return;
1116 }
1117
1118 if (from_mode == SImode && to_mode == HImode)
1119 {
1120 #ifdef HAVE_truncsihi2
1121 if (HAVE_truncsihi2)
1122 {
1123 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1124 return;
1125 }
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1129 }
1130
1131 if (from_mode == SImode && to_mode == QImode)
1132 {
1133 #ifdef HAVE_truncsiqi2
1134 if (HAVE_truncsiqi2)
1135 {
1136 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1137 return;
1138 }
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1142 }
1143
1144 if (from_mode == HImode && to_mode == QImode)
1145 {
1146 #ifdef HAVE_trunchiqi2
1147 if (HAVE_trunchiqi2)
1148 {
1149 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1150 return;
1151 }
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1155 }
1156
1157 if (from_mode == TImode && to_mode == DImode)
1158 {
1159 #ifdef HAVE_trunctidi2
1160 if (HAVE_trunctidi2)
1161 {
1162 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1163 return;
1164 }
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1168 }
1169
1170 if (from_mode == TImode && to_mode == SImode)
1171 {
1172 #ifdef HAVE_trunctisi2
1173 if (HAVE_trunctisi2)
1174 {
1175 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1176 return;
1177 }
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1181 }
1182
1183 if (from_mode == TImode && to_mode == HImode)
1184 {
1185 #ifdef HAVE_trunctihi2
1186 if (HAVE_trunctihi2)
1187 {
1188 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1189 return;
1190 }
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1194 }
1195
1196 if (from_mode == TImode && to_mode == QImode)
1197 {
1198 #ifdef HAVE_trunctiqi2
1199 if (HAVE_trunctiqi2)
1200 {
1201 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1202 return;
1203 }
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1207 }
1208
1209 /* Handle truncation of volatile memrefs, and so on;
1210 the things that couldn't be truncated directly,
1211 and for which there was no special instruction. */
1212 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1213 {
1214 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1215 emit_move_insn (to, temp);
1216 return;
1217 }
1218
1219 /* Mode combination is not recognized. */
1220 abort ();
1221 }
1222
1223 /* Return an rtx for a value that would result
1224 from converting X to mode MODE.
1225 Both X and MODE may be floating, or both integer.
1226 UNSIGNEDP is nonzero if X is an unsigned value.
1227 This can be done by referring to a part of X in place
1228 or by copying to a new temporary with conversion.
1229
1230 This function *must not* call protect_from_queue
1231 except when putting X into an insn (in which case convert_move does it). */
1232
1233 rtx
1234 convert_to_mode (mode, x, unsignedp)
1235 enum machine_mode mode;
1236 rtx x;
1237 int unsignedp;
1238 {
1239 return convert_modes (mode, VOIDmode, x, unsignedp);
1240 }
1241
1242 /* Return an rtx for a value that would result
1243 from converting X from mode OLDMODE to mode MODE.
1244 Both modes may be floating, or both integer.
1245 UNSIGNEDP is nonzero if X is an unsigned value.
1246
1247 This can be done by referring to a part of X in place
1248 or by copying to a new temporary with conversion.
1249
1250 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1251
1252 This function *must not* call protect_from_queue
1253 except when putting X into an insn (in which case convert_move does it). */
1254
1255 rtx
1256 convert_modes (mode, oldmode, x, unsignedp)
1257 enum machine_mode mode, oldmode;
1258 rtx x;
1259 int unsignedp;
1260 {
1261 register rtx temp;
1262
1263 /* If FROM is a SUBREG that indicates that we have already done at least
1264 the required extension, strip it. */
1265
1266 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1267 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1268 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1269 x = gen_lowpart (mode, x);
1270
1271 if (GET_MODE (x) != VOIDmode)
1272 oldmode = GET_MODE (x);
1273
1274 if (mode == oldmode)
1275 return x;
1276
1277 /* There is one case that we must handle specially: If we are converting
1278 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1279 we are to interpret the constant as unsigned, gen_lowpart will do
1280 the wrong thing if the constant appears negative. What we want to do is
1281 make the high-order word of the constant zero, not all ones. */
1282
1283 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1284 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1285 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1286 {
1287 HOST_WIDE_INT val = INTVAL (x);
1288
1289 if (oldmode != VOIDmode
1290 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1291 {
1292 int width = GET_MODE_BITSIZE (oldmode);
1293
1294 /* We need to zero extend VAL. */
1295 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1296 }
1297
1298 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1299 }
1300
1301 /* We can do this with a gen_lowpart if both desired and current modes
1302 are integer, and this is either a constant integer, a register, or a
1303 non-volatile MEM. Except for the constant case where MODE is no
1304 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1305
1306 if ((GET_CODE (x) == CONST_INT
1307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1308 || (GET_MODE_CLASS (mode) == MODE_INT
1309 && GET_MODE_CLASS (oldmode) == MODE_INT
1310 && (GET_CODE (x) == CONST_DOUBLE
1311 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1312 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1313 && direct_load[(int) mode])
1314 || (GET_CODE (x) == REG
1315 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1316 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1317 {
1318 /* ?? If we don't know OLDMODE, we have to assume here that
1319 X does not need sign- or zero-extension. This may not be
1320 the case, but it's the best we can do. */
1321 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1322 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1323 {
1324 HOST_WIDE_INT val = INTVAL (x);
1325 int width = GET_MODE_BITSIZE (oldmode);
1326
1327 /* We must sign or zero-extend in this case. Start by
1328 zero-extending, then sign extend if we need to. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 if (! unsignedp
1331 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1332 val |= (HOST_WIDE_INT) (-1) << width;
1333
1334 return GEN_INT (val);
1335 }
1336
1337 return gen_lowpart (mode, x);
1338 }
1339
1340 temp = gen_reg_rtx (mode);
1341 convert_move (temp, x, unsignedp);
1342 return temp;
1343 }
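/* Illustrative sketch (editorial addition, not part of the original
   source): typical uses of the conversion entry points above.  NARROW
   is assumed to be an existing SImode rtx; the names are hypothetical.

       rtx wide    = convert_to_mode (DImode, narrow, 0);   -- sign-extend
       rtx shorter = convert_to_mode (HImode, narrow, 1);   -- truncate

   convert_to_mode may hand back the operand itself (for example via
   gen_lowpart for a no-op truncation, or an adjusted CONST_INT) or a
   fresh pseudo filled in by convert_move, so callers must not assume
   the result is a new register.  */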
1344 \f
1345
1346 /* This macro determines the largest unit size that
1347 move_by_pieces can use.  */
1348
1349 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1350 move efficiently, as opposed to MOVE_MAX which is the maximum
1351 number of bytes we can move with a single instruction. */
1352
1353 #ifndef MOVE_MAX_PIECES
1354 #define MOVE_MAX_PIECES MOVE_MAX
1355 #endif
1356
1357 /* Generate several move instructions to copy LEN bytes
1358 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1359 The caller must pass FROM and TO
1360 through protect_from_queue before calling.
1361 ALIGN (in bytes) is maximum alignment we can assume. */
1362
1363 void
1364 move_by_pieces (to, from, len, align)
1365 rtx to, from;
1366 int len, align;
1367 {
1368 struct move_by_pieces data;
1369 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1370 int max_size = MOVE_MAX_PIECES + 1;
1371 enum machine_mode mode = VOIDmode, tmode;
1372 enum insn_code icode;
1373
1374 data.offset = 0;
1375 data.to_addr = to_addr;
1376 data.from_addr = from_addr;
1377 data.to = to;
1378 data.from = from;
1379 data.autinc_to
1380 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1381 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1382 data.autinc_from
1383 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1384 || GET_CODE (from_addr) == POST_INC
1385 || GET_CODE (from_addr) == POST_DEC);
1386
1387 data.explicit_inc_from = 0;
1388 data.explicit_inc_to = 0;
1389 data.reverse
1390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1391 if (data.reverse) data.offset = len;
1392 data.len = len;
1393
1394 data.to_struct = MEM_IN_STRUCT_P (to);
1395 data.from_struct = MEM_IN_STRUCT_P (from);
1396 data.to_readonly = RTX_UNCHANGING_P (to);
1397 data.from_readonly = RTX_UNCHANGING_P (from);
1398
1399 /* If copying requires more than two move insns,
1400 copy addresses to registers (to make displacements shorter)
1401 and use post-increment if available. */
1402 if (!(data.autinc_from && data.autinc_to)
1403 && move_by_pieces_ninsns (len, align) > 2)
1404 {
1405 /* Find the mode of the largest move... */
1406 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1407 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1408 if (GET_MODE_SIZE (tmode) < max_size)
1409 mode = tmode;
1410
1411 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1412 {
1413 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1414 data.autinc_from = 1;
1415 data.explicit_inc_from = -1;
1416 }
1417 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1418 {
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 data.autinc_from = 1;
1421 data.explicit_inc_from = 1;
1422 }
1423 if (!data.autinc_from && CONSTANT_P (from_addr))
1424 data.from_addr = copy_addr_to_reg (from_addr);
1425 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = -1;
1430 }
1431 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1432 {
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 data.autinc_to = 1;
1435 data.explicit_inc_to = 1;
1436 }
1437 if (!data.autinc_to && CONSTANT_P (to_addr))
1438 data.to_addr = copy_addr_to_reg (to_addr);
1439 }
1440
1441 if (! SLOW_UNALIGNED_ACCESS
1442 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1443 align = MOVE_MAX;
1444
1445 /* First move what we can in the largest integer mode, then go to
1446 successively smaller modes. */
1447
1448 while (max_size > 1)
1449 {
1450 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1451 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1452 if (GET_MODE_SIZE (tmode) < max_size)
1453 mode = tmode;
1454
1455 if (mode == VOIDmode)
1456 break;
1457
1458 icode = mov_optab->handlers[(int) mode].insn_code;
1459 if (icode != CODE_FOR_nothing
1460 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1461 GET_MODE_SIZE (mode)))
1462 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1463
1464 max_size = GET_MODE_SIZE (mode);
1465 }
1466
1467 /* The code above should have handled everything. */
1468 if (data.len > 0)
1469 abort ();
1470 }
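/* Illustrative example (editorial addition, not part of the original
   source): assume MOVE_MAX_PIECES == 4 and a 7-byte copy whose
   operands are 4-byte aligned.  The loop above works from the widest
   usable mode down, so the copy becomes one SImode move (4 bytes),
   then one HImode move (2 bytes), then one QImode move (1 byte),
   leaving data.len == 0 as required by the final sanity check.  */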
1471
1472 /* Return number of insns required to move L bytes by pieces.
1473 ALIGN (in bytes) is maximum alignment we can assume. */
1474
1475 static int
1476 move_by_pieces_ninsns (l, align)
1477 unsigned int l;
1478 int align;
1479 {
1480 register int n_insns = 0;
1481 int max_size = MOVE_MAX + 1;
1482
1483 if (! SLOW_UNALIGNED_ACCESS
1484 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1485 align = MOVE_MAX;
1486
1487 while (max_size > 1)
1488 {
1489 enum machine_mode mode = VOIDmode, tmode;
1490 enum insn_code icode;
1491
1492 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1493 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1494 if (GET_MODE_SIZE (tmode) < max_size)
1495 mode = tmode;
1496
1497 if (mode == VOIDmode)
1498 break;
1499
1500 icode = mov_optab->handlers[(int) mode].insn_code;
1501 if (icode != CODE_FOR_nothing
1502 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1503 GET_MODE_SIZE (mode)))
1504 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1505
1506 max_size = GET_MODE_SIZE (mode);
1507 }
1508
1509 return n_insns;
1510 }
1511
1512 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1513 with move instructions for mode MODE. GENFUN is the gen_... function
1514 to make a move insn for that mode. DATA has all the other info. */
1515
1516 static void
1517 move_by_pieces_1 (genfun, mode, data)
1518 rtx (*genfun) PROTO ((rtx, ...));
1519 enum machine_mode mode;
1520 struct move_by_pieces *data;
1521 {
1522 register int size = GET_MODE_SIZE (mode);
1523 register rtx to1, from1;
1524
1525 while (data->len >= size)
1526 {
1527 if (data->reverse) data->offset -= size;
1528
1529 to1 = (data->autinc_to
1530 ? gen_rtx_MEM (mode, data->to_addr)
1531 : copy_rtx (change_address (data->to, mode,
1532 plus_constant (data->to_addr,
1533 data->offset))));
1534 MEM_IN_STRUCT_P (to1) = data->to_struct;
1535 RTX_UNCHANGING_P (to1) = data->to_readonly;
1536
1537 from1
1538 = (data->autinc_from
1539 ? gen_rtx_MEM (mode, data->from_addr)
1540 : copy_rtx (change_address (data->from, mode,
1541 plus_constant (data->from_addr,
1542 data->offset))));
1543 MEM_IN_STRUCT_P (from1) = data->from_struct;
1544 RTX_UNCHANGING_P (from1) = data->from_readonly;
1545
1546 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1548 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1549 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1550
1551 emit_insn ((*genfun) (to1, from1));
1552 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1553 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1554 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1555 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1556
1557 if (! data->reverse) data->offset += size;
1558
1559 data->len -= size;
1560 }
1561 }
1562 \f
1563 /* Emit code to move a block Y to a block X.
1564 This may be done with string-move instructions,
1565 with multiple scalar move instructions, or with a library call.
1566
1567 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1568 with mode BLKmode.
1569 SIZE is an rtx that says how long they are.
1570 ALIGN is the maximum alignment we can assume they have,
1571 measured in bytes.
1572
1573 Return the address of the new block, if memcpy is called and returns it,
1574 0 otherwise. */
1575
1576 rtx
1577 emit_block_move (x, y, size, align)
1578 rtx x, y;
1579 rtx size;
1580 int align;
1581 {
1582 rtx retval = 0;
1583 #ifdef TARGET_MEM_FUNCTIONS
1584 static tree fn;
1585 tree call_expr, arg_list;
1586 #endif
1587
1588 if (GET_MODE (x) != BLKmode)
1589 abort ();
1590
1591 if (GET_MODE (y) != BLKmode)
1592 abort ();
1593
1594 x = protect_from_queue (x, 1);
1595 y = protect_from_queue (y, 0);
1596 size = protect_from_queue (size, 0);
1597
1598 if (GET_CODE (x) != MEM)
1599 abort ();
1600 if (GET_CODE (y) != MEM)
1601 abort ();
1602 if (size == 0)
1603 abort ();
1604
1605 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1606 move_by_pieces (x, y, INTVAL (size), align);
1607 else
1608 {
1609 /* Try the most limited insn first, because there's no point
1610 including more than one in the machine description unless
1611 the more limited one has some advantage. */
1612
1613 rtx opalign = GEN_INT (align);
1614 enum machine_mode mode;
1615
1616 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1617 mode = GET_MODE_WIDER_MODE (mode))
1618 {
1619 enum insn_code code = movstr_optab[(int) mode];
1620 insn_operand_predicate_fn pred;
1621
1622 if (code != CODE_FOR_nothing
1623 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1624 here because if SIZE is less than the mode mask, as it is
1625 returned by the macro, it will definitely be less than the
1626 actual mode mask. */
1627 && ((GET_CODE (size) == CONST_INT
1628 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1629 <= (GET_MODE_MASK (mode) >> 1)))
1630 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1631 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1632 || (*pred) (x, BLKmode))
1633 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1634 || (*pred) (y, BLKmode))
1635 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1636 || (*pred) (opalign, VOIDmode)))
1637 {
1638 rtx op2;
1639 rtx last = get_last_insn ();
1640 rtx pat;
1641
1642 op2 = convert_to_mode (mode, size, 1);
1643 pred = insn_data[(int) code].operand[2].predicate;
1644 if (pred != 0 && ! (*pred) (op2, mode))
1645 op2 = copy_to_mode_reg (mode, op2);
1646
1647 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1648 if (pat)
1649 {
1650 emit_insn (pat);
1651 return 0;
1652 }
1653 else
1654 delete_insns_since (last);
1655 }
1656 }
1657
1658 /* X, Y, or SIZE may have been passed through protect_from_queue.
1659
1660 It is unsafe to save the value generated by protect_from_queue
1661 and reuse it later. Consider what happens if emit_queue is
1662 called before the return value from protect_from_queue is used.
1663
1664 Expansion of the CALL_EXPR below will call emit_queue before
1665 we are finished emitting RTL for argument setup. So if we are
1666 not careful we could get the wrong value for an argument.
1667
1668 To avoid this problem we go ahead and emit code to copy X, Y &
1669 SIZE into new pseudos. We can then place those new pseudos
1670 into an RTL_EXPR and use them later, even after a call to
1671 emit_queue.
1672
1673 Note this is not strictly needed for library calls since they
1674 do not call emit_queue before loading their arguments. However,
1675 we may need to have library calls call emit_queue in the future
1676 since failing to do so could cause problems for targets which
1677 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1678 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1679 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1680
1681 #ifdef TARGET_MEM_FUNCTIONS
1682 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1683 #else
1684 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1685 TREE_UNSIGNED (integer_type_node));
1686 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1687 #endif
1688
1689 #ifdef TARGET_MEM_FUNCTIONS
1690 /* It is incorrect to use the libcall calling conventions to call
1691 memcpy in this context.
1692
1693 This could be a user call to memcpy and the user may wish to
1694 examine the return value from memcpy.
1695
1696 For targets where libcalls and normal calls have different conventions
1697 for returning pointers, we could end up generating incorrect code.
1698
1699 So instead of using a libcall sequence we build up a suitable
1700 CALL_EXPR and expand the call in the normal fashion. */
1701 if (fn == NULL_TREE)
1702 {
1703 tree fntype;
1704
1705 /* This was copied from except.c, I don't know if all this is
1706 necessary in this context or not. */
1707 fn = get_identifier ("memcpy");
1708 push_obstacks_nochange ();
1709 end_temporary_allocation ();
1710 fntype = build_pointer_type (void_type_node);
1711 fntype = build_function_type (fntype, NULL_TREE);
1712 fn = build_decl (FUNCTION_DECL, fn, fntype);
1713 ggc_add_tree_root (&fn, 1);
1714 DECL_EXTERNAL (fn) = 1;
1715 TREE_PUBLIC (fn) = 1;
1716 DECL_ARTIFICIAL (fn) = 1;
1717 make_decl_rtl (fn, NULL_PTR, 1);
1718 assemble_external (fn);
1719 pop_obstacks ();
1720 }
1721
1722 /* We need to make an argument list for the function call.
1723
1724 memcpy has three arguments, the first two are void * addresses and
1725 the last is a size_t byte count for the copy. */
1726 arg_list
1727 = build_tree_list (NULL_TREE,
1728 make_tree (build_pointer_type (void_type_node), x));
1729 TREE_CHAIN (arg_list)
1730 = build_tree_list (NULL_TREE,
1731 make_tree (build_pointer_type (void_type_node), y));
1732 TREE_CHAIN (TREE_CHAIN (arg_list))
1733 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1734 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1735
1736 /* Now we have to build up the CALL_EXPR itself. */
1737 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1738 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1739 call_expr, arg_list, NULL_TREE);
1740 TREE_SIDE_EFFECTS (call_expr) = 1;
1741
1742 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1743 #else
1744 emit_library_call (bcopy_libfunc, 0,
1745 VOIDmode, 3, y, Pmode, x, Pmode,
1746 convert_to_mode (TYPE_MODE (integer_type_node), size,
1747 TREE_UNSIGNED (integer_type_node)),
1748 TYPE_MODE (integer_type_node));
1749 #endif
1750 }
1751
1752 return retval;
1753 }
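/* Illustrative sketch (editorial addition, not part of the original
   source): copying a 24-byte BLKmode object.  DST and SRC are assumed
   to be existing BLKmode MEM rtx's; the names are hypothetical.

       emit_block_move (dst, src, GEN_INT (24), 4);

   Because the size is a CONST_INT, emit_block_move first considers an
   inline move_by_pieces copy (see MOVE_BY_PIECES_P above), then tries
   each movstrM pattern from the narrowest mode up, and finally falls
   back to a memcpy call (or to bcopy when TARGET_MEM_FUNCTIONS is not
   defined).  */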
1754 \f
1755 /* Copy all or part of a value X into registers starting at REGNO.
1756 The number of registers to be filled is NREGS. */
1757
1758 void
1759 move_block_to_reg (regno, x, nregs, mode)
1760 int regno;
1761 rtx x;
1762 int nregs;
1763 enum machine_mode mode;
1764 {
1765 int i;
1766 #ifdef HAVE_load_multiple
1767 rtx pat;
1768 rtx last;
1769 #endif
1770
1771 if (nregs == 0)
1772 return;
1773
1774 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1775 x = validize_mem (force_const_mem (mode, x));
1776
1777 /* See if the machine can do this with a load multiple insn. */
1778 #ifdef HAVE_load_multiple
1779 if (HAVE_load_multiple)
1780 {
1781 last = get_last_insn ();
1782 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1783 GEN_INT (nregs));
1784 if (pat)
1785 {
1786 emit_insn (pat);
1787 return;
1788 }
1789 else
1790 delete_insns_since (last);
1791 }
1792 #endif
1793
1794 for (i = 0; i < nregs; i++)
1795 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1796 operand_subword_force (x, i, mode));
1797 }
1798
1799 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1800 The number of registers to be filled is NREGS. SIZE indicates the number
1801 of bytes in the object X. */
1802
1803
1804 void
1805 move_block_from_reg (regno, x, nregs, size)
1806 int regno;
1807 rtx x;
1808 int nregs;
1809 int size;
1810 {
1811 int i;
1812 #ifdef HAVE_store_multiple
1813 rtx pat;
1814 rtx last;
1815 #endif
1816 enum machine_mode mode;
1817
1818 /* If SIZE is that of a mode no bigger than a word, just use that
1819 mode's store operation. */
1820 if (size <= UNITS_PER_WORD
1821 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1822 {
1823 emit_move_insn (change_address (x, mode, NULL),
1824 gen_rtx_REG (mode, regno));
1825 return;
1826 }
1827
1828 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1829 to the left before storing to memory. Note that the previous test
1830 doesn't handle all cases (e.g. SIZE == 3). */
1831 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1832 {
1833 rtx tem = operand_subword (x, 0, 1, BLKmode);
1834 rtx shift;
1835
1836 if (tem == 0)
1837 abort ();
1838
1839 shift = expand_shift (LSHIFT_EXPR, word_mode,
1840 gen_rtx_REG (word_mode, regno),
1841 build_int_2 ((UNITS_PER_WORD - size)
1842 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1843 emit_move_insn (tem, shift);
1844 return;
1845 }
1846
1847 /* See if the machine can do this with a store multiple insn. */
1848 #ifdef HAVE_store_multiple
1849 if (HAVE_store_multiple)
1850 {
1851 last = get_last_insn ();
1852 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1853 GEN_INT (nregs));
1854 if (pat)
1855 {
1856 emit_insn (pat);
1857 return;
1858 }
1859 else
1860 delete_insns_since (last);
1861 }
1862 #endif
1863
1864 for (i = 0; i < nregs; i++)
1865 {
1866 rtx tem = operand_subword (x, i, 1, BLKmode);
1867
1868 if (tem == 0)
1869 abort ();
1870
1871 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1872 }
1873 }
1874
1875 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1876 registers represented by a PARALLEL. SSIZE represents the total size of
1877 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1878 SRC in bits. */
1879 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1880 the balance will be in what would be the low-order memory addresses, i.e.
1881 left justified for big endian, right justified for little endian. This
1882 happens to be true for the targets currently using this support. If this
1883 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1884 would be needed. */
1885
1886 void
1887 emit_group_load (dst, orig_src, ssize, align)
1888 rtx dst, orig_src;
1889 int align, ssize;
1890 {
1891 rtx *tmps, src;
1892 int start, i;
1893
1894 if (GET_CODE (dst) != PARALLEL)
1895 abort ();
1896
1897 /* Check for a NULL entry, used to indicate that the parameter goes
1898 both on the stack and in registers. */
1899 if (XEXP (XVECEXP (dst, 0, 0), 0))
1900 start = 0;
1901 else
1902 start = 1;
1903
1904 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1905
1906 /* If we won't be loading directly from memory, protect the real source
1907 from strange tricks we might play. */
1908 src = orig_src;
1909 if (GET_CODE (src) != MEM)
1910 {
1911 src = gen_reg_rtx (GET_MODE (orig_src));
1912 emit_move_insn (src, orig_src);
1913 }
1914
1915 /* Process the pieces. */
1916 for (i = start; i < XVECLEN (dst, 0); i++)
1917 {
1918 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1919 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1920 int bytelen = GET_MODE_SIZE (mode);
1921 int shift = 0;
1922
1923 /* Handle trailing fragments that run over the size of the struct. */
1924 if (ssize >= 0 && bytepos + bytelen > ssize)
1925 {
1926 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1927 bytelen = ssize - bytepos;
1928 if (bytelen <= 0)
1929 abort();
1930 }
1931
1932 /* Optimize the access just a bit. */
1933 if (GET_CODE (src) == MEM
1934 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1935 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 {
1938 tmps[i] = gen_reg_rtx (mode);
1939 emit_move_insn (tmps[i],
1940 change_address (src, mode,
1941 plus_constant (XEXP (src, 0),
1942 bytepos)));
1943 }
1944 else if (GET_CODE (src) == CONCAT)
1945 {
1946 if (bytepos == 0
1947 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1948 tmps[i] = XEXP (src, 0);
1949 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1951 tmps[i] = XEXP (src, 1);
1952 else
1953 abort ();
1954 }
1955 else
1956 {
1957 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1958 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, align, ssize);
1960 }
1961
1962 if (BYTES_BIG_ENDIAN && shift)
1963 {
1964 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1965 tmps[i], 0, OPTAB_WIDEN);
1966 }
1967 }
1968 emit_queue();
1969
1970 /* Copy the extracted pieces into the proper (probable) hard regs. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1972 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1973 }
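
/* Usage sketch (illustrative only): when a value's location is described by
   a PARALLEL -- for instance a structure return value spread over several
   registers -- callers later in this file load it from memory with

     if (GET_CODE (to_rtx) == PARALLEL)
       emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);

   (see expand_assignment below for the real uses).  */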
1974
1975 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1976 registers represented by a PARALLEL. SSIZE represents the total size of
1977    block DST, or -1 if not known.  ALIGN is the known alignment of DST in bytes. */
1978
1979 void
1980 emit_group_store (orig_dst, src, ssize, align)
1981 rtx orig_dst, src;
1982 int ssize, align;
1983 {
1984 rtx *tmps, dst;
1985 int start, i;
1986
1987 if (GET_CODE (src) != PARALLEL)
1988 abort ();
1989
1990 /* Check for a NULL entry, used to indicate that the parameter goes
1991 both on the stack and in registers. */
1992 if (XEXP (XVECEXP (src, 0, 0), 0))
1993 start = 0;
1994 else
1995 start = 1;
1996
1997 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1998
1999 /* Copy the (probable) hard regs into pseudos. */
2000 for (i = start; i < XVECLEN (src, 0); i++)
2001 {
2002 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2003 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2004 emit_move_insn (tmps[i], reg);
2005 }
2006 emit_queue();
2007
2008 /* If we won't be storing directly into memory, protect the real destination
2009 from strange tricks we might play. */
2010 dst = orig_dst;
2011 if (GET_CODE (dst) == PARALLEL)
2012 {
2013 rtx temp;
2014
2015 /* We can get a PARALLEL dst if there is a conditional expression in
2016 a return statement. In that case, the dst and src are the same,
2017 so no action is necessary. */
2018 if (rtx_equal_p (dst, src))
2019 return;
2020
2021 /* It is unclear if we can ever reach here, but we may as well handle
2022 it. Allocate a temporary, and split this into a store/load to/from
2023 the temporary. */
2024
2025 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2026 emit_group_store (temp, src, ssize, align);
2027 emit_group_load (dst, temp, ssize, align);
2028 return;
2029 }
2030 else if (GET_CODE (dst) != MEM)
2031 {
2032 dst = gen_reg_rtx (GET_MODE (orig_dst));
2033 /* Make life a bit easier for combine. */
2034 emit_move_insn (dst, const0_rtx);
2035 }
2036 else if (! MEM_IN_STRUCT_P (dst))
2037 {
2038       /* store_bit_field requires that memory operations have
2039 	 MEM_IN_STRUCT_P set; ours might not, so set it on a copy. */
2040
2041 dst = copy_rtx (orig_dst);
2042 MEM_SET_IN_STRUCT_P (dst, 1);
2043 }
2044
2045 /* Process the pieces. */
2046 for (i = start; i < XVECLEN (src, 0); i++)
2047 {
2048 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2049 enum machine_mode mode = GET_MODE (tmps[i]);
2050 int bytelen = GET_MODE_SIZE (mode);
2051
2052 /* Handle trailing fragments that run over the size of the struct. */
2053 if (ssize >= 0 && bytepos + bytelen > ssize)
2054 {
2055 if (BYTES_BIG_ENDIAN)
2056 {
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2059 tmps[i], 0, OPTAB_WIDEN);
2060 }
2061 bytelen = ssize - bytepos;
2062 }
2063
2064 /* Optimize the access just a bit. */
2065 if (GET_CODE (dst) == MEM
2066 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2067 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2068 && bytelen == GET_MODE_SIZE (mode))
2069 {
2070 emit_move_insn (change_address (dst, mode,
2071 plus_constant (XEXP (dst, 0),
2072 bytepos)),
2073 tmps[i]);
2074 }
2075 else
2076 {
2077 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2079 }
2080 }
2081 emit_queue();
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2086 }
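
/* Usage sketch (illustrative only; `type' and `ssize' are hypothetical):
   the inverse direction, scattering a PARALLEL SRC into a stack slot, can
   be written as

     rtx slot = assign_stack_temp (BLKmode, ssize, 0);
     emit_group_store (slot, src, ssize, TYPE_ALIGN (type) / BITS_PER_UNIT);

   which is essentially what the PARALLEL-destination case above does when
   it falls back to a temporary.  */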
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment.
2096 */
2097
2098 rtx
2099 copy_blkmode_from_reg (tgtblk, srcreg, type)
2100 rtx tgtblk;
2101 rtx srcreg;
2102 tree type;
2103 {
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2114 }
2115
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2122
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2129 * BITS_PER_UNIT));
2130
2131   /* Copy the structure BITSIZE bits at a time.
2132
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2139 {
2140
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2148 BLKmode);
2149
2150 /* We need a new destination operand each time bitpos is on
2151 a word boundary. */
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154
2155 /* Use xbitpos for the source extraction (right justified) and
2156 	 bitpos for the destination store (left justified). */
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2161 word_mode,
2162 bitsize / BITS_PER_UNIT,
2163 BITS_PER_WORD),
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 }
2166 return tgtblk;
2167 }
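
/* Usage sketch (illustrative only; `srcreg' and `exp' are hypothetical):
   after expanding a call whose BLKmode result comes back in registers,
   the value can be put into addressable memory with

     target = copy_blkmode_from_reg (NULL_RTX, srcreg, TREE_TYPE (exp));

   Passing a null TGTBLK, as here, makes the routine allocate and return a
   stack temporary of the right size.  */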
2168
2169
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2172
2173 void
2174 use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2176 {
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 }
2185
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2188
2189 void
2190 use_regs (call_fusage, regno, nregs)
2191 rtx *call_fusage;
2192 int regno;
2193 int nregs;
2194 {
2195 int i;
2196
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2202 }
2203
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207
2208 void
2209 use_group_regs (call_fusage, regs)
2210 rtx *call_fusage;
2211 rtx regs;
2212 {
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
2225 }
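
/* Usage sketch (illustrative only; `reg' as returned by FUNCTION_ARG,
   `nregs' and the local list `call_fusage' are hypothetical): a caller
   recording the registers that carry an argument, so the CALL_INSN's
   function-usage list mentions them, might write

     if (GET_CODE (reg) == PARALLEL)
       use_group_regs (&call_fusage, reg);
     else
       use_regs (&call_fusage, REGNO (reg), nregs);
*/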
2226 \f
2227 /* Generate several move instructions to clear LEN bytes of block TO
2228    (a MEM rtx with BLKmode).  The caller must pass TO through
2229    protect_from_queue before calling.  ALIGN (in bytes) is the maximum
2230    alignment we can assume. */
2231
2232 static void
2233 clear_by_pieces (to, len, align)
2234 rtx to;
2235 int len, align;
2236 {
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2242
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2249
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2255
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2257
2258   /* If clearing requires more than two move insns,
2259      copy the destination address to a register (to make displacements
2260      shorter) and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2263 {
2264 /* Determine the main mode we'll be using */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2269
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2271 {
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2275 }
2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2281 }
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2284 }
2285
2286 if (! SLOW_UNALIGNED_ACCESS
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2289
2290   /* First clear what we can in the largest integer mode, then go to
2291      successively smaller modes. */
2292
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2306 GET_MODE_SIZE (mode)))
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 if (data.len != 0)
2314 abort ();
2315 }
2316
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2320
2321 static void
2322 clear_by_pieces_1 (genfun, mode, data)
2323 rtx (*genfun) PROTO ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2326 {
2327 register int size = GET_MODE_SIZE (mode);
2328 register rtx to1;
2329
2330 while (data->len >= size)
2331 {
2332 if (data->reverse) data->offset -= size;
2333
2334 to1 = (data->autinc_to
2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2338 data->offset))));
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340
2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343
2344 emit_insn ((*genfun) (to1, const0_rtx));
2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347
2348 if (! data->reverse) data->offset += size;
2349
2350 data->len -= size;
2351 }
2352 }
2353 \f
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2356    the maximum alignment we can assume it has, measured in bytes.
2357
2358    If memset is used to do the clearing, return the rtx for its return value. */
2359
2360 rtx
2361 clear_storage (object, size, align)
2362 rtx object;
2363 rtx size;
2364 int align;
2365 {
2366 #ifdef TARGET_MEM_FUNCTIONS
2367 static tree fn;
2368 tree call_expr, arg_list;
2369 #endif
2370 rtx retval = 0;
2371
2372 if (GET_MODE (object) == BLKmode)
2373 {
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2376
2377 if (GET_CODE (size) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2380
2381 else
2382 {
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2386
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2389
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2392 {
2393 enum insn_code code = clrstr_optab[(int) mode];
2394 insn_operand_predicate_fn pred;
2395
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2403 <= (GET_MODE_MASK (mode) >> 1)))
2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2409 {
2410 rtx op1;
2411 rtx last = get_last_insn ();
2412 rtx pat;
2413
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2418
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2420 if (pat)
2421 {
2422 emit_insn (pat);
2423 return 0;
2424 }
2425 else
2426 delete_insns_since (last);
2427 }
2428 }
2429
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2431
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2435
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2439
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2443 emit_queue.
2444
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2451
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2454 #else
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2458 #endif
2459
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2464
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2467
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2470 incorrect code.
2471
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn == NULL_TREE)
2475 {
2476 tree fntype;
2477
2478 /* This was copied from except.c, I don't know if all this is
2479 necessary in this context or not. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2492 pop_obstacks ();
2493 }
2494
2495 /* We need to make an argument list for the function call.
2496
2497 	 memset has three arguments: the first is a void * address, the
2498 	 second an integer with the initialization value, and the last is a
2499 	 size_t byte count. */
2500 arg_list
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2503 object));
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2517
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2519 #else
2520 emit_library_call (bzero_libfunc, 0,
2521 VOIDmode, 2, object, Pmode, size,
2522 TYPE_MODE (integer_type_node));
2523 #endif
2524 }
2525 }
2526 else
2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528
2529 return retval;
2530 }
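
/* Usage sketch (illustrative only; `size' and `align' are hypothetical):
   zeroing a BLKmode stack temporary looks like

     rtx temp = assign_stack_temp (BLKmode, size, 0);
     clear_storage (temp, GEN_INT (size), align);

   For a non-BLKmode OBJECT the routine reduces to a single move of
   CONST0_RTX, as the final else arm above shows.  */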
2531
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2536
2537 Return the last instruction emitted. */
2538
2539 rtx
2540 emit_move_insn (x, y)
2541 rtx x, y;
2542 {
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2547
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2549 abort ();
2550
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 ;
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2556
2557 /* If X or Y are memory references, verify that their addresses are valid
2558 for the machine. */
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2565
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2568 || (flag_force_addr
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2571
2572 if (mode == BLKmode)
2573 abort ();
2574
2575 return emit_move_insn_1 (x, y);
2576 }
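
/* Usage sketch (illustrative only; `x', `mode' and `regno' are
   hypothetical): the typical pattern in this file for moving a word
   between a hard register and memory is simply

     emit_move_insn (change_address (x, mode, NULL),
		     gen_rtx_REG (mode, regno));

   Constants that fail LEGITIMATE_CONSTANT_P are forced into memory
   automatically, so callers need not check for that themselves.  */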
2577
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2581
2582 rtx
2583 emit_move_insn_1 (x, y)
2584 rtx x, y;
2585 {
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2589 int i;
2590
2591 if (mode >= MAX_MACHINE_MODE)
2592 abort ();
2593
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2595 return
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2601 * BITS_PER_UNIT),
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2604 0))
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2607 {
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
2610
2611       /* If this is a stack push, push the highpart first, so it
2612 will be in the argument order.
2613
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2617 {
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2623 gen_imagpart (submode, y)));
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2626 gen_realpart (submode, y)));
2627 #else
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_realpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_imagpart (submode, y)));
2634 #endif
2635 }
2636 else
2637 {
2638 /* If this is a complex value with each part being smaller than a
2639 word, the usual calling sequence will likely pack the pieces into
2640 a single register. Unfortunately, SUBREG of hard registers only
2641 deals in terms of words, so we have a problem converting input
2642 arguments to the CONCAT of two registers that is used elsewhere
2643 for complex values. If this is before reload, we can copy it into
2644 memory and reload. FIXME, we should see about using extract and
2645 insert on integer registers, but complex short and complex char
2646 variables should be rarely used. */
2647 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2648 && (reload_in_progress | reload_completed) == 0)
2649 {
2650 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2651 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2652
2653 if (packed_dest_p || packed_src_p)
2654 {
2655 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2656 ? MODE_FLOAT : MODE_INT);
2657
2658 enum machine_mode reg_mode =
2659 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2660
2661 if (reg_mode != BLKmode)
2662 {
2663 rtx mem = assign_stack_temp (reg_mode,
2664 GET_MODE_SIZE (mode), 0);
2665
2666 rtx cmem = change_address (mem, mode, NULL_RTX);
2667
2668 current_function->cannot_inline
2669 = "function uses short complex types";
2670
2671 if (packed_dest_p)
2672 {
2673 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2674 emit_move_insn_1 (cmem, y);
2675 return emit_move_insn_1 (sreg, mem);
2676 }
2677 else
2678 {
2679 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2680 emit_move_insn_1 (mem, sreg);
2681 return emit_move_insn_1 (x, cmem);
2682 }
2683 }
2684 }
2685 }
2686
2687 /* Show the output dies here. This is necessary for pseudos;
2688 hard regs shouldn't appear here except as return values.
2689 We never want to emit such a clobber after reload. */
2690 if (x != y
2691 && ! (reload_in_progress || reload_completed))
2692 {
2693 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2694 }
2695
2696 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2697 (gen_realpart (submode, x), gen_realpart (submode, y)));
2698 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2699 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2700 }
2701
2702 return get_last_insn ();
2703 }
2704
2705 /* This will handle any multi-word mode that lacks a move_insn pattern.
2706 However, you will get better code if you define such patterns,
2707 even if they must turn into multiple assembler instructions. */
2708 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2709 {
2710 rtx last_insn = 0;
2711
2712 #ifdef PUSH_ROUNDING
2713
2714 /* If X is a push on the stack, do the push now and replace
2715 X with a reference to the stack pointer. */
2716 if (push_operand (x, GET_MODE (x)))
2717 {
2718 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2719 x = change_address (x, VOIDmode, stack_pointer_rtx);
2720 }
2721 #endif
2722
2723 /* Show the output dies here. This is necessary for pseudos;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2726 if (x != y
2727 && ! (reload_in_progress || reload_completed))
2728 {
2729 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2730 }
2731
2732 for (i = 0;
2733 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2734 i++)
2735 {
2736 rtx xpart = operand_subword (x, i, 1, mode);
2737 rtx ypart = operand_subword (y, i, 1, mode);
2738
2739 /* If we can't get a part of Y, put Y into memory if it is a
2740 constant. Otherwise, force it into a register. If we still
2741 can't get a part of Y, abort. */
2742 if (ypart == 0 && CONSTANT_P (y))
2743 {
2744 y = force_const_mem (mode, y);
2745 ypart = operand_subword (y, i, 1, mode);
2746 }
2747 else if (ypart == 0)
2748 ypart = operand_subword_force (y, i, mode);
2749
2750 if (xpart == 0 || ypart == 0)
2751 abort ();
2752
2753 last_insn = emit_move_insn (xpart, ypart);
2754 }
2755
2756 return last_insn;
2757 }
2758 else
2759 abort ();
2760 }
2761 \f
2762 /* Pushing data onto the stack. */
2763
2764 /* Push a block of length SIZE (perhaps variable)
2765 and return an rtx to address the beginning of the block.
2766 Note that it is not possible for the value returned to be a QUEUED.
2767 The value may be virtual_outgoing_args_rtx.
2768
2769 EXTRA is the number of bytes of padding to push in addition to SIZE.
2770 BELOW nonzero means this padding comes at low addresses;
2771 otherwise, the padding comes at high addresses. */
2772
2773 rtx
2774 push_block (size, extra, below)
2775 rtx size;
2776 int extra, below;
2777 {
2778 register rtx temp;
2779
2780 size = convert_modes (Pmode, ptr_mode, size, 1);
2781 if (CONSTANT_P (size))
2782 anti_adjust_stack (plus_constant (size, extra));
2783 else if (GET_CODE (size) == REG && extra == 0)
2784 anti_adjust_stack (size);
2785 else
2786 {
2787 rtx temp = copy_to_mode_reg (Pmode, size);
2788 if (extra != 0)
2789 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2790 temp, 0, OPTAB_LIB_WIDEN);
2791 anti_adjust_stack (temp);
2792 }
2793
2794 #if defined (STACK_GROWS_DOWNWARD) \
2795 || (defined (ARGS_GROW_DOWNWARD) \
2796 && !defined (ACCUMULATE_OUTGOING_ARGS))
2797
2798 /* Return the lowest stack address when STACK or ARGS grow downward and
2799    we are not accumulating outgoing arguments (the c4x port uses such
2800 conventions). */
2801 temp = virtual_outgoing_args_rtx;
2802 if (extra != 0 && below)
2803 temp = plus_constant (temp, extra);
2804 #else
2805 if (GET_CODE (size) == CONST_INT)
2806 temp = plus_constant (virtual_outgoing_args_rtx,
2807 - INTVAL (size) - (below ? 0 : extra));
2808 else if (extra != 0 && !below)
2809 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2810 negate_rtx (Pmode, plus_constant (size, extra)));
2811 else
2812 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2813 negate_rtx (Pmode, size));
2814 #endif
2815
2816 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2817 }
2818
2819 rtx
2820 gen_push_operand ()
2821 {
2822 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2823 }
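
/* Usage sketch (illustrative only): a MEM built around the push operand
   denotes the stack slot that a push insn creates, so storing through it
   pushes.  emit_push_insn below does essentially

     emit_move_insn (gen_rtx_MEM (mode, gen_push_operand ()), x);

   for the simple scalar case, and wraps the same operand in a BLKmode MEM
   when pushing a block by pieces.  */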
2824
2825 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2826 block of SIZE bytes. */
2827
2828 static rtx
2829 get_push_address (size)
2830 int size;
2831 {
2832 register rtx temp;
2833
2834 if (STACK_PUSH_CODE == POST_DEC)
2835 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2836 else if (STACK_PUSH_CODE == POST_INC)
2837 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2838 else
2839 temp = stack_pointer_rtx;
2840
2841 return copy_to_reg (temp);
2842 }
2843
2844 /* Generate code to push X onto the stack, assuming it has mode MODE and
2845 type TYPE.
2846 MODE is redundant except when X is a CONST_INT (since they don't
2847 carry mode info).
2848 SIZE is an rtx for the size of data to be copied (in bytes),
2849 needed only if X is BLKmode.
2850
2851    ALIGN (in bytes) is the maximum alignment we can assume.
2852
2853 If PARTIAL and REG are both nonzero, then copy that many of the first
2854 words of X into registers starting with REG, and push the rest of X.
2855 The amount of space pushed is decreased by PARTIAL words,
2856 rounded *down* to a multiple of PARM_BOUNDARY.
2857 REG must be a hard register in this case.
2858    If REG is zero but PARTIAL is not, take all other actions for an
2859 argument partially in registers, but do not actually load any
2860 registers.
2861
2862 EXTRA is the amount in bytes of extra space to leave next to this arg.
2863 This is ignored if an argument block has already been allocated.
2864
2865 On a machine that lacks real push insns, ARGS_ADDR is the address of
2866 the bottom of the argument block for this call. We use indexing off there
2867    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2868 argument block has not been preallocated.
2869
2870 ARGS_SO_FAR is the size of args previously pushed for this call.
2871
2872 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2873 for arguments passed in registers. If nonzero, it will be the number
2874 of bytes required. */
2875
2876 void
2877 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2878 args_addr, args_so_far, reg_parm_stack_space,
2879 alignment_pad)
2880 register rtx x;
2881 enum machine_mode mode;
2882 tree type;
2883 rtx size;
2884 int align;
2885 int partial;
2886 rtx reg;
2887 int extra;
2888 rtx args_addr;
2889 rtx args_so_far;
2890 int reg_parm_stack_space;
2891 rtx alignment_pad;
2892 {
2893 rtx xinner;
2894 enum direction stack_direction
2895 #ifdef STACK_GROWS_DOWNWARD
2896 = downward;
2897 #else
2898 = upward;
2899 #endif
2900
2901 /* Decide where to pad the argument: `downward' for below,
2902 `upward' for above, or `none' for don't pad it.
2903 Default is below for small data on big-endian machines; else above. */
2904 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2905
2906 /* Invert direction if stack is post-update. */
2907 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2908 if (where_pad != none)
2909 where_pad = (where_pad == downward ? upward : downward);
2910
2911 xinner = x = protect_from_queue (x, 0);
2912
2913 if (mode == BLKmode)
2914 {
2915 /* Copy a block into the stack, entirely or partially. */
2916
2917 register rtx temp;
2918 int used = partial * UNITS_PER_WORD;
2919 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2920 int skip;
2921
2922 if (size == 0)
2923 abort ();
2924
2925 used -= offset;
2926
2927 /* USED is now the # of bytes we need not copy to the stack
2928 because registers will take care of them. */
2929
2930 if (partial != 0)
2931 xinner = change_address (xinner, BLKmode,
2932 plus_constant (XEXP (xinner, 0), used));
2933
2934 /* If the partial register-part of the arg counts in its stack size,
2935 skip the part of stack space corresponding to the registers.
2936 Otherwise, start copying to the beginning of the stack space,
2937 by setting SKIP to 0. */
2938 skip = (reg_parm_stack_space == 0) ? 0 : used;
2939
2940 #ifdef PUSH_ROUNDING
2941 /* Do it with several push insns if that doesn't take lots of insns
2942 and if there is no difficulty with push insns that skip bytes
2943 on the stack for alignment purposes. */
2944 if (args_addr == 0
2945 && GET_CODE (size) == CONST_INT
2946 && skip == 0
2947 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2948 /* Here we avoid the case of a structure whose weak alignment
2949 forces many pushes of a small amount of data,
2950 and such small pushes do rounding that causes trouble. */
2951 && ((! SLOW_UNALIGNED_ACCESS)
2952 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2953 || PUSH_ROUNDING (align) == align)
2954 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2955 {
2956 /* Push padding now if padding above and stack grows down,
2957 or if padding below and stack grows up.
2958 But if space already allocated, this has already been done. */
2959 if (extra && args_addr == 0
2960 && where_pad != none && where_pad != stack_direction)
2961 anti_adjust_stack (GEN_INT (extra));
2962
2963 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2964 INTVAL (size) - used, align);
2965
2966 if (current_function_check_memory_usage && ! in_check_memory_usage)
2967 {
2968 rtx temp;
2969
2970 in_check_memory_usage = 1;
2971 temp = get_push_address (INTVAL(size) - used);
2972 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2973 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2974 temp, Pmode,
2975 XEXP (xinner, 0), Pmode,
2976 GEN_INT (INTVAL(size) - used),
2977 TYPE_MODE (sizetype));
2978 else
2979 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2980 temp, Pmode,
2981 GEN_INT (INTVAL(size) - used),
2982 TYPE_MODE (sizetype),
2983 GEN_INT (MEMORY_USE_RW),
2984 TYPE_MODE (integer_type_node));
2985 in_check_memory_usage = 0;
2986 }
2987 }
2988 else
2989 #endif /* PUSH_ROUNDING */
2990 {
2991 /* Otherwise make space on the stack and copy the data
2992 to the address of that space. */
2993
2994 /* Deduct words put into registers from the size we must copy. */
2995 if (partial != 0)
2996 {
2997 if (GET_CODE (size) == CONST_INT)
2998 size = GEN_INT (INTVAL (size) - used);
2999 else
3000 size = expand_binop (GET_MODE (size), sub_optab, size,
3001 GEN_INT (used), NULL_RTX, 0,
3002 OPTAB_LIB_WIDEN);
3003 }
3004
3005 /* Get the address of the stack space.
3006 In this case, we do not deal with EXTRA separately.
3007 A single stack adjust will do. */
3008 if (! args_addr)
3009 {
3010 temp = push_block (size, extra, where_pad == downward);
3011 extra = 0;
3012 }
3013 else if (GET_CODE (args_so_far) == CONST_INT)
3014 temp = memory_address (BLKmode,
3015 plus_constant (args_addr,
3016 skip + INTVAL (args_so_far)));
3017 else
3018 temp = memory_address (BLKmode,
3019 plus_constant (gen_rtx_PLUS (Pmode,
3020 args_addr,
3021 args_so_far),
3022 skip));
3023 if (current_function_check_memory_usage && ! in_check_memory_usage)
3024 {
3025 rtx target;
3026
3027 in_check_memory_usage = 1;
3028 target = copy_to_reg (temp);
3029 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3030 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3031 target, Pmode,
3032 XEXP (xinner, 0), Pmode,
3033 size, TYPE_MODE (sizetype));
3034 else
3035 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3036 target, Pmode,
3037 size, TYPE_MODE (sizetype),
3038 GEN_INT (MEMORY_USE_RW),
3039 TYPE_MODE (integer_type_node));
3040 in_check_memory_usage = 0;
3041 }
3042
3043 /* TEMP is the address of the block. Copy the data there. */
3044 if (GET_CODE (size) == CONST_INT
3045 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3046 {
3047 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3048 INTVAL (size), align);
3049 goto ret;
3050 }
3051 else
3052 {
3053 rtx opalign = GEN_INT (align);
3054 enum machine_mode mode;
3055 rtx target = gen_rtx_MEM (BLKmode, temp);
3056
3057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3058 mode != VOIDmode;
3059 mode = GET_MODE_WIDER_MODE (mode))
3060 {
3061 enum insn_code code = movstr_optab[(int) mode];
3062 insn_operand_predicate_fn pred;
3063
3064 if (code != CODE_FOR_nothing
3065 && ((GET_CODE (size) == CONST_INT
3066 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3067 <= (GET_MODE_MASK (mode) >> 1)))
3068 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3069 && (!(pred = insn_data[(int) code].operand[0].predicate)
3070 || ((*pred) (target, BLKmode)))
3071 && (!(pred = insn_data[(int) code].operand[1].predicate)
3072 || ((*pred) (xinner, BLKmode)))
3073 && (!(pred = insn_data[(int) code].operand[3].predicate)
3074 || ((*pred) (opalign, VOIDmode))))
3075 {
3076 rtx op2 = convert_to_mode (mode, size, 1);
3077 rtx last = get_last_insn ();
3078 rtx pat;
3079
3080 pred = insn_data[(int) code].operand[2].predicate;
3081 if (pred != 0 && ! (*pred) (op2, mode))
3082 op2 = copy_to_mode_reg (mode, op2);
3083
3084 pat = GEN_FCN ((int) code) (target, xinner,
3085 op2, opalign);
3086 if (pat)
3087 {
3088 emit_insn (pat);
3089 goto ret;
3090 }
3091 else
3092 delete_insns_since (last);
3093 }
3094 }
3095 }
3096
3097 #ifndef ACCUMULATE_OUTGOING_ARGS
3098 /* If the source is referenced relative to the stack pointer,
3099 copy it to another register to stabilize it. We do not need
3100 to do this if we know that we won't be changing sp. */
3101
3102 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3103 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3104 temp = copy_to_reg (temp);
3105 #endif
3106
3107 /* Make inhibit_defer_pop nonzero around the library call
3108 to force it to pop the bcopy-arguments right away. */
3109 NO_DEFER_POP;
3110 #ifdef TARGET_MEM_FUNCTIONS
3111 emit_library_call (memcpy_libfunc, 0,
3112 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3113 convert_to_mode (TYPE_MODE (sizetype),
3114 size, TREE_UNSIGNED (sizetype)),
3115 TYPE_MODE (sizetype));
3116 #else
3117 emit_library_call (bcopy_libfunc, 0,
3118 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3119 convert_to_mode (TYPE_MODE (integer_type_node),
3120 size,
3121 TREE_UNSIGNED (integer_type_node)),
3122 TYPE_MODE (integer_type_node));
3123 #endif
3124 OK_DEFER_POP;
3125 }
3126 }
3127 else if (partial > 0)
3128 {
3129 /* Scalar partly in registers. */
3130
3131 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3132 int i;
3133 int not_stack;
3134 /* # words of start of argument
3135 that we must make space for but need not store. */
3136 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3137 int args_offset = INTVAL (args_so_far);
3138 int skip;
3139
3140 /* Push padding now if padding above and stack grows down,
3141 or if padding below and stack grows up.
3142 But if space already allocated, this has already been done. */
3143 if (extra && args_addr == 0
3144 && where_pad != none && where_pad != stack_direction)
3145 anti_adjust_stack (GEN_INT (extra));
3146
3147 /* If we make space by pushing it, we might as well push
3148 the real data. Otherwise, we can leave OFFSET nonzero
3149 and leave the space uninitialized. */
3150 if (args_addr == 0)
3151 offset = 0;
3152
3153 /* Now NOT_STACK gets the number of words that we don't need to
3154 allocate on the stack. */
3155 not_stack = partial - offset;
3156
3157 /* If the partial register-part of the arg counts in its stack size,
3158 skip the part of stack space corresponding to the registers.
3159 Otherwise, start copying to the beginning of the stack space,
3160 by setting SKIP to 0. */
3161 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3162
3163 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3164 x = validize_mem (force_const_mem (mode, x));
3165
3166 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3167 SUBREGs of such registers are not allowed. */
3168 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3169 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3170 x = copy_to_reg (x);
3171
3172 /* Loop over all the words allocated on the stack for this arg. */
3173 /* We can do it by words, because any scalar bigger than a word
3174 has a size a multiple of a word. */
3175 #ifndef PUSH_ARGS_REVERSED
3176 for (i = not_stack; i < size; i++)
3177 #else
3178 for (i = size - 1; i >= not_stack; i--)
3179 #endif
3180 if (i >= not_stack + offset)
3181 emit_push_insn (operand_subword_force (x, i, mode),
3182 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3183 0, args_addr,
3184 GEN_INT (args_offset + ((i - not_stack + skip)
3185 * UNITS_PER_WORD)),
3186 reg_parm_stack_space, alignment_pad);
3187 }
3188 else
3189 {
3190 rtx addr;
3191 rtx target = NULL_RTX;
3192
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra && args_addr == 0
3197 && where_pad != none && where_pad != stack_direction)
3198 anti_adjust_stack (GEN_INT (extra));
3199
3200 #ifdef PUSH_ROUNDING
3201 if (args_addr == 0)
3202 addr = gen_push_operand ();
3203 else
3204 #endif
3205 {
3206 if (GET_CODE (args_so_far) == CONST_INT)
3207 addr
3208 = memory_address (mode,
3209 plus_constant (args_addr,
3210 INTVAL (args_so_far)));
3211 else
3212 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3213 args_so_far));
3214 target = addr;
3215 }
3216
3217 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3218
3219 if (current_function_check_memory_usage && ! in_check_memory_usage)
3220 {
3221 in_check_memory_usage = 1;
3222 if (target == 0)
3223 target = get_push_address (GET_MODE_SIZE (mode));
3224
3225 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3226 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3227 target, Pmode,
3228 XEXP (x, 0), Pmode,
3229 GEN_INT (GET_MODE_SIZE (mode)),
3230 TYPE_MODE (sizetype));
3231 else
3232 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3233 target, Pmode,
3234 GEN_INT (GET_MODE_SIZE (mode)),
3235 TYPE_MODE (sizetype),
3236 GEN_INT (MEMORY_USE_RW),
3237 TYPE_MODE (integer_type_node));
3238 in_check_memory_usage = 0;
3239 }
3240 }
3241
3242 ret:
3243 /* If part should go in registers, copy that part
3244 into the appropriate registers. Do this now, at the end,
3245 since mem-to-mem copies above may do function calls. */
3246 if (partial > 0 && reg != 0)
3247 {
3248 /* Handle calls that pass values in multiple non-contiguous locations.
3249 The Irix 6 ABI has examples of this. */
3250 if (GET_CODE (reg) == PARALLEL)
3251 emit_group_load (reg, x, -1, align); /* ??? size? */
3252 else
3253 move_block_to_reg (REGNO (reg), x, partial, mode);
3254 }
3255
3256 if (extra && args_addr == 0 && where_pad == stack_direction)
3257 anti_adjust_stack (GEN_INT (extra));
3258
3259 if (alignment_pad)
3260 anti_adjust_stack (alignment_pad);
3261 }
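
/* Usage sketch (illustrative only; `x' and the rtx `args_so_far' for the
   bytes already pushed are hypothetical): pushing a word-sized scalar with
   no preallocated argument block and no register part might look like

     emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
		     PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX, 0,
		     NULL_RTX, args_so_far, 0, NULL_RTX);

   Compare the recursive call above, which pushes the stack part of a
   partially-in-registers argument one word at a time.  */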
3262 \f
3263 /* Expand an assignment that stores the value of FROM into TO.
3264 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3265 (This may contain a QUEUED rtx;
3266 if the value is constant, this rtx is a constant.)
3267 Otherwise, the returned value is NULL_RTX.
3268
3269 SUGGEST_REG is no longer actually used.
3270 It used to mean, copy the value through a register
3271 and return that register, if that is possible.
3272 We now use WANT_VALUE to decide whether to do this. */
3273
3274 rtx
3275 expand_assignment (to, from, want_value, suggest_reg)
3276 tree to, from;
3277 int want_value;
3278 int suggest_reg ATTRIBUTE_UNUSED;
3279 {
3280 register rtx to_rtx = 0;
3281 rtx result;
3282
3283 /* Don't crash if the lhs of the assignment was erroneous. */
3284
3285 if (TREE_CODE (to) == ERROR_MARK)
3286 {
3287 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3288 return want_value ? result : NULL_RTX;
3289 }
3290
3291 /* Assignment of a structure component needs special treatment
3292 if the structure component's rtx is not simply a MEM.
3293 Assignment of an array element at a constant index, and assignment of
3294    an array element in an unaligned packed structure field, have the same
3295 problem. */
3296
3297 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3298 || TREE_CODE (to) == ARRAY_REF)
3299 {
3300 enum machine_mode mode1;
3301 int bitsize;
3302 int bitpos;
3303 tree offset;
3304 int unsignedp;
3305 int volatilep = 0;
3306 tree tem;
3307 int alignment;
3308
3309 push_temp_slots ();
3310 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3311 &unsignedp, &volatilep, &alignment);
3312
3313 /* If we are going to use store_bit_field and extract_bit_field,
3314 make sure to_rtx will be safe for multiple use. */
3315
3316 if (mode1 == VOIDmode && want_value)
3317 tem = stabilize_reference (tem);
3318
3319 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3320 if (offset != 0)
3321 {
3322 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3323
3324 if (GET_CODE (to_rtx) != MEM)
3325 abort ();
3326
3327 if (GET_MODE (offset_rtx) != ptr_mode)
3328 {
3329 #ifdef POINTERS_EXTEND_UNSIGNED
3330 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3331 #else
3332 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3333 #endif
3334 }
3335
3336 	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3337 	     to call force_reg for that case, so avoid it here. */
3338 if (GET_CODE (to_rtx) == MEM
3339 && GET_MODE (to_rtx) == BLKmode
3340 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3341 && bitsize
3342 && (bitpos % bitsize) == 0
3343 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3344 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3345 {
3346 rtx temp = change_address (to_rtx, mode1,
3347 plus_constant (XEXP (to_rtx, 0),
3348 (bitpos /
3349 BITS_PER_UNIT)));
3350 if (GET_CODE (XEXP (temp, 0)) == REG)
3351 to_rtx = temp;
3352 else
3353 to_rtx = change_address (to_rtx, mode1,
3354 force_reg (GET_MODE (XEXP (temp, 0)),
3355 XEXP (temp, 0)));
3356 bitpos = 0;
3357 }
3358
3359 to_rtx = change_address (to_rtx, VOIDmode,
3360 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3361 force_reg (ptr_mode,
3362 offset_rtx)));
3363 }
3364
3365 if (volatilep)
3366 {
3367 if (GET_CODE (to_rtx) == MEM)
3368 {
3369 /* When the offset is zero, to_rtx is the address of the
3370 structure we are storing into, and hence may be shared.
3371 We must make a new MEM before setting the volatile bit. */
3372 if (offset == 0)
3373 to_rtx = copy_rtx (to_rtx);
3374
3375 MEM_VOLATILE_P (to_rtx) = 1;
3376 }
3377 #if 0 /* This was turned off because, when a field is volatile
3378 in an object which is not volatile, the object may be in a register,
3379 and then we would abort over here. */
3380 else
3381 abort ();
3382 #endif
3383 }
3384
3385 if (TREE_CODE (to) == COMPONENT_REF
3386 && TREE_READONLY (TREE_OPERAND (to, 1)))
3387 {
3388 if (offset == 0)
3389 to_rtx = copy_rtx (to_rtx);
3390
3391 RTX_UNCHANGING_P (to_rtx) = 1;
3392 }
3393
3394 /* Check the access. */
3395 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3396 {
3397 rtx to_addr;
3398 int size;
3399 int best_mode_size;
3400 enum machine_mode best_mode;
3401
3402 best_mode = get_best_mode (bitsize, bitpos,
3403 TYPE_ALIGN (TREE_TYPE (tem)),
3404 mode1, volatilep);
3405 if (best_mode == VOIDmode)
3406 best_mode = QImode;
3407
3408 best_mode_size = GET_MODE_BITSIZE (best_mode);
3409 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3410 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3411 size *= GET_MODE_SIZE (best_mode);
3412
3413 /* Check the access right of the pointer. */
3414 if (size)
3415 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3416 to_addr, Pmode,
3417 GEN_INT (size), TYPE_MODE (sizetype),
3418 GEN_INT (MEMORY_USE_WO),
3419 TYPE_MODE (integer_type_node));
3420 }
3421
3422 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3423 (want_value
3424 /* Spurious cast makes HPUX compiler happy. */
3425 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3426 : VOIDmode),
3427 unsignedp,
3428 /* Required alignment of containing datum. */
3429 alignment,
3430 int_size_in_bytes (TREE_TYPE (tem)),
3431 get_alias_set (to));
3432 preserve_temp_slots (result);
3433 free_temp_slots ();
3434 pop_temp_slots ();
3435
3436 /* If the value is meaningful, convert RESULT to the proper mode.
3437 Otherwise, return nothing. */
3438 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3439 TYPE_MODE (TREE_TYPE (from)),
3440 result,
3441 TREE_UNSIGNED (TREE_TYPE (to)))
3442 : NULL_RTX);
3443 }
3444
3445 /* If the rhs is a function call and its value is not an aggregate,
3446 call the function before we start to compute the lhs.
3447 This is needed for correct code for cases such as
3448 val = setjmp (buf) on machines where reference to val
3449 requires loading up part of an address in a separate insn.
3450
3451      Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG, since it might
3452      be a promoted variable where the zero- or sign-extension needs to be done.
3453 Handling this in the normal way is safe because no computation is done
3454 before the call. */
3455 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3456 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3457 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3458 {
3459 rtx value;
3460
3461 push_temp_slots ();
3462 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3463 if (to_rtx == 0)
3464 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3465
3466 /* Handle calls that return values in multiple non-contiguous locations.
3467 The Irix 6 ABI has examples of this. */
3468 if (GET_CODE (to_rtx) == PARALLEL)
3469 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3470 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3471 else if (GET_MODE (to_rtx) == BLKmode)
3472 emit_block_move (to_rtx, value, expr_size (from),
3473 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3474 else
3475 {
3476 #ifdef POINTERS_EXTEND_UNSIGNED
3477 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3478 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3479 value = convert_memory_address (GET_MODE (to_rtx), value);
3480 #endif
3481 emit_move_insn (to_rtx, value);
3482 }
3483 preserve_temp_slots (to_rtx);
3484 free_temp_slots ();
3485 pop_temp_slots ();
3486 return want_value ? to_rtx : NULL_RTX;
3487 }
3488
3489 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3490 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3491
3492 if (to_rtx == 0)
3493 {
3494 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3495 if (GET_CODE (to_rtx) == MEM)
3496 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3497 }
3498
3499 /* Don't move directly into a return register. */
3500 if (TREE_CODE (to) == RESULT_DECL
3501 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3502 {
3503 rtx temp;
3504
3505 push_temp_slots ();
3506 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3507
3508 if (GET_CODE (to_rtx) == PARALLEL)
3509 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3510 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3511 else
3512 emit_move_insn (to_rtx, temp);
3513
3514 preserve_temp_slots (to_rtx);
3515 free_temp_slots ();
3516 pop_temp_slots ();
3517 return want_value ? to_rtx : NULL_RTX;
3518 }
3519
3520 /* In case we are returning the contents of an object which overlaps
3521 the place the value is being stored, use a safe function when copying
3522 a value through a pointer into a structure value return block. */
3523 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3524 && current_function_returns_struct
3525 && !current_function_returns_pcc_struct)
3526 {
3527 rtx from_rtx, size;
3528
3529 push_temp_slots ();
3530 size = expr_size (from);
3531 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3532 EXPAND_MEMORY_USE_DONT);
3533
3534 /* Copy the rights of the bitmap. */
3535 if (current_function_check_memory_usage)
3536 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3537 XEXP (to_rtx, 0), Pmode,
3538 XEXP (from_rtx, 0), Pmode,
3539 convert_to_mode (TYPE_MODE (sizetype),
3540 size, TREE_UNSIGNED (sizetype)),
3541 TYPE_MODE (sizetype));
3542
3543 #ifdef TARGET_MEM_FUNCTIONS
3544 emit_library_call (memcpy_libfunc, 0,
3545 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3546 XEXP (from_rtx, 0), Pmode,
3547 convert_to_mode (TYPE_MODE (sizetype),
3548 size, TREE_UNSIGNED (sizetype)),
3549 TYPE_MODE (sizetype));
3550 #else
3551 emit_library_call (bcopy_libfunc, 0,
3552 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3553 XEXP (to_rtx, 0), Pmode,
3554 convert_to_mode (TYPE_MODE (integer_type_node),
3555 size, TREE_UNSIGNED (integer_type_node)),
3556 TYPE_MODE (integer_type_node));
3557 #endif
3558
3559 preserve_temp_slots (to_rtx);
3560 free_temp_slots ();
3561 pop_temp_slots ();
3562 return want_value ? to_rtx : NULL_RTX;
3563 }
3564
3565 /* Compute FROM and store the value in the rtx we got. */
3566
3567 push_temp_slots ();
3568 result = store_expr (from, to_rtx, want_value);
3569 preserve_temp_slots (result);
3570 free_temp_slots ();
3571 pop_temp_slots ();
3572 return want_value ? result : NULL_RTX;
3573 }
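
/* Usage sketch (illustrative only; `lhs' and `rhs' are hypothetical trees):
   a language front end expanding the statement `a = b' ends up doing

     expand_assignment (lhs, rhs, 0, 0);

   Passing WANT_VALUE as 1 instead also returns an rtx for the stored value,
   which is what callers need when the assignment is used as a
   subexpression.  */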
3574
3575 /* Generate code for computing expression EXP,
3576 and storing the value into TARGET.
3577 TARGET may contain a QUEUED rtx.
3578
3579 If WANT_VALUE is nonzero, return a copy of the value
3580 not in TARGET, so that we can be sure to use the proper
3581 value in a containing expression even if TARGET has something
3582 else stored in it. If possible, we copy the value through a pseudo
3583 and return that pseudo. Or, if the value is constant, we try to
3584 return the constant. In some cases, we return a pseudo
3585 copied *from* TARGET.
3586
3587 If the mode is BLKmode then we may return TARGET itself.
3588    It turns out that in BLKmode it doesn't cause a problem,
3589 because C has no operators that could combine two different
3590 assignments into the same BLKmode object with different values
3591 with no sequence point. Will other languages need this to
3592 be more thorough?
3593
3594 If WANT_VALUE is 0, we return NULL, to make sure
3595 to catch quickly any cases where the caller uses the value
3596 and fails to set WANT_VALUE. */
3597
3598 rtx
3599 store_expr (exp, target, want_value)
3600 register tree exp;
3601 register rtx target;
3602 int want_value;
3603 {
3604 register rtx temp;
3605 int dont_return_target = 0;
3606
3607 if (TREE_CODE (exp) == COMPOUND_EXPR)
3608 {
3609 /* Perform first part of compound expression, then assign from second
3610 part. */
3611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3612 emit_queue ();
3613 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3614 }
3615 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3616 {
3617       /* For a conditional expression, get a safe form of the target.  Then
3618 test the condition, doing the appropriate assignment on either
3619 side. This avoids the creation of unnecessary temporaries.
3620 For non-BLKmode, it is more efficient not to do this. */
3621
3622 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3623
3624 emit_queue ();
3625 target = protect_from_queue (target, 1);
3626
3627 do_pending_stack_adjust ();
3628 NO_DEFER_POP;
3629 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3630 start_cleanup_deferral ();
3631 store_expr (TREE_OPERAND (exp, 1), target, 0);
3632 end_cleanup_deferral ();
3633 emit_queue ();
3634 emit_jump_insn (gen_jump (lab2));
3635 emit_barrier ();
3636 emit_label (lab1);
3637 start_cleanup_deferral ();
3638 store_expr (TREE_OPERAND (exp, 2), target, 0);
3639 end_cleanup_deferral ();
3640 emit_queue ();
3641 emit_label (lab2);
3642 OK_DEFER_POP;
3643
3644 return want_value ? target : NULL_RTX;
3645 }
3646 else if (queued_subexp_p (target))
3647 /* If target contains a postincrement, let's not risk
3648 using it as the place to generate the rhs. */
3649 {
3650 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3651 {
3652 /* Expand EXP into a new pseudo. */
3653 temp = gen_reg_rtx (GET_MODE (target));
3654 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3655 }
3656 else
3657 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3658
3659 /* If target is volatile, ANSI requires accessing the value
3660 *from* the target, if it is accessed. So make that happen.
3661 In no case return the target itself. */
3662 if (! MEM_VOLATILE_P (target) && want_value)
3663 dont_return_target = 1;
3664 }
3665 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3666 && GET_MODE (target) != BLKmode)
3667 /* If target is in memory and caller wants value in a register instead,
3668 arrange that. Pass TARGET as target for expand_expr so that,
3669 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3670 We know expand_expr will not use the target in that case.
3671 Don't do this if TARGET is volatile because we are supposed
3672 to write it and then read it. */
3673 {
3674 temp = expand_expr (exp, target, GET_MODE (target), 0);
3675 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3676 temp = copy_to_reg (temp);
3677 dont_return_target = 1;
3678 }
3679 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3680     /* If this is a scalar in a register that is stored in a wider mode
3681 than the declared mode, compute the result into its declared mode
3682 and then convert to the wider mode. Our value is the computed
3683 expression. */
3684 {
3685 /* If we don't want a value, we can do the conversion inside EXP,
3686 which will often result in some optimizations. Do the conversion
3687 in two steps: first change the signedness, if needed, then
3688 the extend. But don't do this if the type of EXP is a subtype
3689 of something else since then the conversion might involve
3690 more than just converting modes. */
3691 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3692 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3693 {
3694 if (TREE_UNSIGNED (TREE_TYPE (exp))
3695 != SUBREG_PROMOTED_UNSIGNED_P (target))
3696 exp
3697 = convert
3698 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3699 TREE_TYPE (exp)),
3700 exp);
3701
3702 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3703 SUBREG_PROMOTED_UNSIGNED_P (target)),
3704 exp);
3705 }
3706
3707 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3708
3709 /* If TEMP is a volatile MEM and we want a result value, make
3710 the access now so it gets done only once. Likewise if
3711 it contains TARGET. */
3712 if (GET_CODE (temp) == MEM && want_value
3713 && (MEM_VOLATILE_P (temp)
3714 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3715 temp = copy_to_reg (temp);
3716
3717 /* If TEMP is a VOIDmode constant, use convert_modes to make
3718 sure that we properly convert it. */
3719 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3720 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3721 TYPE_MODE (TREE_TYPE (exp)), temp,
3722 SUBREG_PROMOTED_UNSIGNED_P (target));
3723
3724 convert_move (SUBREG_REG (target), temp,
3725 SUBREG_PROMOTED_UNSIGNED_P (target));
3726
3727 /* If we promoted a constant, change the mode back down to match
3728 target. Otherwise, the caller might get confused by a result whose
3729 mode is larger than expected. */
3730
3731 if (want_value && GET_MODE (temp) != GET_MODE (target)
3732 && GET_MODE (temp) != VOIDmode)
3733 {
3734 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3735 SUBREG_PROMOTED_VAR_P (temp) = 1;
3736 SUBREG_PROMOTED_UNSIGNED_P (temp)
3737 = SUBREG_PROMOTED_UNSIGNED_P (target);
3738 }
3739
3740 return want_value ? temp : NULL_RTX;
3741 }
3742 else
3743 {
3744 temp = expand_expr (exp, target, GET_MODE (target), 0);
3745 /* Return TARGET if it's a specified hardware register.
3746 If TARGET is a volatile mem ref, either return TARGET
3747 or return a reg copied *from* TARGET; ANSI requires this.
3748
3749 Otherwise, if TEMP is not TARGET, return TEMP
3750 if it is constant (for efficiency),
3751 or if we really want the correct value. */
3752 if (!(target && GET_CODE (target) == REG
3753 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3754 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3755 && ! rtx_equal_p (temp, target)
3756 && (CONSTANT_P (temp) || want_value))
3757 dont_return_target = 1;
3758 }
3759
3760 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3761 the same as that of TARGET, adjust the constant. This is needed, for
3762 example, in case it is a CONST_DOUBLE and we want only a word-sized
3763 value. */
3764 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3765 && TREE_CODE (exp) != ERROR_MARK
3766 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3767 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3768 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3769
3770 if (current_function_check_memory_usage
3771 && GET_CODE (target) == MEM
3772 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3773 {
3774 if (GET_CODE (temp) == MEM)
3775 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3776 XEXP (target, 0), Pmode,
3777 XEXP (temp, 0), Pmode,
3778 expr_size (exp), TYPE_MODE (sizetype));
3779 else
3780 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3781 XEXP (target, 0), Pmode,
3782 expr_size (exp), TYPE_MODE (sizetype),
3783 GEN_INT (MEMORY_USE_WO),
3784 TYPE_MODE (integer_type_node));
3785 }
3786
3787 /* If value was not generated in the target, store it there.
3788      Convert the value to TARGET's type first if necessary.  */
3789 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3790 one or both of them are volatile memory refs, we have to distinguish
3791 two cases:
3792 - expand_expr has used TARGET. In this case, we must not generate
3793      another copy.  This can be detected by TEMP being equal to TARGET
3794      according to ==.
3795 - expand_expr has not used TARGET - that means that the source just
3796 happens to have the same RTX form. Since temp will have been created
3797 by expand_expr, it will compare unequal according to == .
3798 We must generate a copy in this case, to reach the correct number
3799 of volatile memory references. */
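  /* Concretely (an illustrative sketch): if TEMP and TARGET are both
     (mem/v:SI (reg X)) but were built by separate calls, they satisfy
     rtx_equal_p yet TEMP != TARGET, so the copy below must be emitted;
     if expand_expr returned TARGET itself, TEMP == TARGET and no extra
     copy is wanted.  */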
3800
3801 if ((! rtx_equal_p (temp, target)
3802 || (temp != target && (side_effects_p (temp)
3803 || side_effects_p (target))))
3804 && TREE_CODE (exp) != ERROR_MARK)
3805 {
3806 target = protect_from_queue (target, 1);
3807 if (GET_MODE (temp) != GET_MODE (target)
3808 && GET_MODE (temp) != VOIDmode)
3809 {
3810 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3811 if (dont_return_target)
3812 {
3813 /* In this case, we will return TEMP,
3814 so make sure it has the proper mode.
3815 But don't forget to store the value into TARGET. */
3816 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3817 emit_move_insn (target, temp);
3818 }
3819 else
3820 convert_move (target, temp, unsignedp);
3821 }
3822
3823 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3824 {
3825 /* Handle copying a string constant into an array.
3826 The string constant may be shorter than the array.
3827 So copy just the string's actual length, and clear the rest. */
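	  /* For example (illustrative): for

		 char buf[8] = "ab";

	     the string's length is 3 (it includes the trailing null), so
	     3 bytes are block-copied below and the remaining 5 bytes of
	     BUF are cleared.  */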
3828 rtx size;
3829 rtx addr;
3830
3831 /* Get the size of the data type of the string,
3832 which is actually the size of the target. */
3833 size = expr_size (exp);
3834 if (GET_CODE (size) == CONST_INT
3835 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3836 emit_block_move (target, temp, size,
3837 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3838 else
3839 {
3840 /* Compute the size of the data to copy from the string. */
3841 tree copy_size
3842 = size_binop (MIN_EXPR,
3843 make_tree (sizetype, size),
3844 convert (sizetype,
3845 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3846 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3847 VOIDmode, 0);
3848 rtx label = 0;
3849
3850 /* Copy that much. */
3851 emit_block_move (target, temp, copy_size_rtx,
3852 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3853
3854 /* Figure out how much is left in TARGET that we have to clear.
3855 Do all calculations in ptr_mode. */
3856
3857 addr = XEXP (target, 0);
3858 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3859
3860 if (GET_CODE (copy_size_rtx) == CONST_INT)
3861 {
3862 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3863 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3864 }
3865 else
3866 {
3867 addr = force_reg (ptr_mode, addr);
3868 addr = expand_binop (ptr_mode, add_optab, addr,
3869 copy_size_rtx, NULL_RTX, 0,
3870 OPTAB_LIB_WIDEN);
3871
3872 size = expand_binop (ptr_mode, sub_optab, size,
3873 copy_size_rtx, NULL_RTX, 0,
3874 OPTAB_LIB_WIDEN);
3875
3876 label = gen_label_rtx ();
3877 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3878 GET_MODE (size), 0, 0, label);
3879 }
3880
3881 if (size != const0_rtx)
3882 {
3883 /* Be sure we can write on ADDR. */
3884 if (current_function_check_memory_usage)
3885 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3886 addr, Pmode,
3887 size, TYPE_MODE (sizetype),
3888 GEN_INT (MEMORY_USE_WO),
3889 TYPE_MODE (integer_type_node));
3890 #ifdef TARGET_MEM_FUNCTIONS
3891 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3892 addr, ptr_mode,
3893 const0_rtx, TYPE_MODE (integer_type_node),
3894 convert_to_mode (TYPE_MODE (sizetype),
3895 size,
3896 TREE_UNSIGNED (sizetype)),
3897 TYPE_MODE (sizetype));
3898 #else
3899 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3900 addr, ptr_mode,
3901 convert_to_mode (TYPE_MODE (integer_type_node),
3902 size,
3903 TREE_UNSIGNED (integer_type_node)),
3904 TYPE_MODE (integer_type_node));
3905 #endif
3906 }
3907
3908 if (label)
3909 emit_label (label);
3910 }
3911 }
3912 /* Handle calls that return values in multiple non-contiguous locations.
3913 The Irix 6 ABI has examples of this. */
3914 else if (GET_CODE (target) == PARALLEL)
3915 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3916 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3917 else if (GET_MODE (temp) == BLKmode)
3918 emit_block_move (target, temp, expr_size (exp),
3919 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3920 else
3921 emit_move_insn (target, temp);
3922 }
3923
3924 /* If we don't want a value, return NULL_RTX. */
3925 if (! want_value)
3926 return NULL_RTX;
3927
3928 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3929 ??? The latter test doesn't seem to make sense. */
3930 else if (dont_return_target && GET_CODE (temp) != MEM)
3931 return temp;
3932
3933 /* Return TARGET itself if it is a hard register. */
3934 else if (want_value && GET_MODE (target) != BLKmode
3935 && ! (GET_CODE (target) == REG
3936 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3937 return copy_to_reg (target);
3938
3939 else
3940 return target;
3941 }
3942 \f
3943 /* Return 1 if EXP just contains zeros. */
3944
3945 static int
3946 is_zeros_p (exp)
3947 tree exp;
3948 {
3949 tree elt;
3950
3951 switch (TREE_CODE (exp))
3952 {
3953 case CONVERT_EXPR:
3954 case NOP_EXPR:
3955 case NON_LVALUE_EXPR:
3956 return is_zeros_p (TREE_OPERAND (exp, 0));
3957
3958 case INTEGER_CST:
3959 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3960
3961 case COMPLEX_CST:
3962 return
3963 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3964
3965 case REAL_CST:
3966 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3967
3968 case CONSTRUCTOR:
3969 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3970 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3971 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3972 if (! is_zeros_p (TREE_VALUE (elt)))
3973 return 0;
3974
3975 return 1;
3976
3977 default:
3978 return 0;
3979 }
3980 }
3981
3982 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
3983
3984 static int
3985 mostly_zeros_p (exp)
3986 tree exp;
3987 {
3988 if (TREE_CODE (exp) == CONSTRUCTOR)
3989 {
3990 int elts = 0, zeros = 0;
3991 tree elt = CONSTRUCTOR_ELTS (exp);
3992 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3993 {
3994 /* If there are no ranges of true bits, it is all zero. */
3995 return elt == NULL_TREE;
3996 }
3997 for (; elt; elt = TREE_CHAIN (elt))
3998 {
3999 /* We do not handle the case where the index is a RANGE_EXPR,
4000 so the statistic will be somewhat inaccurate.
4001 We do make a more accurate count in store_constructor itself,
4002 	     and since this function is only used for nested array elements,
4003 this should be close enough. */
4004 if (mostly_zeros_p (TREE_VALUE (elt)))
4005 zeros++;
4006 elts++;
4007 }
4008
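      /* The comparison below is the integer form of ZEROS/ELTS >= 3/4;
	 e.g. for { 1, 0, 0, 0 } we get zeros == 3, elts == 4, and
	 4*3 >= 3*4 holds, so the constructor counts as mostly zero.  */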
4009 return 4 * zeros >= 3 * elts;
4010 }
4011
4012 return is_zeros_p (exp);
4013 }
4014 \f
4015 /* Helper function for store_constructor.
4016 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4017 TYPE is the type of the CONSTRUCTOR, not the element type.
4018 ALIGN and CLEARED are as for store_constructor.
4019
4020 This provides a recursive shortcut back to store_constructor when it isn't
4021 necessary to go through store_field. This is so that we can pass through
4022 the cleared field to let store_constructor know that we may not have to
4023 clear a substructure if the outer structure has already been cleared. */
4024
4025 static void
4026 store_constructor_field (target, bitsize, bitpos,
4027 mode, exp, type, align, cleared)
4028 rtx target;
4029 int bitsize, bitpos;
4030 enum machine_mode mode;
4031 tree exp, type;
4032 int align;
4033 int cleared;
4034 {
4035 if (TREE_CODE (exp) == CONSTRUCTOR
4036 && bitpos % BITS_PER_UNIT == 0
4037 /* If we have a non-zero bitpos for a register target, then we just
4038 let store_field do the bitfield handling. This is unlikely to
4039 	 generate unnecessary clear instructions anyway.  */
4040 && (bitpos == 0 || GET_CODE (target) == MEM))
4041 {
4042 if (bitpos != 0)
4043 target = change_address (target, VOIDmode,
4044 plus_constant (XEXP (target, 0),
4045 bitpos / BITS_PER_UNIT));
4046 store_constructor (exp, target, align, cleared);
4047 }
4048 else
4049 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4050 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4051 int_size_in_bytes (type), cleared);
4052 }
4053
4054 /* Store the value of constructor EXP into the rtx TARGET.
4055 TARGET is either a REG or a MEM.
4056 ALIGN is the maximum known alignment for TARGET, in bits.
4057 CLEARED is true if TARGET is known to have been zero'd. */
4058
4059 static void
4060 store_constructor (exp, target, align, cleared)
4061 tree exp;
4062 rtx target;
4063 int align;
4064 int cleared;
4065 {
4066 tree type = TREE_TYPE (exp);
4067 #ifdef WORD_REGISTER_OPERATIONS
4068 rtx exp_size = expr_size (exp);
4069 #endif
4070
4071 /* We know our target cannot conflict, since safe_from_p has been called. */
4072 #if 0
4073 /* Don't try copying piece by piece into a hard register
4074 since that is vulnerable to being clobbered by EXP.
4075 Instead, construct in a pseudo register and then copy it all. */
4076 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4077 {
4078 rtx temp = gen_reg_rtx (GET_MODE (target));
4079 store_constructor (exp, temp, 0);
4080 emit_move_insn (target, temp);
4081 return;
4082 }
4083 #endif
4084
4085 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4086 || TREE_CODE (type) == QUAL_UNION_TYPE)
4087 {
4088 register tree elt;
4089
4090 /* Inform later passes that the whole union value is dead. */
4091 if (TREE_CODE (type) == UNION_TYPE
4092 || TREE_CODE (type) == QUAL_UNION_TYPE)
4093 {
4094 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4095
4096 /* If the constructor is empty, clear the union. */
4097 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4098 clear_storage (target, expr_size (exp),
4099 TYPE_ALIGN (type) / BITS_PER_UNIT);
4100 }
4101
4102 /* If we are building a static constructor into a register,
4103 set the initial value as zero so we can fold the value into
4104 a constant. But if more than one register is involved,
4105 this probably loses. */
4106 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4107 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4108 {
4109 if (! cleared)
4110 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4111
4112 cleared = 1;
4113 }
4114
4115 /* If the constructor has fewer fields than the structure
4116 or if we are initializing the structure to mostly zeros,
4117 clear the whole structure first. */
4118 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4119 != list_length (TYPE_FIELDS (type)))
4120 || mostly_zeros_p (exp))
4121 {
4122 if (! cleared)
4123 clear_storage (target, expr_size (exp),
4124 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4125
4126 cleared = 1;
4127 }
4128 else
4129 /* Inform later passes that the old value is dead. */
4130 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4131
4132 /* Store each element of the constructor into
4133 the corresponding field of TARGET. */
4134
4135 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4136 {
4137 register tree field = TREE_PURPOSE (elt);
4138 #ifdef WORD_REGISTER_OPERATIONS
4139 tree value = TREE_VALUE (elt);
4140 #endif
4141 register enum machine_mode mode;
4142 int bitsize;
4143 int bitpos = 0;
4144 int unsignedp;
4145 tree pos, constant = 0, offset = 0;
4146 rtx to_rtx = target;
4147
4148 /* Just ignore missing fields.
4149 We cleared the whole structure, above,
4150 if any fields are missing. */
4151 if (field == 0)
4152 continue;
4153
4154 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4155 continue;
4156
4157 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4158 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4159 else
4160 bitsize = -1;
4161
4162 unsignedp = TREE_UNSIGNED (field);
4163 mode = DECL_MODE (field);
4164 if (DECL_BIT_FIELD (field))
4165 mode = VOIDmode;
4166
4167 pos = DECL_FIELD_BITPOS (field);
4168 if (TREE_CODE (pos) == INTEGER_CST)
4169 constant = pos;
4170 else if (TREE_CODE (pos) == PLUS_EXPR
4171 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4172 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4173 else
4174 offset = pos;
4175
4176 if (constant)
4177 bitpos = TREE_INT_CST_LOW (constant);
4178
4179 if (offset)
4180 {
4181 rtx offset_rtx;
4182
4183 if (contains_placeholder_p (offset))
4184 offset = build (WITH_RECORD_EXPR, sizetype,
4185 offset, make_tree (TREE_TYPE (exp), target));
4186
4187 offset = size_binop (FLOOR_DIV_EXPR, offset,
4188 size_int (BITS_PER_UNIT));
4189
4190 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4191 if (GET_CODE (to_rtx) != MEM)
4192 abort ();
4193
4194 if (GET_MODE (offset_rtx) != ptr_mode)
4195 {
4196 #ifdef POINTERS_EXTEND_UNSIGNED
4197 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4198 #else
4199 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4200 #endif
4201 }
4202
4203 to_rtx
4204 = change_address (to_rtx, VOIDmode,
4205 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4206 force_reg (ptr_mode,
4207 offset_rtx)));
4208 }
4209
4210 if (TREE_READONLY (field))
4211 {
4212 if (GET_CODE (to_rtx) == MEM)
4213 to_rtx = copy_rtx (to_rtx);
4214
4215 RTX_UNCHANGING_P (to_rtx) = 1;
4216 }
4217
4218 #ifdef WORD_REGISTER_OPERATIONS
4219 /* If this initializes a field that is smaller than a word, at the
4220 start of a word, try to widen it to a full word.
4221 This special case allows us to output C++ member function
4222 initializations in a form that the optimizers can understand. */
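	 /* A hypothetical illustration: storing the constant 5 into a
	    byte-sized field at the start of a word-sized register target
	    is rewritten below as a full-word store of 5 (shifted up by
	    BITS_PER_WORD - 8 on big-endian machines), avoiding a
	    partial-word bit-field insertion.  */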
4223 if (constant
4224 && GET_CODE (target) == REG
4225 && bitsize < BITS_PER_WORD
4226 && bitpos % BITS_PER_WORD == 0
4227 && GET_MODE_CLASS (mode) == MODE_INT
4228 && TREE_CODE (value) == INTEGER_CST
4229 && GET_CODE (exp_size) == CONST_INT
4230 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4231 {
4232 tree type = TREE_TYPE (value);
4233 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4234 {
4235 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4236 value = convert (type, value);
4237 }
4238 if (BYTES_BIG_ENDIAN)
4239 value
4240 = fold (build (LSHIFT_EXPR, type, value,
4241 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4242 bitsize = BITS_PER_WORD;
4243 mode = word_mode;
4244 }
4245 #endif
4246 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4247 TREE_VALUE (elt), type,
4248 MIN (align,
4249 DECL_ALIGN (TREE_PURPOSE (elt))),
4250 cleared);
4251 }
4252 }
4253 else if (TREE_CODE (type) == ARRAY_TYPE)
4254 {
4255 register tree elt;
4256 register int i;
4257 int need_to_clear;
4258 tree domain = TYPE_DOMAIN (type);
4259 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4260 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4261 tree elttype = TREE_TYPE (type);
4262
4263 /* If the constructor has fewer elements than the array,
4264 clear the whole array first. Similarly if this is
4265 	 a static constructor of a non-BLKmode object.  */
4266 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4267 need_to_clear = 1;
4268 else
4269 {
4270 HOST_WIDE_INT count = 0, zero_count = 0;
4271 need_to_clear = 0;
4272 /* This loop is a more accurate version of the loop in
4273 mostly_zeros_p (it handles RANGE_EXPR in an index).
4274 It is also needed to check for missing elements. */
4275 for (elt = CONSTRUCTOR_ELTS (exp);
4276 elt != NULL_TREE;
4277 elt = TREE_CHAIN (elt))
4278 {
4279 tree index = TREE_PURPOSE (elt);
4280 HOST_WIDE_INT this_node_count;
4281 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4282 {
4283 tree lo_index = TREE_OPERAND (index, 0);
4284 tree hi_index = TREE_OPERAND (index, 1);
4285 if (TREE_CODE (lo_index) != INTEGER_CST
4286 || TREE_CODE (hi_index) != INTEGER_CST)
4287 {
4288 need_to_clear = 1;
4289 break;
4290 }
4291 this_node_count = TREE_INT_CST_LOW (hi_index)
4292 - TREE_INT_CST_LOW (lo_index) + 1;
4293 }
4294 else
4295 this_node_count = 1;
4296 count += this_node_count;
4297 if (mostly_zeros_p (TREE_VALUE (elt)))
4298 zero_count += this_node_count;
4299 }
4300 /* Clear the entire array first if there are any missing elements,
4301 or if the incidence of zero elements is >= 75%. */
4302 if (count < maxelt - minelt + 1
4303 || 4 * zero_count >= 3 * count)
4304 need_to_clear = 1;
4305 }
4306 if (need_to_clear)
4307 {
4308 if (! cleared)
4309 clear_storage (target, expr_size (exp),
4310 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4311 cleared = 1;
4312 }
4313 else
4314 /* Inform later passes that the old value is dead. */
4315 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4316
4317 /* Store each element of the constructor into
4318 the corresponding element of TARGET, determined
4319 by counting the elements. */
4320 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4321 elt;
4322 elt = TREE_CHAIN (elt), i++)
4323 {
4324 register enum machine_mode mode;
4325 int bitsize;
4326 int bitpos;
4327 int unsignedp;
4328 tree value = TREE_VALUE (elt);
4329 int align = TYPE_ALIGN (TREE_TYPE (value));
4330 tree index = TREE_PURPOSE (elt);
4331 rtx xtarget = target;
4332
4333 if (cleared && is_zeros_p (value))
4334 continue;
4335
4336 unsignedp = TREE_UNSIGNED (elttype);
4337 mode = TYPE_MODE (elttype);
4338 if (mode == BLKmode)
4339 {
4340 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4341 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4342 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4343 else
4344 bitsize = -1;
4345 }
4346 else
4347 bitsize = GET_MODE_BITSIZE (mode);
4348
4349 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4350 {
4351 tree lo_index = TREE_OPERAND (index, 0);
4352 tree hi_index = TREE_OPERAND (index, 1);
4353 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4354 struct nesting *loop;
4355 HOST_WIDE_INT lo, hi, count;
4356 tree position;
4357
4358 /* If the range is constant and "small", unroll the loop. */
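	    /* "Small" here means: the target is not in memory, or the
	       range covers at most two elements, or the unrolled stores
	       total no more than 40 bytes (the 40 * 8 bits below).  */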
4359 if (TREE_CODE (lo_index) == INTEGER_CST
4360 && TREE_CODE (hi_index) == INTEGER_CST
4361 && (lo = TREE_INT_CST_LOW (lo_index),
4362 hi = TREE_INT_CST_LOW (hi_index),
4363 count = hi - lo + 1,
4364 (GET_CODE (target) != MEM
4365 || count <= 2
4366 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4367 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4368 <= 40 * 8))))
4369 {
4370 lo -= minelt; hi -= minelt;
4371 for (; lo <= hi; lo++)
4372 {
4373 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4374 store_constructor_field (target, bitsize, bitpos, mode,
4375 value, type, align, cleared);
4376 }
4377 }
4378 else
4379 {
4380 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4381 loop_top = gen_label_rtx ();
4382 loop_end = gen_label_rtx ();
4383
4384 unsignedp = TREE_UNSIGNED (domain);
4385
4386 index = build_decl (VAR_DECL, NULL_TREE, domain);
4387
4388 DECL_RTL (index) = index_r
4389 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4390 &unsignedp, 0));
4391
4392 if (TREE_CODE (value) == SAVE_EXPR
4393 && SAVE_EXPR_RTL (value) == 0)
4394 {
4395 /* Make sure value gets expanded once before the
4396 loop. */
4397 expand_expr (value, const0_rtx, VOIDmode, 0);
4398 emit_queue ();
4399 }
4400 store_expr (lo_index, index_r, 0);
4401 loop = expand_start_loop (0);
4402
4403 	      /* Assign VALUE to the element at position INDEX.  */
4404 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4405 size_int (BITS_PER_UNIT));
4406 position = size_binop (MULT_EXPR,
4407 size_binop (MINUS_EXPR, index,
4408 TYPE_MIN_VALUE (domain)),
4409 position);
4410 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4411 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4412 xtarget = change_address (target, mode, addr);
4413 if (TREE_CODE (value) == CONSTRUCTOR)
4414 store_constructor (value, xtarget, align, cleared);
4415 else
4416 store_expr (value, xtarget, 0);
4417
4418 expand_exit_loop_if_false (loop,
4419 build (LT_EXPR, integer_type_node,
4420 index, hi_index));
4421
4422 expand_increment (build (PREINCREMENT_EXPR,
4423 TREE_TYPE (index),
4424 index, integer_one_node), 0, 0);
4425 expand_end_loop ();
4426 emit_label (loop_end);
4427
4428 	      /* Needed by stupid register allocation, to extend the
4429 lifetime of pseudo-regs used by target past the end
4430 of the loop. */
4431 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4432 }
4433 }
4434 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4435 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4436 {
4437 rtx pos_rtx, addr;
4438 tree position;
4439
4440 if (index == 0)
4441 index = size_int (i);
4442
4443 if (minelt)
4444 index = size_binop (MINUS_EXPR, index,
4445 TYPE_MIN_VALUE (domain));
4446 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4447 size_int (BITS_PER_UNIT));
4448 position = size_binop (MULT_EXPR, index, position);
4449 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4450 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4451 xtarget = change_address (target, mode, addr);
4452 store_expr (value, xtarget, 0);
4453 }
4454 else
4455 {
4456 if (index != 0)
4457 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4458 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4459 else
4460 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4461 store_constructor_field (target, bitsize, bitpos, mode, value,
4462 type, align, cleared);
4463 }
4464 }
4465 }
4466   /* Set constructor assignments.  */
4467 else if (TREE_CODE (type) == SET_TYPE)
4468 {
4469 tree elt = CONSTRUCTOR_ELTS (exp);
4470 int nbytes = int_size_in_bytes (type), nbits;
4471 tree domain = TYPE_DOMAIN (type);
4472 tree domain_min, domain_max, bitlength;
4473
4474 /* The default implementation strategy is to extract the constant
4475 parts of the constructor, use that to initialize the target,
4476 and then "or" in whatever non-constant ranges we need in addition.
4477
4478 If a large set is all zero or all ones, it is
4479 probably better to set it using memset (if available) or bzero.
4480 	 Also, if a large set has just a single range, it may be
4481 	 better to first clear the whole set (using bzero/memset)
4482 	 and then set the bits we want.  */
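      /* For example (illustrative): a set constructor denoting members
	 { 0, 1, 2, 5 } of an 8-bit set is expanded by building the
	 constant word 0x27 (bit I set for member I, mirrored on
	 big-endian targets) and storing it with a single move, rather
	 than emitting one store per member.  */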
4483
4484 /* Check for all zeros. */
4485 if (elt == NULL_TREE)
4486 {
4487 if (!cleared)
4488 clear_storage (target, expr_size (exp),
4489 TYPE_ALIGN (type) / BITS_PER_UNIT);
4490 return;
4491 }
4492
4493 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4494 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4495 bitlength = size_binop (PLUS_EXPR,
4496 size_binop (MINUS_EXPR, domain_max, domain_min),
4497 size_one_node);
4498
4499 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4500 abort ();
4501 nbits = TREE_INT_CST_LOW (bitlength);
4502
4503 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4504 are "complicated" (more than one range), initialize (the
4505 constant parts) by copying from a constant. */
4506 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4507 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4508 {
4509 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4510 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4511 char *bit_buffer = (char *) alloca (nbits);
4512 HOST_WIDE_INT word = 0;
4513 int bit_pos = 0;
4514 int ibit = 0;
4515 int offset = 0; /* In bytes from beginning of set. */
4516 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4517 for (;;)
4518 {
4519 if (bit_buffer[ibit])
4520 {
4521 if (BYTES_BIG_ENDIAN)
4522 word |= (1 << (set_word_size - 1 - bit_pos));
4523 else
4524 word |= 1 << bit_pos;
4525 }
4526 bit_pos++; ibit++;
4527 if (bit_pos >= set_word_size || ibit == nbits)
4528 {
4529 if (word != 0 || ! cleared)
4530 {
4531 rtx datum = GEN_INT (word);
4532 rtx to_rtx;
4533 /* The assumption here is that it is safe to use
4534 XEXP if the set is multi-word, but not if
4535 it's single-word. */
4536 if (GET_CODE (target) == MEM)
4537 {
4538 to_rtx = plus_constant (XEXP (target, 0), offset);
4539 to_rtx = change_address (target, mode, to_rtx);
4540 }
4541 else if (offset == 0)
4542 to_rtx = target;
4543 else
4544 abort ();
4545 emit_move_insn (to_rtx, datum);
4546 }
4547 if (ibit == nbits)
4548 break;
4549 word = 0;
4550 bit_pos = 0;
4551 offset += set_word_size / BITS_PER_UNIT;
4552 }
4553 }
4554 }
4555 else if (!cleared)
4556 {
4557 /* Don't bother clearing storage if the set is all ones. */
4558 if (TREE_CHAIN (elt) != NULL_TREE
4559 || (TREE_PURPOSE (elt) == NULL_TREE
4560 ? nbits != 1
4561 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4562 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4563 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4564 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4565 != nbits))))
4566 clear_storage (target, expr_size (exp),
4567 TYPE_ALIGN (type) / BITS_PER_UNIT);
4568 }
4569
4570 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4571 {
4572 /* start of range of element or NULL */
4573 tree startbit = TREE_PURPOSE (elt);
4574 /* end of range of element, or element value */
4575 tree endbit = TREE_VALUE (elt);
4576 #ifdef TARGET_MEM_FUNCTIONS
4577 HOST_WIDE_INT startb, endb;
4578 #endif
4579 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4580
4581 bitlength_rtx = expand_expr (bitlength,
4582 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4583
4584 /* handle non-range tuple element like [ expr ] */
4585 if (startbit == NULL_TREE)
4586 {
4587 startbit = save_expr (endbit);
4588 endbit = startbit;
4589 }
4590 startbit = convert (sizetype, startbit);
4591 endbit = convert (sizetype, endbit);
4592 if (! integer_zerop (domain_min))
4593 {
4594 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4595 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4596 }
4597 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4598 EXPAND_CONST_ADDRESS);
4599 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4600 EXPAND_CONST_ADDRESS);
4601
4602 if (REG_P (target))
4603 {
4604 targetx = assign_stack_temp (GET_MODE (target),
4605 GET_MODE_SIZE (GET_MODE (target)),
4606 0);
4607 emit_move_insn (targetx, target);
4608 }
4609 else if (GET_CODE (target) == MEM)
4610 targetx = target;
4611 else
4612 abort ();
4613
4614 #ifdef TARGET_MEM_FUNCTIONS
4615 /* Optimization: If startbit and endbit are
4616 constants divisible by BITS_PER_UNIT,
4617 call memset instead. */
4618 if (TREE_CODE (startbit) == INTEGER_CST
4619 && TREE_CODE (endbit) == INTEGER_CST
4620 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4621 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4622 {
4623 emit_library_call (memset_libfunc, 0,
4624 VOIDmode, 3,
4625 plus_constant (XEXP (targetx, 0),
4626 startb / BITS_PER_UNIT),
4627 Pmode,
4628 constm1_rtx, TYPE_MODE (integer_type_node),
4629 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4630 TYPE_MODE (sizetype));
4631 }
4632 else
4633 #endif
4634 {
4635 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4636 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4637 bitlength_rtx, TYPE_MODE (sizetype),
4638 startbit_rtx, TYPE_MODE (sizetype),
4639 endbit_rtx, TYPE_MODE (sizetype));
4640 }
4641 if (REG_P (target))
4642 emit_move_insn (target, targetx);
4643 }
4644 }
4645
4646 else
4647 abort ();
4648 }
4649
4650 /* Store the value of EXP (an expression tree)
4651 into a subfield of TARGET which has mode MODE and occupies
4652 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4653 If MODE is VOIDmode, it means that we are storing into a bit-field.
4654
4655 If VALUE_MODE is VOIDmode, return nothing in particular.
4656 UNSIGNEDP is not used in this case.
4657
4658 Otherwise, return an rtx for the value stored. This rtx
4659 has mode VALUE_MODE if that is convenient to do.
4660 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4661
4662 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4663 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4664
4665 ALIAS_SET is the alias set for the destination. This value will
4666 (in general) be different from that for TARGET, since TARGET is a
4667 reference to the containing structure. */
4668
4669 static rtx
4670 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4671 unsignedp, align, total_size, alias_set)
4672 rtx target;
4673 int bitsize, bitpos;
4674 enum machine_mode mode;
4675 tree exp;
4676 enum machine_mode value_mode;
4677 int unsignedp;
4678 int align;
4679 int total_size;
4680 int alias_set;
4681 {
4682 HOST_WIDE_INT width_mask = 0;
4683
4684 if (TREE_CODE (exp) == ERROR_MARK)
4685 return const0_rtx;
4686
4687 if (bitsize < HOST_BITS_PER_WIDE_INT)
4688 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4689
4690 /* If we are storing into an unaligned field of an aligned union that is
4691 in a register, we may have the mode of TARGET being an integer mode but
4692 MODE == BLKmode. In that case, get an aligned object whose size and
4693 alignment are the same as TARGET and store TARGET into it (we can avoid
4694 the store if the field being stored is the entire width of TARGET). Then
4695 call ourselves recursively to store the field into a BLKmode version of
4696 that object. Finally, load from the object into TARGET. This is not
4697 very efficient in general, but should only be slightly more expensive
4698 than the otherwise-required unaligned accesses. Perhaps this can be
4699 cleaned up later. */
4700
4701 if (mode == BLKmode
4702 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4703 {
4704 rtx object = assign_stack_temp (GET_MODE (target),
4705 GET_MODE_SIZE (GET_MODE (target)), 0);
4706 rtx blk_object = copy_rtx (object);
4707
4708 MEM_SET_IN_STRUCT_P (object, 1);
4709 MEM_SET_IN_STRUCT_P (blk_object, 1);
4710 PUT_MODE (blk_object, BLKmode);
4711
4712 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4713 emit_move_insn (object, target);
4714
4715 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4716 align, total_size, alias_set);
4717
4718 /* Even though we aren't returning target, we need to
4719 give it the updated value. */
4720 emit_move_insn (target, object);
4721
4722 return blk_object;
4723 }
4724
4725 /* If the structure is in a register or if the component
4726 is a bit field, we cannot use addressing to access it.
4727 Use bit-field techniques or SUBREG to store in it. */
4728
4729 if (mode == VOIDmode
4730 || (mode != BLKmode && ! direct_store[(int) mode]
4731 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4732 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4733 || GET_CODE (target) == REG
4734 || GET_CODE (target) == SUBREG
4735 /* If the field isn't aligned enough to store as an ordinary memref,
4736 store it as a bit field. */
4737 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
4738 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4739 || bitpos % GET_MODE_ALIGNMENT (mode)))
4740 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
4741 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4742 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4743 /* If the RHS and field are a constant size and the size of the
4744 RHS isn't the same size as the bitfield, we must use bitfield
4745 operations. */
4746 || ((bitsize >= 0
4747 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4748 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4749 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4750 {
4751 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4752
4753 /* If BITSIZE is narrower than the size of the type of EXP
4754 we will be narrowing TEMP. Normally, what's wanted are the
4755 low-order bits. However, if EXP's type is a record and this is
4756 	 a big-endian machine, we want the upper BITSIZE bits.  */
4757 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4758 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4759 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4760 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4761 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4762 - bitsize),
4763 temp, 1);
4764
4765 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4766 MODE. */
4767 if (mode != VOIDmode && mode != BLKmode
4768 && mode != TYPE_MODE (TREE_TYPE (exp)))
4769 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4770
4771 /* If the modes of TARGET and TEMP are both BLKmode, both
4772 must be in memory and BITPOS must be aligned on a byte
4773 boundary. If so, we simply do a block copy. */
4774 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4775 {
4776 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4777 || bitpos % BITS_PER_UNIT != 0)
4778 abort ();
4779
4780 target = change_address (target, VOIDmode,
4781 plus_constant (XEXP (target, 0),
4782 bitpos / BITS_PER_UNIT));
4783
4784 /* Find an alignment that is consistent with the bit position. */
4785 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4786 align >>= 1;
4787
4788 emit_block_move (target, temp,
4789 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4790 / BITS_PER_UNIT),
4791 align);
4792
4793 return value_mode == VOIDmode ? const0_rtx : target;
4794 }
4795
4796 /* Store the value in the bitfield. */
4797 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4798 if (value_mode != VOIDmode)
4799 {
4800 /* The caller wants an rtx for the value. */
4801 /* If possible, avoid refetching from the bitfield itself. */
4802 if (width_mask != 0
4803 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4804 {
4805 tree count;
4806 enum machine_mode tmode;
4807
4808 if (unsignedp)
4809 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4810 tmode = GET_MODE (temp);
4811 if (tmode == VOIDmode)
4812 tmode = value_mode;
4813 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4814 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4815 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4816 }
4817 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4818 NULL_RTX, value_mode, 0, align,
4819 total_size);
4820 }
4821 return const0_rtx;
4822 }
4823 else
4824 {
4825 rtx addr = XEXP (target, 0);
4826 rtx to_rtx;
4827
4828 /* If a value is wanted, it must be the lhs;
4829 so make the address stable for multiple use. */
4830
4831 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4832 && ! CONSTANT_ADDRESS_P (addr)
4833 /* A frame-pointer reference is already stable. */
4834 && ! (GET_CODE (addr) == PLUS
4835 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4836 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4837 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4838 addr = copy_to_reg (addr);
4839
4840 /* Now build a reference to just the desired component. */
4841
4842 to_rtx = copy_rtx (change_address (target, mode,
4843 plus_constant (addr,
4844 (bitpos
4845 / BITS_PER_UNIT))));
4846 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4847 MEM_ALIAS_SET (to_rtx) = alias_set;
4848
4849 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4850 }
4851 }
4852 \f
4853 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4854 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4855 ARRAY_REFs and find the ultimate containing object, which we return.
4856
4857 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4858 bit position, and *PUNSIGNEDP to the signedness of the field.
4859 If the position of the field is variable, we store a tree
4860 giving the variable offset (in units) in *POFFSET.
4861 This offset is in addition to the bit position.
4862 If the position is not variable, we store 0 in *POFFSET.
4863 We set *PALIGNMENT to the alignment in bytes of the address that will be
4864 computed. This is the alignment of the thing we return if *POFFSET
4865    is zero, but can be less strictly aligned if *POFFSET is nonzero.
4866
4867 If any of the extraction expressions is volatile,
4868 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4869
4870 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4871 is a mode that can be used to access the field. In that case, *PBITSIZE
4872 is redundant.
4873
4874 If the field describes a variable-sized object, *PMODE is set to
4875 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4876 this case, but the address of the object can be found. */
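/* An illustrative example: for a reference such as S.A[I].B (with S a
   declared structure), this function returns the decl for S; *PBITPOS
   accumulates the constant bit offsets of the fields A and B, *POFFSET
   gets the variable part of the offset (I scaled by the element size,
   in units), and *PBITSIZE/*PMODE describe the field B itself.  */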
4877
4878 tree
4879 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4880 punsignedp, pvolatilep, palignment)
4881 tree exp;
4882 int *pbitsize;
4883 int *pbitpos;
4884 tree *poffset;
4885 enum machine_mode *pmode;
4886 int *punsignedp;
4887 int *pvolatilep;
4888 int *palignment;
4889 {
4890 tree orig_exp = exp;
4891 tree size_tree = 0;
4892 enum machine_mode mode = VOIDmode;
4893 tree offset = integer_zero_node;
4894 unsigned int alignment = BIGGEST_ALIGNMENT;
4895
4896 if (TREE_CODE (exp) == COMPONENT_REF)
4897 {
4898 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4899 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4900 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4901 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4902 }
4903 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4904 {
4905 size_tree = TREE_OPERAND (exp, 1);
4906 *punsignedp = TREE_UNSIGNED (exp);
4907 }
4908 else
4909 {
4910 mode = TYPE_MODE (TREE_TYPE (exp));
4911 if (mode == BLKmode)
4912 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4913
4914 *pbitsize = GET_MODE_BITSIZE (mode);
4915 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4916 }
4917
4918 if (size_tree)
4919 {
4920 if (TREE_CODE (size_tree) != INTEGER_CST)
4921 mode = BLKmode, *pbitsize = -1;
4922 else
4923 *pbitsize = TREE_INT_CST_LOW (size_tree);
4924 }
4925
4926 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4927 and find the ultimate containing object. */
4928
4929 *pbitpos = 0;
4930
4931 while (1)
4932 {
4933 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4934 {
4935 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4936 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4937 : TREE_OPERAND (exp, 2));
4938 tree constant = integer_zero_node, var = pos;
4939
4940 /* If this field hasn't been filled in yet, don't go
4941 past it. This should only happen when folding expressions
4942 made during type construction. */
4943 if (pos == 0)
4944 break;
4945
4946 /* Assume here that the offset is a multiple of a unit.
4947 If not, there should be an explicitly added constant. */
4948 if (TREE_CODE (pos) == PLUS_EXPR
4949 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4950 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4951 else if (TREE_CODE (pos) == INTEGER_CST)
4952 constant = pos, var = integer_zero_node;
4953
4954 *pbitpos += TREE_INT_CST_LOW (constant);
4955 offset = size_binop (PLUS_EXPR, offset,
4956 size_binop (EXACT_DIV_EXPR, var,
4957 size_int (BITS_PER_UNIT)));
4958 }
4959
4960 else if (TREE_CODE (exp) == ARRAY_REF)
4961 {
4962 /* This code is based on the code in case ARRAY_REF in expand_expr
4963 below. We assume here that the size of an array element is
4964 always an integral multiple of BITS_PER_UNIT. */
4965
4966 tree index = TREE_OPERAND (exp, 1);
4967 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4968 tree low_bound
4969 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4970 tree index_type = TREE_TYPE (index);
4971 tree xindex;
4972
4973 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4974 {
4975 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4976 index);
4977 index_type = TREE_TYPE (index);
4978 }
4979
4980 /* Optimize the special-case of a zero lower bound.
4981
4982 We convert the low_bound to sizetype to avoid some problems
4983 with constant folding. (E.g. suppose the lower bound is 1,
4984 and its mode is QI. Without the conversion, (ARRAY
4985 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4986 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4987
4988 But sizetype isn't quite right either (especially if
4989 the lowbound is negative). FIXME */
4990
4991 if (! integer_zerop (low_bound))
4992 index = fold (build (MINUS_EXPR, index_type, index,
4993 convert (sizetype, low_bound)));
4994
4995 if (TREE_CODE (index) == INTEGER_CST)
4996 {
4997 index = convert (sbitsizetype, index);
4998 index_type = TREE_TYPE (index);
4999 }
5000
5001 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5002 convert (sbitsizetype,
5003 TYPE_SIZE (TREE_TYPE (exp)))));
5004
5005 if (TREE_CODE (xindex) == INTEGER_CST
5006 && TREE_INT_CST_HIGH (xindex) == 0)
5007 *pbitpos += TREE_INT_CST_LOW (xindex);
5008 else
5009 {
5010 /* Either the bit offset calculated above is not constant, or
5011 it overflowed. In either case, redo the multiplication
5012 against the size in units. This is especially important
5013 in the non-constant case to avoid a division at runtime. */
5014 xindex = fold (build (MULT_EXPR, ssizetype, index,
5015 convert (ssizetype,
5016 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5017
5018 if (contains_placeholder_p (xindex))
5019 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5020
5021 offset = size_binop (PLUS_EXPR, offset, xindex);
5022 }
5023 }
5024 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5025 && ! ((TREE_CODE (exp) == NOP_EXPR
5026 || TREE_CODE (exp) == CONVERT_EXPR)
5027 && (TYPE_MODE (TREE_TYPE (exp))
5028 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5029 break;
5030
5031 /* If any reference in the chain is volatile, the effect is volatile. */
5032 if (TREE_THIS_VOLATILE (exp))
5033 *pvolatilep = 1;
5034
5035       /* If the offset is already non-constant, then we can't assume any
5036 	 more alignment than the alignment here.  */
5037 if (! integer_zerop (offset))
5038 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5039
5040 exp = TREE_OPERAND (exp, 0);
5041 }
5042
5043 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5044 alignment = MIN (alignment, DECL_ALIGN (exp));
5045 else if (TREE_TYPE (exp) != 0)
5046 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5047
5048 if (integer_zerop (offset))
5049 offset = 0;
5050
5051 if (offset != 0 && contains_placeholder_p (offset))
5052 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5053
5054 *pmode = mode;
5055 *poffset = offset;
5056 *palignment = alignment / BITS_PER_UNIT;
5057 return exp;
5058 }
5059
5060 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5061 static enum memory_use_mode
5062 get_memory_usage_from_modifier (modifier)
5063 enum expand_modifier modifier;
5064 {
5065 switch (modifier)
5066 {
5067 case EXPAND_NORMAL:
5068 case EXPAND_SUM:
5069 return MEMORY_USE_RO;
5070 break;
5071 case EXPAND_MEMORY_USE_WO:
5072 return MEMORY_USE_WO;
5073 break;
5074 case EXPAND_MEMORY_USE_RW:
5075 return MEMORY_USE_RW;
5076 break;
5077 case EXPAND_MEMORY_USE_DONT:
5078 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5079 MEMORY_USE_DONT, because they are modifiers to a call of
5080 expand_expr in the ADDR_EXPR case of expand_expr. */
5081 case EXPAND_CONST_ADDRESS:
5082 case EXPAND_INITIALIZER:
5083 return MEMORY_USE_DONT;
5084 case EXPAND_MEMORY_USE_BAD:
5085 default:
5086 abort ();
5087 }
5088 }
5089 \f
5090 /* Given an rtx VALUE that may contain additions and multiplications,
5091 return an equivalent value that just refers to a register or memory.
5092 This is done by generating instructions to perform the arithmetic
5093 and returning a pseudo-register containing the value.
5094
5095 The returned value may be a REG, SUBREG, MEM or constant. */
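/* For instance (hypothetical RTL): given

       (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   this emits the multiplication and the addition as separate insns and
   returns the pseudo register that holds the sum.  */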
5096
5097 rtx
5098 force_operand (value, target)
5099 rtx value, target;
5100 {
5101 register optab binoptab = 0;
5102 /* Use a temporary to force order of execution of calls to
5103 `force_operand'. */
5104 rtx tmp;
5105 register rtx op2;
5106 /* Use subtarget as the target for operand 0 of a binary operation. */
5107 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5108
5109 /* Check for a PIC address load. */
5110 if (flag_pic
5111 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5112 && XEXP (value, 0) == pic_offset_table_rtx
5113 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5114 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5115 || GET_CODE (XEXP (value, 1)) == CONST))
5116 {
5117 if (!subtarget)
5118 subtarget = gen_reg_rtx (GET_MODE (value));
5119 emit_move_insn (subtarget, value);
5120 return subtarget;
5121 }
5122
5123 if (GET_CODE (value) == PLUS)
5124 binoptab = add_optab;
5125 else if (GET_CODE (value) == MINUS)
5126 binoptab = sub_optab;
5127 else if (GET_CODE (value) == MULT)
5128 {
5129 op2 = XEXP (value, 1);
5130 if (!CONSTANT_P (op2)
5131 && !(GET_CODE (op2) == REG && op2 != subtarget))
5132 subtarget = 0;
5133 tmp = force_operand (XEXP (value, 0), subtarget);
5134 return expand_mult (GET_MODE (value), tmp,
5135 force_operand (op2, NULL_RTX),
5136 target, 0);
5137 }
5138
5139 if (binoptab)
5140 {
5141 op2 = XEXP (value, 1);
5142 if (!CONSTANT_P (op2)
5143 && !(GET_CODE (op2) == REG && op2 != subtarget))
5144 subtarget = 0;
5145 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5146 {
5147 binoptab = add_optab;
5148 op2 = negate_rtx (GET_MODE (value), op2);
5149 }
5150
5151 /* Check for an addition with OP2 a constant integer and our first
5152 operand a PLUS of a virtual register and something else. In that
5153 case, we want to emit the sum of the virtual register and the
5154 constant first and then add the other value. This allows virtual
5155 register instantiation to simply modify the constant rather than
5156 creating another one around this addition. */
5157 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5158 && GET_CODE (XEXP (value, 0)) == PLUS
5159 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5160 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5161 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5162 {
5163 rtx temp = expand_binop (GET_MODE (value), binoptab,
5164 XEXP (XEXP (value, 0), 0), op2,
5165 subtarget, 0, OPTAB_LIB_WIDEN);
5166 return expand_binop (GET_MODE (value), binoptab, temp,
5167 force_operand (XEXP (XEXP (value, 0), 1), 0),
5168 target, 0, OPTAB_LIB_WIDEN);
5169 }
5170
5171 tmp = force_operand (XEXP (value, 0), subtarget);
5172 return expand_binop (GET_MODE (value), binoptab, tmp,
5173 force_operand (op2, NULL_RTX),
5174 target, 0, OPTAB_LIB_WIDEN);
5175 /* We give UNSIGNEDP = 0 to expand_binop
5176 because the only operations we are expanding here are signed ones. */
5177 }
5178 return value;
5179 }
5180 \f
5181 /* Subroutine of expand_expr:
5182 save the non-copied parts (LIST) of an expr (LHS), and return a list
5183 which can restore these values to their previous values,
5184 should something modify their storage. */
5185
5186 static tree
5187 save_noncopied_parts (lhs, list)
5188 tree lhs;
5189 tree list;
5190 {
5191 tree tail;
5192 tree parts = 0;
5193
5194 for (tail = list; tail; tail = TREE_CHAIN (tail))
5195 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5196 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5197 else
5198 {
5199 tree part = TREE_VALUE (tail);
5200 tree part_type = TREE_TYPE (part);
5201 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5202 rtx target = assign_temp (part_type, 0, 1, 1);
5203 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5204 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5205 parts = tree_cons (to_be_saved,
5206 build (RTL_EXPR, part_type, NULL_TREE,
5207 (tree) target),
5208 parts);
5209 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5210 }
5211 return parts;
5212 }
5213
5214 /* Subroutine of expand_expr:
5215 record the non-copied parts (LIST) of an expr (LHS), and return a list
5216 which specifies the initial values of these parts. */
5217
5218 static tree
5219 init_noncopied_parts (lhs, list)
5220 tree lhs;
5221 tree list;
5222 {
5223 tree tail;
5224 tree parts = 0;
5225
5226 for (tail = list; tail; tail = TREE_CHAIN (tail))
5227 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5228 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5229 else if (TREE_PURPOSE (tail))
5230 {
5231 tree part = TREE_VALUE (tail);
5232 tree part_type = TREE_TYPE (part);
5233 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5234 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5235 }
5236 return parts;
5237 }
5238
5239 /* Subroutine of expand_expr: return nonzero iff there is no way that
5240 EXP can reference X, which is being modified. TOP_P is nonzero if this
5241 call is going to be used to determine whether we need a temporary
5242 for EXP, as opposed to a recursive call to this function.
5243
5244 It is always safe for this routine to return zero since it merely
5245 searches for optimization opportunities. */
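/* An illustrative use: when expanding an assignment A = B + C, the rtx
   for A may be used directly as the target for computing B + C only if
   this routine reports that B + C cannot reference A; returning zero
   merely forces a separate temporary to be used.  */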
5246
5247 static int
5248 safe_from_p (x, exp, top_p)
5249 rtx x;
5250 tree exp;
5251 int top_p;
5252 {
5253 rtx exp_rtl = 0;
5254 int i, nops;
5255 static int save_expr_count;
5256 static int save_expr_size = 0;
5257 static tree *save_expr_rewritten;
5258 static tree save_expr_trees[256];
5259
5260 if (x == 0
5261 /* If EXP has varying size, we MUST use a target since we currently
5262 have no way of allocating temporaries of variable size
5263 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5264 So we assume here that something at a higher level has prevented a
5265 clash. This is somewhat bogus, but the best we can do. Only
5266 do this when X is BLKmode and when we are at the top level. */
5267 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5268 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5269 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5270 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5271 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5272 != INTEGER_CST)
5273 && GET_MODE (x) == BLKmode))
5274 return 1;
5275
5276 if (top_p && save_expr_size == 0)
5277 {
5278 int rtn;
5279
5280 save_expr_count = 0;
5281 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5282 save_expr_rewritten = &save_expr_trees[0];
5283
5284 rtn = safe_from_p (x, exp, 1);
5285
5286 for (i = 0; i < save_expr_count; ++i)
5287 {
5288 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5289 abort ();
5290 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5291 }
5292
5293 save_expr_size = 0;
5294
5295 return rtn;
5296 }
5297
5298 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5299 find the underlying pseudo. */
5300 if (GET_CODE (x) == SUBREG)
5301 {
5302 x = SUBREG_REG (x);
5303 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5304 return 0;
5305 }
5306
5307 /* If X is a location in the outgoing argument area, it is always safe. */
5308 if (GET_CODE (x) == MEM
5309 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5310 || (GET_CODE (XEXP (x, 0)) == PLUS
5311 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5312 return 1;
5313
5314 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5315 {
5316 case 'd':
5317 exp_rtl = DECL_RTL (exp);
5318 break;
5319
5320 case 'c':
5321 return 1;
5322
5323 case 'x':
5324 if (TREE_CODE (exp) == TREE_LIST)
5325 return ((TREE_VALUE (exp) == 0
5326 || safe_from_p (x, TREE_VALUE (exp), 0))
5327 && (TREE_CHAIN (exp) == 0
5328 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5329 else if (TREE_CODE (exp) == ERROR_MARK)
5330 return 1; /* An already-visited SAVE_EXPR? */
5331 else
5332 return 0;
5333
5334 case '1':
5335 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5336
5337 case '2':
5338 case '<':
5339 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5340 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5341
5342 case 'e':
5343 case 'r':
5344 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5345 the expression. If it is set, we conflict iff we are that rtx or
5346 both are in memory. Otherwise, we check all operands of the
5347 expression recursively. */
5348
5349 switch (TREE_CODE (exp))
5350 {
5351 case ADDR_EXPR:
5352 return (staticp (TREE_OPERAND (exp, 0))
5353 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5354 || TREE_STATIC (exp));
5355
5356 case INDIRECT_REF:
5357 if (GET_CODE (x) == MEM)
5358 return 0;
5359 break;
5360
5361 case CALL_EXPR:
5362 exp_rtl = CALL_EXPR_RTL (exp);
5363 if (exp_rtl == 0)
5364 {
5365 /* Assume that the call will clobber all hard registers and
5366 all of memory. */
5367 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5368 || GET_CODE (x) == MEM)
5369 return 0;
5370 }
5371
5372 break;
5373
5374 case RTL_EXPR:
5375 /* If a sequence exists, we would have to scan every instruction
5376 in the sequence to see if it was safe. This is probably not
5377 worthwhile. */
5378 if (RTL_EXPR_SEQUENCE (exp))
5379 return 0;
5380
5381 exp_rtl = RTL_EXPR_RTL (exp);
5382 break;
5383
5384 case WITH_CLEANUP_EXPR:
5385 exp_rtl = RTL_EXPR_RTL (exp);
5386 break;
5387
5388 case CLEANUP_POINT_EXPR:
5389 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5390
5391 case SAVE_EXPR:
5392 exp_rtl = SAVE_EXPR_RTL (exp);
5393 if (exp_rtl)
5394 break;
5395
5396 /* This SAVE_EXPR might appear many times in the top-level
5397 safe_from_p() expression, and if it has a complex
5398 subexpression, examining it multiple times could result
5399 in a combinatorial explosion. E.g. on an Alpha
5400 running at least 200MHz, a Fortran test case compiled with
5401 optimization took about 28 minutes to compile -- even though
5402 it was only a few lines long, and the complicated line causing
5403 so much time to be spent in the earlier version of safe_from_p()
5404 had only 293 or so unique nodes.
5405
5406 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5407 where it is so we can turn it back in the top-level safe_from_p()
5408 when we're done. */
5409
5410 /* For now, don't bother re-sizing the array. */
5411 if (save_expr_count >= save_expr_size)
5412 return 0;
5413 save_expr_rewritten[save_expr_count++] = exp;
5414
5415 nops = tree_code_length[(int) SAVE_EXPR];
5416 for (i = 0; i < nops; i++)
5417 {
5418 tree operand = TREE_OPERAND (exp, i);
5419 if (operand == NULL_TREE)
5420 continue;
5421 TREE_SET_CODE (exp, ERROR_MARK);
5422 if (!safe_from_p (x, operand, 0))
5423 return 0;
5424 TREE_SET_CODE (exp, SAVE_EXPR);
5425 }
5426 TREE_SET_CODE (exp, ERROR_MARK);
5427 return 1;
5428
5429 case BIND_EXPR:
5430 /* The only operand we look at is operand 1. The rest aren't
5431 part of the expression. */
5432 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5433
5434 case METHOD_CALL_EXPR:
5435 /* This takes an rtx argument, but shouldn't appear here. */
5436 abort ();
5437
5438 default:
5439 break;
5440 }
5441
5442 /* If we have an rtx, we do not need to scan our operands. */
5443 if (exp_rtl)
5444 break;
5445
5446 nops = tree_code_length[(int) TREE_CODE (exp)];
5447 for (i = 0; i < nops; i++)
5448 if (TREE_OPERAND (exp, i) != 0
5449 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5450 return 0;
5451 }
5452
5453 /* If we have an rtl, find any enclosed object. Then see if we conflict
5454 with it. */
5455 if (exp_rtl)
5456 {
5457 if (GET_CODE (exp_rtl) == SUBREG)
5458 {
5459 exp_rtl = SUBREG_REG (exp_rtl);
5460 if (GET_CODE (exp_rtl) == REG
5461 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5462 return 0;
5463 }
5464
5465 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5466 are memory and EXP is not readonly. */
5467 return ! (rtx_equal_p (x, exp_rtl)
5468 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5469 && ! TREE_READONLY (exp)));
5470 }
5471
5472 /* If we reach here, it is safe. */
5473 return 1;
5474 }
5475
5476 /* Subroutine of expand_expr: return nonzero iff EXP is an
5477 expression whose type is statically determinable. */
5478
5479 static int
5480 fixed_type_p (exp)
5481 tree exp;
5482 {
5483 if (TREE_CODE (exp) == PARM_DECL
5484 || TREE_CODE (exp) == VAR_DECL
5485 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5486 || TREE_CODE (exp) == COMPONENT_REF
5487 || TREE_CODE (exp) == ARRAY_REF)
5488 return 1;
5489 return 0;
5490 }
5491
5492 /* Subroutine of expand_expr: return rtx if EXP is a
5493 variable or parameter; else return 0. */
5494
5495 static rtx
5496 var_rtx (exp)
5497 tree exp;
5498 {
5499 STRIP_NOPS (exp);
5500 switch (TREE_CODE (exp))
5501 {
5502 case PARM_DECL:
5503 case VAR_DECL:
5504 return DECL_RTL (exp);
5505 default:
5506 return 0;
5507 }
5508 }
5509
5510 #ifdef MAX_INTEGER_COMPUTATION_MODE
5511 void
5512 check_max_integer_computation_mode (exp)
5513 tree exp;
5514 {
5515 enum tree_code code;
5516 enum machine_mode mode;
5517
5518 /* Strip any NOPs that don't change the mode. */
5519 STRIP_NOPS (exp);
5520 code = TREE_CODE (exp);
5521
5522 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5523 if (code == NOP_EXPR
5524 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5525 return;
5526
5527 /* First check the type of the overall operation. We need only look at
5528 unary, binary and relational operations. */
5529 if (TREE_CODE_CLASS (code) == '1'
5530 || TREE_CODE_CLASS (code) == '2'
5531 || TREE_CODE_CLASS (code) == '<')
5532 {
5533 mode = TYPE_MODE (TREE_TYPE (exp));
5534 if (GET_MODE_CLASS (mode) == MODE_INT
5535 && mode > MAX_INTEGER_COMPUTATION_MODE)
5536 fatal ("unsupported wide integer operation");
5537 }
5538
5539 /* Check operand of a unary op. */
5540 if (TREE_CODE_CLASS (code) == '1')
5541 {
5542 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5543 if (GET_MODE_CLASS (mode) == MODE_INT
5544 && mode > MAX_INTEGER_COMPUTATION_MODE)
5545 fatal ("unsupported wide integer operation");
5546 }
5547
5548 /* Check operands of a binary/comparison op. */
5549 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5550 {
5551 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5552 if (GET_MODE_CLASS (mode) == MODE_INT
5553 && mode > MAX_INTEGER_COMPUTATION_MODE)
5554 fatal ("unsupported wide integer operation");
5555
5556 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5557 if (GET_MODE_CLASS (mode) == MODE_INT
5558 && mode > MAX_INTEGER_COMPUTATION_MODE)
5559 fatal ("unsupported wide integer operation");
5560 }
5561 }
5562 #endif
5563
5564 \f
5565 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5566 has any readonly fields. If any of the fields have types that
5567 contain readonly fields, return true as well. */
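/* For example, both "struct A { const int i; }" and
   "struct B { struct A a; }" have readonly fields in this sense, since the
   latter's field has a type that contains one.  */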
5568
5569 static int
5570 readonly_fields_p (type)
5571 tree type;
5572 {
5573 tree field;
5574
5575 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5576 if (TREE_READONLY (field)
5577 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5578 && readonly_fields_p (TREE_TYPE (field))))
5579 return 1;
5580
5581 return 0;
5582 }
5583 \f
5584 /* expand_expr: generate code for computing expression EXP.
5585 An rtx for the computed value is returned. The value is never null.
5586 In the case of a void EXP, const0_rtx is returned.
5587
5588 The value may be stored in TARGET if TARGET is nonzero.
5589 TARGET is just a suggestion; callers must assume that
5590 the rtx returned may not be the same as TARGET.
5591
5592 If TARGET is CONST0_RTX, it means that the value will be ignored.
5593
5594 If TMODE is not VOIDmode, it suggests generating the
5595 result in mode TMODE. But this is done only when convenient.
5596 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5597 TMODE is just a suggestion; callers must assume that
5598 the rtx returned may not have mode TMODE.
5599
5600 Note that TARGET may have neither TMODE nor MODE. In that case, it
5601 probably will not be used.
5602
5603 If MODIFIER is EXPAND_SUM then when EXP is an addition
5604 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5605 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5606 products as above, or REG or MEM, or constant.
5607 Ordinarily in such cases we would output mul or add instructions
5608 and then return a pseudo reg containing the sum.
5609
5610 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5611 it also marks a label as absolutely required (it can't be dead).
5612 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5613 This is used for outputting expressions used in initializers.
5614
5615 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5616 with a constant address even if that address is not normally legitimate.
5617 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
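/* For example, under EXPAND_SUM an address computation such as a[i] for a
   file-scope array of 4-byte elements might come back as
   (plus (symbol_ref "a") (mult (reg n) (const_int 4)))
   instead of being forced into a pseudo register; the caller can then try
   to fold the whole sum into a single addressing mode.  */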
5618
5619 rtx
5620 expand_expr (exp, target, tmode, modifier)
5621 register tree exp;
5622 rtx target;
5623 enum machine_mode tmode;
5624 enum expand_modifier modifier;
5625 {
5626 register rtx op0, op1, temp;
5627 tree type = TREE_TYPE (exp);
5628 int unsignedp = TREE_UNSIGNED (type);
5629 register enum machine_mode mode;
5630 register enum tree_code code = TREE_CODE (exp);
5631 optab this_optab;
5632 rtx subtarget, original_target;
5633 int ignore;
5634 tree context;
5635 /* Used by check-memory-usage to make modifier read only. */
5636 enum expand_modifier ro_modifier;
5637
5638 /* Handle ERROR_MARK before anybody tries to access its type. */
5639 if (TREE_CODE (exp) == ERROR_MARK)
5640 {
5641 op0 = CONST0_RTX (tmode);
5642 if (op0 != 0)
5643 return op0;
5644 return const0_rtx;
5645 }
5646
5647 mode = TYPE_MODE (type);
5648 /* Use subtarget as the target for operand 0 of a binary operation. */
5649 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5650 original_target = target;
5651 ignore = (target == const0_rtx
5652 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5653 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5654 || code == COND_EXPR)
5655 && TREE_CODE (type) == VOID_TYPE));
5656
5657 /* Make a read-only version of the modifier. */
5658 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5659 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5660 ro_modifier = modifier;
5661 else
5662 ro_modifier = EXPAND_NORMAL;
5663
5664 /* Don't use hard regs as subtargets, because the combiner
5665 can only handle pseudo regs. */
5666 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5667 subtarget = 0;
5668 /* Avoid subtargets inside loops,
5669 since they hide some invariant expressions. */
5670 if (preserve_subexpressions_p ())
5671 subtarget = 0;
5672
5673 /* If we are going to ignore this result, we need only do something
5674 if there is a side-effect somewhere in the expression. If there
5675 is, short-circuit the most common cases here. Note that we must
5676 not call expand_expr with anything but const0_rtx in case this
5677 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5678
5679 if (ignore)
5680 {
5681 if (! TREE_SIDE_EFFECTS (exp))
5682 return const0_rtx;
5683
5684 /* Ensure we reference a volatile object even if value is ignored, but
5685 don't do this if all we are doing is taking its address. */
5686 if (TREE_THIS_VOLATILE (exp)
5687 && TREE_CODE (exp) != FUNCTION_DECL
5688 && mode != VOIDmode && mode != BLKmode
5689 && modifier != EXPAND_CONST_ADDRESS)
5690 {
5691 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5692 if (GET_CODE (temp) == MEM)
5693 temp = copy_to_reg (temp);
5694 return const0_rtx;
5695 }
5696
5697 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5698 || code == INDIRECT_REF || code == BUFFER_REF)
5699 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5700 VOIDmode, ro_modifier);
5701 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5702 || code == ARRAY_REF)
5703 {
5704 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5705 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5706 return const0_rtx;
5707 }
5708 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5709 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5710 /* If the second operand has no side effects, just evaluate
5711 the first. */
5712 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5713 VOIDmode, ro_modifier);
5714 else if (code == BIT_FIELD_REF)
5715 {
5716 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5717 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5718 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5719 return const0_rtx;
5720 }
5721 ;
5722 target = 0;
5723 }
5724
5725 #ifdef MAX_INTEGER_COMPUTATION_MODE
5726 /* Only check stuff here if the mode we want is different from the mode
5727 of the expression; if it's the same, check_max_integer_computation_mode
5728 will handle it. Do we really need to check this stuff at all? */
5729
5730 if (target
5731 && GET_MODE (target) != mode
5732 && TREE_CODE (exp) != INTEGER_CST
5733 && TREE_CODE (exp) != PARM_DECL
5734 && TREE_CODE (exp) != ARRAY_REF
5735 && TREE_CODE (exp) != COMPONENT_REF
5736 && TREE_CODE (exp) != BIT_FIELD_REF
5737 && TREE_CODE (exp) != INDIRECT_REF
5738 && TREE_CODE (exp) != CALL_EXPR
5739 && TREE_CODE (exp) != VAR_DECL
5740 && TREE_CODE (exp) != RTL_EXPR)
5741 {
5742 enum machine_mode mode = GET_MODE (target);
5743
5744 if (GET_MODE_CLASS (mode) == MODE_INT
5745 && mode > MAX_INTEGER_COMPUTATION_MODE)
5746 fatal ("unsupported wide integer operation");
5747 }
5748
5749 if (tmode != mode
5750 && TREE_CODE (exp) != INTEGER_CST
5751 && TREE_CODE (exp) != PARM_DECL
5752 && TREE_CODE (exp) != ARRAY_REF
5753 && TREE_CODE (exp) != COMPONENT_REF
5754 && TREE_CODE (exp) != BIT_FIELD_REF
5755 && TREE_CODE (exp) != INDIRECT_REF
5756 && TREE_CODE (exp) != VAR_DECL
5757 && TREE_CODE (exp) != CALL_EXPR
5758 && TREE_CODE (exp) != RTL_EXPR
5759 && GET_MODE_CLASS (tmode) == MODE_INT
5760 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5761 fatal ("unsupported wide integer operation");
5762
5763 check_max_integer_computation_mode (exp);
5764 #endif
5765
5766 /* If we will do cse, generate all results into pseudo registers
5767 since 1) that allows cse to find more things
5768 and 2) otherwise cse could produce an insn the machine
5769 cannot support. */
5770
5771 if (! cse_not_expected && mode != BLKmode && target
5772 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5773 target = subtarget;
5774
5775 switch (code)
5776 {
5777 case LABEL_DECL:
5778 {
5779 tree function = decl_function_context (exp);
5780 /* Handle using a label in a containing function. */
5781 if (function != current_function_decl
5782 && function != inline_function_decl && function != 0)
5783 {
5784 struct function *p = find_function_data (function);
5785 /* Allocate in the memory associated with the function
5786 that the label is in. */
5787 push_obstacks (p->function_obstack,
5788 p->function_maybepermanent_obstack);
5789
5790 p->expr->x_forced_labels
5791 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5792 p->expr->x_forced_labels);
5793 pop_obstacks ();
5794 }
5795 else
5796 {
5797 if (modifier == EXPAND_INITIALIZER)
5798 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5799 label_rtx (exp),
5800 forced_labels);
5801 }
5802
5803 temp = gen_rtx_MEM (FUNCTION_MODE,
5804 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5805 if (function != current_function_decl
5806 && function != inline_function_decl && function != 0)
5807 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5808 return temp;
5809 }
5810
5811 case PARM_DECL:
5812 if (DECL_RTL (exp) == 0)
5813 {
5814 error_with_decl (exp, "prior parameter's size depends on `%s'");
5815 return CONST0_RTX (mode);
5816 }
5817
5818 /* ... fall through ... */
5819
5820 case VAR_DECL:
5821 /* If a static var's type was incomplete when the decl was written,
5822 but the type is complete now, lay out the decl now. */
5823 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5824 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5825 {
5826 push_obstacks_nochange ();
5827 end_temporary_allocation ();
5828 layout_decl (exp, 0);
5829 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5830 pop_obstacks ();
5831 }
5832
5833 /* Although static-storage variables start off initialized, according to
5834 ANSI C, a memcpy could overwrite them with uninitialized values. So
5835 we check them too. This also lets us check for read-only variables
5836 accessed via a non-const declaration, in case it won't be detected
5837 any other way (e.g., in an embedded system or OS kernel without
5838 memory protection).
5839
5840 Aggregates are not checked here; they're handled elsewhere. */
5841 if (current_function && current_function_check_memory_usage
5842 && code == VAR_DECL
5843 && GET_CODE (DECL_RTL (exp)) == MEM
5844 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5845 {
5846 enum memory_use_mode memory_usage;
5847 memory_usage = get_memory_usage_from_modifier (modifier);
5848
5849 if (memory_usage != MEMORY_USE_DONT)
5850 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5851 XEXP (DECL_RTL (exp), 0), Pmode,
5852 GEN_INT (int_size_in_bytes (type)),
5853 TYPE_MODE (sizetype),
5854 GEN_INT (memory_usage),
5855 TYPE_MODE (integer_type_node));
5856 }
5857
5858 /* ... fall through ... */
5859
5860 case FUNCTION_DECL:
5861 case RESULT_DECL:
5862 if (DECL_RTL (exp) == 0)
5863 abort ();
5864
5865 /* Ensure the variable is marked as used even if it doesn't go through
5866 a parser. If it hasn't been used yet, write out an external
5867 definition. */
5868 if (! TREE_USED (exp))
5869 {
5870 assemble_external (exp);
5871 TREE_USED (exp) = 1;
5872 }
5873
5874 /* Show we haven't gotten RTL for this yet. */
5875 temp = 0;
5876
5877 /* Handle variables inherited from containing functions. */
5878 context = decl_function_context (exp);
5879
5880 /* We treat inline_function_decl as an alias for the current function
5881 because that is the inline function whose vars, types, etc.
5882 are being merged into the current function.
5883 See expand_inline_function. */
5884
5885 if (context != 0 && context != current_function_decl
5886 && context != inline_function_decl
5887 /* If var is static, we don't need a static chain to access it. */
5888 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5889 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5890 {
5891 rtx addr;
5892
5893 /* Mark as non-local and addressable. */
5894 DECL_NONLOCAL (exp) = 1;
5895 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5896 abort ();
5897 mark_addressable (exp);
5898 if (GET_CODE (DECL_RTL (exp)) != MEM)
5899 abort ();
5900 addr = XEXP (DECL_RTL (exp), 0);
5901 if (GET_CODE (addr) == MEM)
5902 addr = gen_rtx_MEM (Pmode,
5903 fix_lexical_addr (XEXP (addr, 0), exp));
5904 else
5905 addr = fix_lexical_addr (addr, exp);
5906 temp = change_address (DECL_RTL (exp), mode, addr);
5907 }
5908
5909 /* This is the case of an array whose size is to be determined
5910 from its initializer, while the initializer is still being parsed.
5911 See expand_decl. */
5912
5913 else if (GET_CODE (DECL_RTL (exp)) == MEM
5914 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5915 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5916 XEXP (DECL_RTL (exp), 0));
5917
5918 /* If DECL_RTL is memory, we are in the normal case: if either
5919 the address is not valid, or it is not a register and -fforce-addr
5920 is specified, get the address into a register. */
5921
5922 else if (GET_CODE (DECL_RTL (exp)) == MEM
5923 && modifier != EXPAND_CONST_ADDRESS
5924 && modifier != EXPAND_SUM
5925 && modifier != EXPAND_INITIALIZER
5926 && (! memory_address_p (DECL_MODE (exp),
5927 XEXP (DECL_RTL (exp), 0))
5928 || (flag_force_addr
5929 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5930 temp = change_address (DECL_RTL (exp), VOIDmode,
5931 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5932
5933 /* If we got something, return it. But first, set the alignment
5934 if the address is a register. */
5935 if (temp != 0)
5936 {
5937 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5938 mark_reg_pointer (XEXP (temp, 0),
5939 DECL_ALIGN (exp) / BITS_PER_UNIT);
5940
5941 return temp;
5942 }
5943
5944 /* If the mode of DECL_RTL does not match that of the decl, it
5945 must be a promoted value. We return a SUBREG of the wanted mode,
5946 but mark it so that we know that it was already extended. */
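/* For instance, on a target that promotes SImode variables into wider
   registers, DECL_RTL may be a DImode REG while the decl's mode is SImode;
   the SUBREG built below records the promotion so later code can avoid
   redundant extensions.  */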
5947
5948 if (GET_CODE (DECL_RTL (exp)) == REG
5949 && GET_MODE (DECL_RTL (exp)) != mode)
5950 {
5951 /* Get the signedness used for this variable. Ensure we get the
5952 same mode we got when the variable was declared. */
5953 if (GET_MODE (DECL_RTL (exp))
5954 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5955 abort ();
5956
5957 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5958 SUBREG_PROMOTED_VAR_P (temp) = 1;
5959 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5960 return temp;
5961 }
5962
5963 return DECL_RTL (exp);
5964
5965 case INTEGER_CST:
5966 return immed_double_const (TREE_INT_CST_LOW (exp),
5967 TREE_INT_CST_HIGH (exp),
5968 mode);
5969
5970 case CONST_DECL:
5971 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5972 EXPAND_MEMORY_USE_BAD);
5973
5974 case REAL_CST:
5975 /* If optimized, generate immediate CONST_DOUBLE
5976 which will be turned into memory by reload if necessary.
5977
5978 We used to force a register so that loop.c could see it. But
5979 this does not allow gen_* patterns to perform optimizations with
5980 the constants. It also produces two insns in cases like "x = 1.0;".
5981 On most machines, floating-point constants are not permitted in
5982 many insns, so we'd end up copying it to a register in any case.
5983
5984 Now, we do the copying in expand_binop, if appropriate. */
5985 return immed_real_const (exp);
5986
5987 case COMPLEX_CST:
5988 case STRING_CST:
5989 if (! TREE_CST_RTL (exp))
5990 output_constant_def (exp);
5991
5992 /* TREE_CST_RTL probably contains a constant address.
5993 On RISC machines where a constant address isn't valid,
5994 make some insns to get that address into a register. */
5995 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5996 && modifier != EXPAND_CONST_ADDRESS
5997 && modifier != EXPAND_INITIALIZER
5998 && modifier != EXPAND_SUM
5999 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6000 || (flag_force_addr
6001 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6002 return change_address (TREE_CST_RTL (exp), VOIDmode,
6003 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6004 return TREE_CST_RTL (exp);
6005
6006 case EXPR_WITH_FILE_LOCATION:
6007 {
6008 rtx to_return;
6009 char *saved_input_filename = input_filename;
6010 int saved_lineno = lineno;
6011 input_filename = EXPR_WFL_FILENAME (exp);
6012 lineno = EXPR_WFL_LINENO (exp);
6013 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6014 emit_line_note (input_filename, lineno);
6015 /* Possibly avoid switching back and forth here. */
6016 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6017 input_filename = saved_input_filename;
6018 lineno = saved_lineno;
6019 return to_return;
6020 }
6021
6022 case SAVE_EXPR:
6023 context = decl_function_context (exp);
6024
6025 /* If this SAVE_EXPR was at global context, assume we are an
6026 initialization function and move it into our context. */
6027 if (context == 0)
6028 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6029
6030 /* We treat inline_function_decl as an alias for the current function
6031 because that is the inline function whose vars, types, etc.
6032 are being merged into the current function.
6033 See expand_inline_function. */
6034 if (context == current_function_decl || context == inline_function_decl)
6035 context = 0;
6036
6037 /* If this is non-local, handle it. */
6038 if (context)
6039 {
6040 /* The following call just exists to abort if the context is
6041 not of a containing function. */
6042 find_function_data (context);
6043
6044 temp = SAVE_EXPR_RTL (exp);
6045 if (temp && GET_CODE (temp) == REG)
6046 {
6047 put_var_into_stack (exp);
6048 temp = SAVE_EXPR_RTL (exp);
6049 }
6050 if (temp == 0 || GET_CODE (temp) != MEM)
6051 abort ();
6052 return change_address (temp, mode,
6053 fix_lexical_addr (XEXP (temp, 0), exp));
6054 }
6055 if (SAVE_EXPR_RTL (exp) == 0)
6056 {
6057 if (mode == VOIDmode)
6058 temp = const0_rtx;
6059 else
6060 temp = assign_temp (type, 3, 0, 0);
6061
6062 SAVE_EXPR_RTL (exp) = temp;
6063 if (!optimize && GET_CODE (temp) == REG)
6064 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6065 save_expr_regs);
6066
6067 /* If the mode of TEMP does not match that of the expression, it
6068 must be a promoted value. We pass store_expr a SUBREG of the
6069 wanted mode but mark it so that we know that it was already
6070 extended. Note that `unsignedp' was modified above in
6071 this case. */
6072
6073 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6074 {
6075 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6076 SUBREG_PROMOTED_VAR_P (temp) = 1;
6077 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6078 }
6079
6080 if (temp == const0_rtx)
6081 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6082 EXPAND_MEMORY_USE_BAD);
6083 else
6084 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6085
6086 TREE_USED (exp) = 1;
6087 }
6088
6089 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6090 must be a promoted value. We return a SUBREG of the wanted mode,
6091 but mark it so that we know that it was already extended. */
6092
6093 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6094 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6095 {
6096 /* Compute the signedness and make the proper SUBREG. */
6097 promote_mode (type, mode, &unsignedp, 0);
6098 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6099 SUBREG_PROMOTED_VAR_P (temp) = 1;
6100 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6101 return temp;
6102 }
6103
6104 return SAVE_EXPR_RTL (exp);
6105
6106 case UNSAVE_EXPR:
6107 {
6108 rtx temp;
6109 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6110 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6111 return temp;
6112 }
6113
6114 case PLACEHOLDER_EXPR:
6115 {
6116 tree placeholder_expr;
6117
6118 /* If there is an object on the head of the placeholder list,
6119 see if some object in it is of type TYPE or is a pointer to it. For
6120 further information, see tree.def. */
6121 for (placeholder_expr = placeholder_list;
6122 placeholder_expr != 0;
6123 placeholder_expr = TREE_CHAIN (placeholder_expr))
6124 {
6125 tree need_type = TYPE_MAIN_VARIANT (type);
6126 tree object = 0;
6127 tree old_list = placeholder_list;
6128 tree elt;
6129
6130 /* Find the outermost reference that is of the type we want.
6131 If none, see if any object has a type that is a pointer to
6132 the type we want. */
6133 for (elt = TREE_PURPOSE (placeholder_expr);
6134 elt != 0 && object == 0;
6135 elt
6136 = ((TREE_CODE (elt) == COMPOUND_EXPR
6137 || TREE_CODE (elt) == COND_EXPR)
6138 ? TREE_OPERAND (elt, 1)
6139 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6140 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6141 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6142 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6143 ? TREE_OPERAND (elt, 0) : 0))
6144 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6145 object = elt;
6146
6147 for (elt = TREE_PURPOSE (placeholder_expr);
6148 elt != 0 && object == 0;
6149 elt
6150 = ((TREE_CODE (elt) == COMPOUND_EXPR
6151 || TREE_CODE (elt) == COND_EXPR)
6152 ? TREE_OPERAND (elt, 1)
6153 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6154 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6155 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6156 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6157 ? TREE_OPERAND (elt, 0) : 0))
6158 if (POINTER_TYPE_P (TREE_TYPE (elt))
6159 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6160 == need_type))
6161 object = build1 (INDIRECT_REF, need_type, elt);
6162
6163 if (object != 0)
6164 {
6165 /* Expand this object skipping the list entries before
6166 it was found in case it is also a PLACEHOLDER_EXPR.
6167 In that case, we want to translate it using subsequent
6168 entries. */
6169 placeholder_list = TREE_CHAIN (placeholder_expr);
6170 temp = expand_expr (object, original_target, tmode,
6171 ro_modifier);
6172 placeholder_list = old_list;
6173 return temp;
6174 }
6175 }
6176 }
6177
6178 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6179 abort ();
6180
6181 case WITH_RECORD_EXPR:
6182 /* Put the object on the placeholder list, expand our first operand,
6183 and pop the list. */
6184 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6185 placeholder_list);
6186 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6187 tmode, ro_modifier);
6188 placeholder_list = TREE_CHAIN (placeholder_list);
6189 return target;
6190
6191 case GOTO_EXPR:
6192 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6193 expand_goto (TREE_OPERAND (exp, 0));
6194 else
6195 expand_computed_goto (TREE_OPERAND (exp, 0));
6196 return const0_rtx;
6197
6198 case EXIT_EXPR:
6199 expand_exit_loop_if_false (NULL_PTR,
6200 invert_truthvalue (TREE_OPERAND (exp, 0)));
6201 return const0_rtx;
6202
6203 case LABELED_BLOCK_EXPR:
6204 if (LABELED_BLOCK_BODY (exp))
6205 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6206 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6207 return const0_rtx;
6208
6209 case EXIT_BLOCK_EXPR:
6210 if (EXIT_BLOCK_RETURN (exp))
6211 sorry ("returned value in block_exit_expr");
6212 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6213 return const0_rtx;
6214
6215 case LOOP_EXPR:
6216 push_temp_slots ();
6217 expand_start_loop (1);
6218 expand_expr_stmt (TREE_OPERAND (exp, 0));
6219 expand_end_loop ();
6220 pop_temp_slots ();
6221
6222 return const0_rtx;
6223
6224 case BIND_EXPR:
6225 {
6226 tree vars = TREE_OPERAND (exp, 0);
6227 int vars_need_expansion = 0;
6228
6229 /* Need to open a binding contour here because
6230 if there are any cleanups they must be contained here. */
6231 expand_start_bindings (2);
6232
6233 /* Mark the corresponding BLOCK for output in its proper place. */
6234 if (TREE_OPERAND (exp, 2) != 0
6235 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6236 insert_block (TREE_OPERAND (exp, 2));
6237
6238 /* If VARS have not yet been expanded, expand them now. */
6239 while (vars)
6240 {
6241 if (DECL_RTL (vars) == 0)
6242 {
6243 vars_need_expansion = 1;
6244 expand_decl (vars);
6245 }
6246 expand_decl_init (vars);
6247 vars = TREE_CHAIN (vars);
6248 }
6249
6250 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6251
6252 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6253
6254 return temp;
6255 }
6256
6257 case RTL_EXPR:
6258 if (RTL_EXPR_SEQUENCE (exp))
6259 {
6260 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6261 abort ();
6262 emit_insns (RTL_EXPR_SEQUENCE (exp));
6263 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6264 }
6265 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6266 free_temps_for_rtl_expr (exp);
6267 return RTL_EXPR_RTL (exp);
6268
6269 case CONSTRUCTOR:
6270 /* If we don't need the result, just ensure we evaluate any
6271 subexpressions. */
6272 if (ignore)
6273 {
6274 tree elt;
6275 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6276 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6277 EXPAND_MEMORY_USE_BAD);
6278 return const0_rtx;
6279 }
6280
6281 /* All elts simple constants => refer to a constant in memory. But
6282 if this is a non-BLKmode mode, let it store a field at a time
6283 since that should make a CONST_INT or CONST_DOUBLE when we
6284 fold. Likewise, if we have a target we can use, it is best to
6285 store directly into the target unless the type is large enough
6286 that memcpy will be used. If we are making an initializer and
6287 all operands are constant, put it in memory as well. */
6288 else if ((TREE_STATIC (exp)
6289 && ((mode == BLKmode
6290 && ! (target != 0 && safe_from_p (target, exp, 1)))
6291 || TREE_ADDRESSABLE (exp)
6292 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6293 && (!MOVE_BY_PIECES_P
6294 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6295 TYPE_ALIGN (type) / BITS_PER_UNIT))
6296 && ! mostly_zeros_p (exp))))
6297 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6298 {
6299 rtx constructor = output_constant_def (exp);
6300 if (modifier != EXPAND_CONST_ADDRESS
6301 && modifier != EXPAND_INITIALIZER
6302 && modifier != EXPAND_SUM
6303 && (! memory_address_p (GET_MODE (constructor),
6304 XEXP (constructor, 0))
6305 || (flag_force_addr
6306 && GET_CODE (XEXP (constructor, 0)) != REG)))
6307 constructor = change_address (constructor, VOIDmode,
6308 XEXP (constructor, 0));
6309 return constructor;
6310 }
6311
6312 else
6313 {
6314 /* Handle calls that pass values in multiple non-contiguous
6315 locations. The Irix 6 ABI has examples of this. */
6316 if (target == 0 || ! safe_from_p (target, exp, 1)
6317 || GET_CODE (target) == PARALLEL)
6318 {
6319 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6320 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6321 else
6322 target = assign_temp (type, 0, 1, 1);
6323 }
6324
6325 if (TREE_READONLY (exp))
6326 {
6327 if (GET_CODE (target) == MEM)
6328 target = copy_rtx (target);
6329
6330 RTX_UNCHANGING_P (target) = 1;
6331 }
6332
6333 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6334 return target;
6335 }
6336
6337 case INDIRECT_REF:
6338 {
6339 tree exp1 = TREE_OPERAND (exp, 0);
6340 tree exp2;
6341 tree index;
6342 tree string = string_constant (exp1, &index);
6343 int i;
6344
6345 /* Try to optimize reads from const strings. */
6346 if (string
6347 && TREE_CODE (string) == STRING_CST
6348 && TREE_CODE (index) == INTEGER_CST
6349 && !TREE_INT_CST_HIGH (index)
6350 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6351 && GET_MODE_CLASS (mode) == MODE_INT
6352 && GET_MODE_SIZE (mode) == 1
6353 && modifier != EXPAND_MEMORY_USE_WO)
6354 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6355
6356 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6357 op0 = memory_address (mode, op0);
6358
6359 if (current_function && current_function_check_memory_usage
6360 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6361 {
6362 enum memory_use_mode memory_usage;
6363 memory_usage = get_memory_usage_from_modifier (modifier);
6364
6365 if (memory_usage != MEMORY_USE_DONT)
6366 {
6367 in_check_memory_usage = 1;
6368 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6369 op0, Pmode,
6370 GEN_INT (int_size_in_bytes (type)),
6371 TYPE_MODE (sizetype),
6372 GEN_INT (memory_usage),
6373 TYPE_MODE (integer_type_node));
6374 in_check_memory_usage = 0;
6375 }
6376 }
6377
6378 temp = gen_rtx_MEM (mode, op0);
6379 /* If address was computed by addition,
6380 mark this as an element of an aggregate. */
6381 if (TREE_CODE (exp1) == PLUS_EXPR
6382 || (TREE_CODE (exp1) == SAVE_EXPR
6383 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6384 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6385 || (TREE_CODE (exp1) == ADDR_EXPR
6386 && (exp2 = TREE_OPERAND (exp1, 0))
6387 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6388 MEM_SET_IN_STRUCT_P (temp, 1);
6389
6390 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6391 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6392
6393 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6394 here, because, in C and C++, the fact that a location is accessed
6395 through a pointer to const does not mean that the value there can
6396 never change. Languages where it can never change should
6397 also set TREE_STATIC. */
6398 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6399
6400 /* If we are writing to this object and its type is a record with
6401 readonly fields, we must mark it as readonly so it will
6402 conflict with readonly references to those fields. */
6403 if (modifier == EXPAND_MEMORY_USE_WO
6404 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6405 RTX_UNCHANGING_P (temp) = 1;
6406
6407 return temp;
6408 }
6409
6410 case ARRAY_REF:
6411 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6412 abort ();
6413
6414 {
6415 tree array = TREE_OPERAND (exp, 0);
6416 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6417 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6418 tree index = TREE_OPERAND (exp, 1);
6419 tree index_type = TREE_TYPE (index);
6420 HOST_WIDE_INT i;
6421
6422 /* Optimize the special-case of a zero lower bound.
6423
6424 We convert the low_bound to sizetype to avoid some problems
6425 with constant folding. (E.g. suppose the lower bound is 1,
6426 and its mode is QI. Without the conversion, (ARRAY
6427 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6428 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6429
6430 But sizetype isn't quite right either (especially if
6431 the lowbound is negative). FIXME */
6432
6433 if (! integer_zerop (low_bound))
6434 index = fold (build (MINUS_EXPR, index_type, index,
6435 convert (sizetype, low_bound)));
6436
6437 /* Fold an expression like: "foo"[2].
6438 This is not done in fold so it won't happen inside &.
6439 Don't fold if this is for wide characters since it's too
6440 difficult to do correctly and this is a very rare case. */
6441
6442 if (TREE_CODE (array) == STRING_CST
6443 && TREE_CODE (index) == INTEGER_CST
6444 && !TREE_INT_CST_HIGH (index)
6445 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6446 && GET_MODE_CLASS (mode) == MODE_INT
6447 && GET_MODE_SIZE (mode) == 1)
6448 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6449
6450 /* If this is a constant index into a constant array,
6451 just get the value from the array. Handle both the cases when
6452 we have an explicit constructor and when our operand is a variable
6453 that was declared const. */
6454
6455 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6456 {
6457 if (TREE_CODE (index) == INTEGER_CST
6458 && TREE_INT_CST_HIGH (index) == 0)
6459 {
6460 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6461
6462 i = TREE_INT_CST_LOW (index);
6463 while (elem && i--)
6464 elem = TREE_CHAIN (elem);
6465 if (elem)
6466 return expand_expr (fold (TREE_VALUE (elem)), target,
6467 tmode, ro_modifier);
6468 }
6469 }
6470
6471 else if (optimize >= 1
6472 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6473 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6474 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6475 {
6476 if (TREE_CODE (index) == INTEGER_CST)
6477 {
6478 tree init = DECL_INITIAL (array);
6479
6480 i = TREE_INT_CST_LOW (index);
6481 if (TREE_CODE (init) == CONSTRUCTOR)
6482 {
6483 tree elem = CONSTRUCTOR_ELTS (init);
6484
6485 while (elem
6486 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6487 elem = TREE_CHAIN (elem);
6488 if (elem)
6489 return expand_expr (fold (TREE_VALUE (elem)), target,
6490 tmode, ro_modifier);
6491 }
6492 else if (TREE_CODE (init) == STRING_CST
6493 && TREE_INT_CST_HIGH (index) == 0
6494 && (TREE_INT_CST_LOW (index)
6495 < TREE_STRING_LENGTH (init)))
6496 return (GEN_INT
6497 (TREE_STRING_POINTER
6498 (init)[TREE_INT_CST_LOW (index)]));
6499 }
6500 }
6501 }
6502
6503 /* ... fall through ... */
6504
6505 case COMPONENT_REF:
6506 case BIT_FIELD_REF:
6507 /* If the operand is a CONSTRUCTOR, we can just extract the
6508 appropriate field if it is present. Don't do this if we have
6509 already written the data since we want to refer to that copy
6510 and varasm.c assumes that's what we'll do. */
6511 if (code != ARRAY_REF
6512 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6513 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6514 {
6515 tree elt;
6516
6517 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6518 elt = TREE_CHAIN (elt))
6519 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6520 /* We can normally use the value of the field in the
6521 CONSTRUCTOR. However, if this is a bitfield in
6522 an integral mode that we can fit in a HOST_WIDE_INT,
6523 we must mask only the number of bits in the bitfield,
6524 since this is done implicitly by the constructor. If
6525 the bitfield does not meet either of those conditions,
6526 we can't do this optimization. */
6527 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6528 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6529 == MODE_INT)
6530 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6531 <= HOST_BITS_PER_WIDE_INT))))
6532 {
6533 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6534 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6535 {
6536 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6537
6538 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6539 {
6540 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6541 op0 = expand_and (op0, op1, target);
6542 }
6543 else
6544 {
6545 enum machine_mode imode
6546 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6547 tree count
6548 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6549 0);
6550
6551 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6552 target, 0);
6553 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6554 target, 0);
6555 }
6556 }
6557
6558 return op0;
6559 }
6560 }
6561
6562 {
6563 enum machine_mode mode1;
6564 int bitsize;
6565 int bitpos;
6566 tree offset;
6567 int volatilep = 0;
6568 int alignment;
6569 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6570 &mode1, &unsignedp, &volatilep,
6571 &alignment);
6572
6573 /* If we got back the original object, something is wrong. Perhaps
6574 we are evaluating an expression too early. In any event, don't
6575 infinitely recurse. */
6576 if (tem == exp)
6577 abort ();
6578
6579 /* If TEM's type is a union of variable size, pass TARGET to the inner
6580 computation, since it will need a temporary and TARGET is known
6581 to be adequate for one. This occurs in unchecked conversion in Ada. */
6582
6583 op0 = expand_expr (tem,
6584 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6585 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6586 != INTEGER_CST)
6587 ? target : NULL_RTX),
6588 VOIDmode,
6589 (modifier == EXPAND_INITIALIZER
6590 || modifier == EXPAND_CONST_ADDRESS)
6591 ? modifier : EXPAND_NORMAL);
6592
6593 /* If this is a constant, put it into a register if it is a
6594 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6595 if (CONSTANT_P (op0))
6596 {
6597 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6598 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6599 && offset == 0)
6600 op0 = force_reg (mode, op0);
6601 else
6602 op0 = validize_mem (force_const_mem (mode, op0));
6603 }
6604
6605 if (offset != 0)
6606 {
6607 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6608
6609 /* If this object is in a register, put it into memory.
6610 This case can't occur in C, but can in Ada if we have
6611 unchecked conversion of an expression from a scalar type to
6612 an array or record type. */
6613 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6614 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6615 {
6616 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6617
6618 mark_temp_addr_taken (memloc);
6619 emit_move_insn (memloc, op0);
6620 op0 = memloc;
6621 }
6622
6623 if (GET_CODE (op0) != MEM)
6624 abort ();
6625
6626 if (GET_MODE (offset_rtx) != ptr_mode)
6627 {
6628 #ifdef POINTERS_EXTEND_UNSIGNED
6629 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6630 #else
6631 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6632 #endif
6633 }
6634
6635 /* A constant address in OP0 can have VOIDmode; we must not try
6636 to call force_reg in that case, so avoid it. */
6637 if (GET_CODE (op0) == MEM
6638 && GET_MODE (op0) == BLKmode
6639 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6640 && bitsize != 0
6641 && (bitpos % bitsize) == 0
6642 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6643 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6644 {
6645 rtx temp = change_address (op0, mode1,
6646 plus_constant (XEXP (op0, 0),
6647 (bitpos /
6648 BITS_PER_UNIT)));
6649 if (GET_CODE (XEXP (temp, 0)) == REG)
6650 op0 = temp;
6651 else
6652 op0 = change_address (op0, mode1,
6653 force_reg (GET_MODE (XEXP (temp, 0)),
6654 XEXP (temp, 0)));
6655 bitpos = 0;
6656 }
6657
6658
6659 op0 = change_address (op0, VOIDmode,
6660 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6661 force_reg (ptr_mode,
6662 offset_rtx)));
6663 }
6664
6665 /* Don't forget about volatility even if this is a bitfield. */
6666 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6667 {
6668 op0 = copy_rtx (op0);
6669 MEM_VOLATILE_P (op0) = 1;
6670 }
6671
6672 /* Check the access. */
6673 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6674 {
6675 enum memory_use_mode memory_usage;
6676 memory_usage = get_memory_usage_from_modifier (modifier);
6677
6678 if (memory_usage != MEMORY_USE_DONT)
6679 {
6680 rtx to;
6681 int size;
6682
6683 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6684 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6685
6686 /* Check the access right of the pointer. */
6687 if (size > BITS_PER_UNIT)
6688 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6689 to, Pmode,
6690 GEN_INT (size / BITS_PER_UNIT),
6691 TYPE_MODE (sizetype),
6692 GEN_INT (memory_usage),
6693 TYPE_MODE (integer_type_node));
6694 }
6695 }
6696
6697 /* In cases where an aligned union has an unaligned object
6698 as a field, we might be extracting a BLKmode value from
6699 an integer-mode (e.g., SImode) object. Handle this case
6700 by doing the extract into an object as wide as the field
6701 (which we know to be the width of a basic mode), then
6702 storing into memory, and changing the mode to BLKmode.
6703 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6704 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6705 if (mode1 == VOIDmode
6706 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6707 || (modifier != EXPAND_CONST_ADDRESS
6708 && modifier != EXPAND_INITIALIZER
6709 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6710 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6711 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6712 /* If the field isn't aligned enough to fetch as a memref,
6713 fetch it as a bit field. */
6714 || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
6715 && ((TYPE_ALIGN (TREE_TYPE (tem))
6716 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6717 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6718 || (modifier != EXPAND_CONST_ADDRESS
6719 && modifier != EXPAND_INITIALIZER
6720 && mode == BLKmode
6721 && SLOW_UNALIGNED_ACCESS
6722 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6723 || bitpos % TYPE_ALIGN (type) != 0)))
6724 {
6725 enum machine_mode ext_mode = mode;
6726
6727 if (ext_mode == BLKmode
6728 && ! (target != 0 && GET_CODE (op0) == MEM
6729 && GET_CODE (target) == MEM
6730 && bitpos % BITS_PER_UNIT == 0))
6731 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6732
6733 if (ext_mode == BLKmode)
6734 {
6735 /* In this case, BITPOS must start at a byte boundary and
6736 TARGET, if specified, must be a MEM. */
6737 if (GET_CODE (op0) != MEM
6738 || (target != 0 && GET_CODE (target) != MEM)
6739 || bitpos % BITS_PER_UNIT != 0)
6740 abort ();
6741
6742 op0 = change_address (op0, VOIDmode,
6743 plus_constant (XEXP (op0, 0),
6744 bitpos / BITS_PER_UNIT));
6745 if (target == 0)
6746 target = assign_temp (type, 0, 1, 1);
6747
6748 emit_block_move (target, op0,
6749 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6750 / BITS_PER_UNIT),
6751 1);
6752
6753 return target;
6754 }
6755
6756 op0 = validize_mem (op0);
6757
6758 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6759 mark_reg_pointer (XEXP (op0, 0), alignment);
6760
6761 op0 = extract_bit_field (op0, bitsize, bitpos,
6762 unsignedp, target, ext_mode, ext_mode,
6763 alignment,
6764 int_size_in_bytes (TREE_TYPE (tem)));
6765
6766 /* If the result is a record type and BITSIZE is narrower than
6767 the mode of OP0, an integral mode, and this is a big endian
6768 machine, we must put the field into the high-order bits. */
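/* (For a 3-bit field held in an SImode OP0, that means shifting
   left by 32 - 3 = 29 bits.)  */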
6769 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6770 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6771 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6772 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6773 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6774 - bitsize),
6775 op0, 1);
6776
6777 if (mode == BLKmode)
6778 {
6779 rtx new = assign_stack_temp (ext_mode,
6780 bitsize / BITS_PER_UNIT, 0);
6781
6782 emit_move_insn (new, op0);
6783 op0 = copy_rtx (new);
6784 PUT_MODE (op0, BLKmode);
6785 MEM_SET_IN_STRUCT_P (op0, 1);
6786 }
6787
6788 return op0;
6789 }
6790
6791 /* If the result is BLKmode, use that to access the object
6792 now as well. */
6793 if (mode == BLKmode)
6794 mode1 = BLKmode;
6795
6796 /* Get a reference to just this component. */
6797 if (modifier == EXPAND_CONST_ADDRESS
6798 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6799 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6800 (bitpos / BITS_PER_UNIT)));
6801 else
6802 op0 = change_address (op0, mode1,
6803 plus_constant (XEXP (op0, 0),
6804 (bitpos / BITS_PER_UNIT)));
6805
6806 if (GET_CODE (op0) == MEM)
6807 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6808
6809 if (GET_CODE (XEXP (op0, 0)) == REG)
6810 mark_reg_pointer (XEXP (op0, 0), alignment);
6811
6812 MEM_SET_IN_STRUCT_P (op0, 1);
6813 MEM_VOLATILE_P (op0) |= volatilep;
6814 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6815 || modifier == EXPAND_CONST_ADDRESS
6816 || modifier == EXPAND_INITIALIZER)
6817 return op0;
6818 else if (target == 0)
6819 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6820
6821 convert_move (target, op0, unsignedp);
6822 return target;
6823 }
6824
6825 /* Intended for a reference to a buffer of a file-object in Pascal.
6826 But it's not certain that a special tree code will really be
6827 necessary for these. INDIRECT_REF might work for them. */
6828 case BUFFER_REF:
6829 abort ();
6830
6831 case IN_EXPR:
6832 {
6833 /* Pascal set IN expression.
6834
6835 Algorithm:
6836 rlo = set_low - (set_low%bits_per_word);
6837 the_word = set [ (index - rlo)/bits_per_word ];
6838 bit_index = index % bits_per_word;
6839 bitmask = 1 << bit_index;
6840 return !!(the_word & bitmask); */
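/* As a concrete instance of the algorithm above, taken with 8-bit units:
   for index 11 and set_low 0, rlo is 0, the interesting unit is
   set[(11 - 0) / 8] = set[1], bit_index is 11 % 8 = 3, bitmask is
   1 << 3 = 8, and the result is nonzero iff that bit is set.  */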
6841
6842 tree set = TREE_OPERAND (exp, 0);
6843 tree index = TREE_OPERAND (exp, 1);
6844 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6845 tree set_type = TREE_TYPE (set);
6846 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6847 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6848 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6849 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6850 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6851 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6852 rtx setaddr = XEXP (setval, 0);
6853 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6854 rtx rlow;
6855 rtx diff, quo, rem, addr, bit, result;
6856
6857 preexpand_calls (exp);
6858
6859 /* If domain is empty, answer is no. Likewise if index is constant
6860 and out of bounds. */
6861 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6862 && TREE_CODE (set_low_bound) == INTEGER_CST
6863 && tree_int_cst_lt (set_high_bound, set_low_bound))
6864 || (TREE_CODE (index) == INTEGER_CST
6865 && TREE_CODE (set_low_bound) == INTEGER_CST
6866 && tree_int_cst_lt (index, set_low_bound))
6867 || (TREE_CODE (set_high_bound) == INTEGER_CST
6868 && TREE_CODE (index) == INTEGER_CST
6869 && tree_int_cst_lt (set_high_bound, index))))
6870 return const0_rtx;
6871
6872 if (target == 0)
6873 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6874
6875 /* If we get here, we have to generate the code for both cases
6876 (in range and out of range). */
6877
6878 op0 = gen_label_rtx ();
6879 op1 = gen_label_rtx ();
6880
6881 if (! (GET_CODE (index_val) == CONST_INT
6882 && GET_CODE (lo_r) == CONST_INT))
6883 {
6884 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6885 GET_MODE (index_val), iunsignedp, 0, op1);
6886 }
6887
6888 if (! (GET_CODE (index_val) == CONST_INT
6889 && GET_CODE (hi_r) == CONST_INT))
6890 {
6891 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6892 GET_MODE (index_val), iunsignedp, 0, op1);
6893 }
6894
6895 /* Calculate the element number of bit zero in the first word
6896 of the set. */
6897 if (GET_CODE (lo_r) == CONST_INT)
6898 rlow = GEN_INT (INTVAL (lo_r)
6899 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6900 else
6901 rlow = expand_binop (index_mode, and_optab, lo_r,
6902 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6903 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6904
6905 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6906 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6907
6908 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6909 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6910 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6911 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6912
6913 addr = memory_address (byte_mode,
6914 expand_binop (index_mode, add_optab, diff,
6915 setaddr, NULL_RTX, iunsignedp,
6916 OPTAB_LIB_WIDEN));
6917
6918 /* Extract the bit we want to examine. */
6919 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6920 gen_rtx_MEM (byte_mode, addr),
6921 make_tree (TREE_TYPE (index), rem),
6922 NULL_RTX, 1);
6923 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6924 GET_MODE (target) == byte_mode ? target : 0,
6925 1, OPTAB_LIB_WIDEN);
6926
6927 if (result != target)
6928 convert_move (target, result, 1);
6929
6930 /* Output the code to handle the out-of-range case. */
6931 emit_jump (op0);
6932 emit_label (op1);
6933 emit_move_insn (target, const0_rtx);
6934 emit_label (op0);
6935 return target;
6936 }
6937
6938 case WITH_CLEANUP_EXPR:
6939 if (RTL_EXPR_RTL (exp) == 0)
6940 {
6941 RTL_EXPR_RTL (exp)
6942 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6943 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6944
6945 /* That's it for this cleanup. */
6946 TREE_OPERAND (exp, 2) = 0;
6947 }
6948 return RTL_EXPR_RTL (exp);
6949
6950 case CLEANUP_POINT_EXPR:
6951 {
6952 /* Start a new binding layer that will keep track of all cleanup
6953 actions to be performed. */
6954 expand_start_bindings (2);
6955
6956 target_temp_slot_level = temp_slot_level;
6957
6958 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6959 /* If we're going to use this value, load it up now. */
6960 if (! ignore)
6961 op0 = force_not_mem (op0);
6962 preserve_temp_slots (op0);
6963 expand_end_bindings (NULL_TREE, 0, 0);
6964 }
6965 return op0;
6966
6967 case CALL_EXPR:
6968 /* Check for a built-in function. */
6969 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6970 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6971 == FUNCTION_DECL)
6972 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6973 return expand_builtin (exp, target, subtarget, tmode, ignore);
6974
6975 /* If this call was expanded already by preexpand_calls,
6976 just return the result we got. */
6977 if (CALL_EXPR_RTL (exp) != 0)
6978 return CALL_EXPR_RTL (exp);
6979
6980 return expand_call (exp, target, ignore);
6981
6982 case NON_LVALUE_EXPR:
6983 case NOP_EXPR:
6984 case CONVERT_EXPR:
6985 case REFERENCE_EXPR:
6986 if (TREE_CODE (type) == UNION_TYPE)
6987 {
6988 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6989
6990 /* If both input and output are BLKmode, this conversion
6991 isn't actually doing anything unless we need to make the
6992 alignment stricter. */
6993 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
6994 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
6995 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
6996 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
6997 modifier);
6998
6999 if (target == 0)
7000 {
7001 if (mode != BLKmode)
7002 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7003 else
7004 target = assign_temp (type, 0, 1, 1);
7005 }
7006
7007 if (GET_CODE (target) == MEM)
7008 /* Store data into beginning of memory target. */
7009 store_expr (TREE_OPERAND (exp, 0),
7010 change_address (target, TYPE_MODE (valtype), 0), 0);
7011
7012 else if (GET_CODE (target) == REG)
7013 /* Store this field into a union of the proper type. */
7014 store_field (target,
7015 MIN ((int_size_in_bytes (TREE_TYPE
7016 (TREE_OPERAND (exp, 0)))
7017 * BITS_PER_UNIT),
7018 GET_MODE_BITSIZE (mode)),
7019 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7020 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7021 else
7022 abort ();
7023
7024 /* Return the entire union. */
7025 return target;
7026 }
7027
7028 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7029 {
7030 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7031 ro_modifier);
7032
7033 /* If the signedness of the conversion differs and OP0 is
7034 a promoted SUBREG, clear that indication since we now
7035 have to do the proper extension. */
7036 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7037 && GET_CODE (op0) == SUBREG)
7038 SUBREG_PROMOTED_VAR_P (op0) = 0;
7039
7040 return op0;
7041 }
7042
7043 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7044 if (GET_MODE (op0) == mode)
7045 return op0;
7046
7047 /* If OP0 is a constant, just convert it into the proper mode. */
7048 if (CONSTANT_P (op0))
7049 return
7050 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7051 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7052
7053 if (modifier == EXPAND_INITIALIZER)
7054 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7055
7056 if (target == 0)
7057 return
7058 convert_to_mode (mode, op0,
7059 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7060 else
7061 convert_move (target, op0,
7062 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7063 return target;
7064
7065 case PLUS_EXPR:
7066 /* We come here from MINUS_EXPR when the second operand is a
7067 constant. */
7068 plus_expr:
7069 this_optab = add_optab;
7070
7071 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7072 something else, make sure we add the register to the constant and
7073 then to the other thing. This case can occur during strength
7074 reduction and doing it this way will produce better code if the
7075 frame pointer or argument pointer is eliminated.
7076
7077 fold-const.c will ensure that the constant is always in the inner
7078 PLUS_EXPR, so the only case we need to do anything about is if
7079 sp, ap, or fp is our second argument, in which case we must swap
7080 the innermost first argument and our second argument. */
7081
7082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7083 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7084 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7085 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7086 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7087 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7088 {
7089 tree t = TREE_OPERAND (exp, 1);
7090
7091 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7092 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7093 }
7094
7095 /* If the result is to be ptr_mode and we are adding an integer to
7096 something, we might be forming a constant. So try to use
7097 plus_constant. If it produces a sum and we can't accept it,
7098 use force_operand. This allows P = &ARR[const] to generate
7099 efficient code on machines where a SYMBOL_REF is not a valid
7100 address.
7101
7102 If this is an EXPAND_SUM call, always return the sum. */
7103 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7104 || mode == ptr_mode)
7105 {
7106 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7107 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7108 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7109 {
7110 rtx constant_part;
7111
7112 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7113 EXPAND_SUM);
7114 /* Use immed_double_const to ensure that the constant is
7115 truncated according to the mode of OP1, then sign extended
7116 to a HOST_WIDE_INT. Using the constant directly can result
7117 in non-canonical RTL in a 64x32 cross compile. */
7118 constant_part
7119 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7120 (HOST_WIDE_INT) 0,
7121 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7122 op1 = plus_constant (op1, INTVAL (constant_part));
7123 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7124 op1 = force_operand (op1, target);
7125 return op1;
7126 }
7127
7128 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7129 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7130 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7131 {
7132 rtx constant_part;
7133
7134 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7135 EXPAND_SUM);
7136 if (! CONSTANT_P (op0))
7137 {
7138 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7139 VOIDmode, modifier);
7140 /* Don't go to both_summands if modifier
7141 says it's not right to return a PLUS. */
7142 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7143 goto binop2;
7144 goto both_summands;
7145 }
7146 /* Use immed_double_const to ensure that the constant is
7147 truncated according to the mode of OP0, then sign extended
7148 to a HOST_WIDE_INT. Using the constant directly can result
7149 in non-canonical RTL in a 64x32 cross compile. */
7150 constant_part
7151 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7152 (HOST_WIDE_INT) 0,
7153 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7154 op0 = plus_constant (op0, INTVAL (constant_part));
7155 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7156 op0 = force_operand (op0, target);
7157 return op0;
7158 }
7159 }
7160
7161 /* No sense saving up arithmetic to be done
7162 if it's all in the wrong mode to form part of an address.
7163 And force_operand won't know whether to sign-extend or
7164 zero-extend. */
7165 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7166 || mode != ptr_mode)
7167 goto binop;
7168
7169 preexpand_calls (exp);
7170 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7171 subtarget = 0;
7172
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7174 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7175
7176 both_summands:
7177 /* Make sure any term that's a sum with a constant comes last. */
7178 if (GET_CODE (op0) == PLUS
7179 && CONSTANT_P (XEXP (op0, 1)))
7180 {
7181 temp = op0;
7182 op0 = op1;
7183 op1 = temp;
7184 }
7185 /* If adding to a sum including a constant,
7186 associate it to put the constant outside. */
7187 if (GET_CODE (op1) == PLUS
7188 && CONSTANT_P (XEXP (op1, 1)))
7189 {
7190 rtx constant_term = const0_rtx;
7191
7192 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7193 if (temp != 0)
7194 op0 = temp;
7195 /* Ensure that MULT comes first if there is one. */
7196 else if (GET_CODE (op0) == MULT)
7197 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7198 else
7199 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7200
7201 /* Let's also eliminate constants from op0 if possible. */
7202 op0 = eliminate_constant_term (op0, &constant_term);
7203
7204 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7205 their sum should be a constant. Form it into OP1, since the
7206 result we want will then be OP0 + OP1. */
7207
7208 temp = simplify_binary_operation (PLUS, mode, constant_term,
7209 XEXP (op1, 1));
7210 if (temp != 0)
7211 op1 = temp;
7212 else
7213 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7214 }
7215
7216 /* Put a constant term last and put a multiplication first. */
7217 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7218 temp = op1, op1 = op0, op0 = temp;
7219
7220 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7221 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7222
7223 case MINUS_EXPR:
7224 /* For initializers, we are allowed to return a MINUS of two
7225 symbolic constants. Here we handle all cases when both operands
7226 are constant. */
7227 /* Handle difference of two symbolic constants,
7228 for the sake of an initializer. */
7229 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7230 && really_constant_p (TREE_OPERAND (exp, 0))
7231 && really_constant_p (TREE_OPERAND (exp, 1)))
7232 {
7233 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7234 VOIDmode, ro_modifier);
7235 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7236 VOIDmode, ro_modifier);
7237
7238 /* If the last operand is a CONST_INT, use plus_constant of
7239 the negated constant. Else make the MINUS. */
7240 if (GET_CODE (op1) == CONST_INT)
7241 return plus_constant (op0, - INTVAL (op1));
7242 else
7243 return gen_rtx_MINUS (mode, op0, op1);
7244 }
7245 /* Convert A - const to A + (-const). */
7246 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7247 {
7248 tree negated = fold (build1 (NEGATE_EXPR, type,
7249 TREE_OPERAND (exp, 1)));
7250
7251 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7252 /* If we can't negate the constant in TYPE, leave it alone and
7253 expand_binop will negate it for us. We used to try to do it
7254 here in the signed version of TYPE, but that doesn't work
7255 on POINTER_TYPEs. */;
7256 else
7257 {
7258 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7259 goto plus_expr;
7260 }
7261 }
7262 this_optab = sub_optab;
7263 goto binop;
7264
7265 case MULT_EXPR:
7266 preexpand_calls (exp);
7267 /* If first operand is constant, swap them.
7268 Thus the following special case checks need only
7269 check the second operand. */
7270 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7271 {
7272 register tree t1 = TREE_OPERAND (exp, 0);
7273 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7274 TREE_OPERAND (exp, 1) = t1;
7275 }
7276
7277 /* Attempt to return something suitable for generating an
7278 indexed address, for machines that support that. */
7279
7280 if (modifier == EXPAND_SUM && mode == ptr_mode
7281 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7283 {
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7285 EXPAND_SUM);
7286
7287 /* Apply distributive law if OP0 is x+c. */
7288 if (GET_CODE (op0) == PLUS
7289 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7290 return
7291 gen_rtx_PLUS
7292 (mode,
7293 gen_rtx_MULT
7294 (mode, XEXP (op0, 0),
7295 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7296 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7297 * INTVAL (XEXP (op0, 1))));
7298
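	  /* Force OP0 into a register so that the MULT we return below is
	     a valid term for an indexed address.  */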
7299 if (GET_CODE (op0) != REG)
7300 op0 = force_operand (op0, NULL_RTX);
7301 if (GET_CODE (op0) != REG)
7302 op0 = copy_to_mode_reg (mode, op0);
7303
7304 return
7305 gen_rtx_MULT (mode, op0,
7306 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7307 }
7308
7309 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7310 subtarget = 0;
7311
7312 /* Check for multiplying things that have been extended
7313 from a narrower type. If this machine supports multiplying
7314 in that narrower type with a result in the desired type,
7315 do it that way, and avoid the explicit type-conversion. */
7316 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7317 && TREE_CODE (type) == INTEGER_TYPE
7318 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7319 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7320 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7321 && int_fits_type_p (TREE_OPERAND (exp, 1),
7322 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7323 /* Don't use a widening multiply if a shift will do. */
7324 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7325 > HOST_BITS_PER_WIDE_INT)
7326 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7327 ||
7328 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7329 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7330 ==
7331 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7332 /* If both operands are extended, they must either both
7333 be zero-extended or both be sign-extended. */
7334 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7335 ==
7336 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7337 {
7338 enum machine_mode innermode
7339 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7340 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7341 ? smul_widen_optab : umul_widen_optab);
7342 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7343 ? umul_widen_optab : smul_widen_optab);
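	  /* A widening multiply is usable only when the result mode is
	     exactly the next wider mode than that of the narrow operands.  */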
7344 if (mode == GET_MODE_WIDER_MODE (innermode))
7345 {
7346 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7347 {
7348 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7349 NULL_RTX, VOIDmode, 0);
7350 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7351 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7352 VOIDmode, 0);
7353 else
7354 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7355 NULL_RTX, VOIDmode, 0);
7356 goto binop2;
7357 }
7358 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7359 && innermode == word_mode)
7360 {
7361 rtx htem;
7362 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7363 NULL_RTX, VOIDmode, 0);
7364 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7365 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7366 VOIDmode, 0);
7367 else
7368 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7369 NULL_RTX, VOIDmode, 0);
7370 temp = expand_binop (mode, other_optab, op0, op1, target,
7371 unsignedp, OPTAB_LIB_WIDEN);
7372 htem = expand_mult_highpart_adjust (innermode,
7373 gen_highpart (innermode, temp),
7374 op0, op1,
7375 gen_highpart (innermode, temp),
7376 unsignedp);
7377 emit_move_insn (gen_highpart (innermode, temp), htem);
7378 return temp;
7379 }
7380 }
7381 }
7382 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7383 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7384 return expand_mult (mode, op0, op1, target, unsignedp);
7385
7386 case TRUNC_DIV_EXPR:
7387 case FLOOR_DIV_EXPR:
7388 case CEIL_DIV_EXPR:
7389 case ROUND_DIV_EXPR:
7390 case EXACT_DIV_EXPR:
7391 preexpand_calls (exp);
7392 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7393 subtarget = 0;
7394 /* Possible optimization: compute the dividend with EXPAND_SUM
7395 then, if the divisor is constant, we can optimize the case
7396 where some terms of the dividend have coefficients divisible by it. */
7397 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7398 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7399 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7400
7401 case RDIV_EXPR:
7402 this_optab = flodiv_optab;
7403 goto binop;
7404
7405 case TRUNC_MOD_EXPR:
7406 case FLOOR_MOD_EXPR:
7407 case CEIL_MOD_EXPR:
7408 case ROUND_MOD_EXPR:
7409 preexpand_calls (exp);
7410 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7411 subtarget = 0;
7412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7414 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7415
7416 case FIX_ROUND_EXPR:
7417 case FIX_FLOOR_EXPR:
7418 case FIX_CEIL_EXPR:
7419 abort (); /* Not used for C. */
7420
7421 case FIX_TRUNC_EXPR:
7422 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7423 if (target == 0)
7424 target = gen_reg_rtx (mode);
7425 expand_fix (target, op0, unsignedp);
7426 return target;
7427
7428 case FLOAT_EXPR:
7429 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7430 if (target == 0)
7431 target = gen_reg_rtx (mode);
7432 /* expand_float can't figure out what to do if FROM has VOIDmode.
7433 So give it the correct mode. With -O, cse will optimize this. */
7434 if (GET_MODE (op0) == VOIDmode)
7435 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7436 op0);
7437 expand_float (target, op0,
7438 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7439 return target;
7440
7441 case NEGATE_EXPR:
7442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7443 temp = expand_unop (mode, neg_optab, op0, target, 0);
7444 if (temp == 0)
7445 abort ();
7446 return temp;
7447
7448 case ABS_EXPR:
7449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7450
7451 /* Handle complex values specially. */
7452 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7453 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7454 return expand_complex_abs (mode, op0, target, unsignedp);
7455
7456 /* Unsigned abs is simply the operand. Testing here means we don't
7457 risk generating incorrect code below. */
7458 if (TREE_UNSIGNED (type))
7459 return op0;
7460
7461 return expand_abs (mode, op0, target,
7462 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7463
7464 case MAX_EXPR:
7465 case MIN_EXPR:
7466 target = original_target;
7467 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7468 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7469 || GET_MODE (target) != mode
7470 || (GET_CODE (target) == REG
7471 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7472 target = gen_reg_rtx (mode);
7473 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7475
7476 /* First try to do it with a special MIN or MAX instruction.
7477 If that does not win, use a conditional jump to select the proper
7478 value. */
7479 this_optab = (TREE_UNSIGNED (type)
7480 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7481 : (code == MIN_EXPR ? smin_optab : smax_optab));
7482
7483 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7484 OPTAB_WIDEN);
7485 if (temp != 0)
7486 return temp;
7487
7488 /* At this point, a MEM target is no longer useful; we will get better
7489 code without it. */
7490
7491 if (GET_CODE (target) == MEM)
7492 target = gen_reg_rtx (mode);
7493
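      /* No MIN/MAX instruction: copy OP0 into TARGET, then branch over the
	 store of OP1 below when OP0 is already the desired value.  */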
7494 if (target != op0)
7495 emit_move_insn (target, op0);
7496
7497 op0 = gen_label_rtx ();
7498
7499 /* If this mode is an integer too wide to compare properly,
7500 compare word by word. Rely on cse to optimize constant cases. */
7501 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7502 {
7503 if (code == MAX_EXPR)
7504 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7505 target, op1, NULL_RTX, op0);
7506 else
7507 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7508 op1, target, NULL_RTX, op0);
7509 }
7510 else
7511 {
7512 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7513 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7514 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7515 op0);
7516 }
7517 emit_move_insn (target, op1);
7518 emit_label (op0);
7519 return target;
7520
7521 case BIT_NOT_EXPR:
7522 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7523 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7524 if (temp == 0)
7525 abort ();
7526 return temp;
7527
7528 case FFS_EXPR:
7529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7530 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7531 if (temp == 0)
7532 abort ();
7533 return temp;
7534
7535 /* ??? Can optimize bitwise operations with one arg constant.
7536 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7537 and (a bitwise1 b) bitwise2 b (etc.)
7538 but that is probably not worthwhile. */
7539
7540 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7541 boolean values when we want in all cases to compute both of them. In
7542 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7543 as actual zero-or-1 values and then bitwise anding. In cases where
7544 there cannot be any side effects, better code would be made by
7545 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7546 how to recognize those cases. */
7547
7548 case TRUTH_AND_EXPR:
7549 case BIT_AND_EXPR:
7550 this_optab = and_optab;
7551 goto binop;
7552
7553 case TRUTH_OR_EXPR:
7554 case BIT_IOR_EXPR:
7555 this_optab = ior_optab;
7556 goto binop;
7557
7558 case TRUTH_XOR_EXPR:
7559 case BIT_XOR_EXPR:
7560 this_optab = xor_optab;
7561 goto binop;
7562
7563 case LSHIFT_EXPR:
7564 case RSHIFT_EXPR:
7565 case LROTATE_EXPR:
7566 case RROTATE_EXPR:
7567 preexpand_calls (exp);
7568 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7569 subtarget = 0;
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7571 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7572 unsignedp);
7573
7574 /* Could determine the answer when only additive constants differ. Also,
7575 the addition of one can be handled by changing the condition. */
7576 case LT_EXPR:
7577 case LE_EXPR:
7578 case GT_EXPR:
7579 case GE_EXPR:
7580 case EQ_EXPR:
7581 case NE_EXPR:
7582 preexpand_calls (exp);
7583 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7584 if (temp != 0)
7585 return temp;
7586
7587 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7588 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7589 && original_target
7590 && GET_CODE (original_target) == REG
7591 && (GET_MODE (original_target)
7592 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7593 {
7594 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7595 VOIDmode, 0);
7596
7597 if (temp != original_target)
7598 temp = copy_to_reg (temp);
7599
7600 op1 = gen_label_rtx ();
7601 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7602 GET_MODE (temp), unsignedp, 0, op1);
7603 emit_move_insn (temp, const1_rtx);
7604 emit_label (op1);
7605 return temp;
7606 }
7607
7608 /* If no set-flag instruction, must generate a conditional
7609 store into a temporary variable. Drop through
7610 and handle this like && and ||. */
7611
7612 case TRUTH_ANDIF_EXPR:
7613 case TRUTH_ORIF_EXPR:
7614 if (! ignore
7615 && (target == 0 || ! safe_from_p (target, exp, 1)
7616 /* Make sure we don't have a hard reg (such as function's return
7617 value) live across basic blocks, if not optimizing. */
7618 || (!optimize && GET_CODE (target) == REG
7619 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7620 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7621
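      /* Start with TARGET cleared to 0; if the expression turns out to be
	 true we fall through below and set it to 1.  */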
7622 if (target)
7623 emit_clr_insn (target);
7624
7625 op1 = gen_label_rtx ();
7626 jumpifnot (exp, op1);
7627
7628 if (target)
7629 emit_0_to_1_insn (target);
7630
7631 emit_label (op1);
7632 return ignore ? const0_rtx : target;
7633
7634 case TRUTH_NOT_EXPR:
7635 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7636 /* The parser is careful to generate TRUTH_NOT_EXPR
7637 only with operands that are always zero or one. */
7638 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7639 target, 1, OPTAB_LIB_WIDEN);
7640 if (temp == 0)
7641 abort ();
7642 return temp;
7643
7644 case COMPOUND_EXPR:
7645 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7646 emit_queue ();
7647 return expand_expr (TREE_OPERAND (exp, 1),
7648 (ignore ? const0_rtx : target),
7649 VOIDmode, 0);
7650
7651 case COND_EXPR:
7652 /* If we would have a "singleton" (see below) were it not for a
7653 conversion in each arm, bring that conversion back out. */
7654 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7655 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7656 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7657 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7658 {
7659 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7660 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7661
7662 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7663 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7664 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7665 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7666 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7667 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7668 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7669 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7670 return expand_expr (build1 (NOP_EXPR, type,
7671 build (COND_EXPR, TREE_TYPE (true),
7672 TREE_OPERAND (exp, 0),
7673 true, false)),
7674 target, tmode, modifier);
7675 }
7676
7677 {
7678 /* Note that COND_EXPRs whose type is a structure or union
7679 are required to be constructed to contain assignments to
7680 a temporary variable, so that we can evaluate them here
7681 for side effect only. If type is void, we must do likewise. */
7682
7683 /* If an arm of the branch requires a cleanup,
7684 only that cleanup is performed. */
7685
7686 tree singleton = 0;
7687 tree binary_op = 0, unary_op = 0;
7688
7689 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7690 convert it to our mode, if necessary. */
7691 if (integer_onep (TREE_OPERAND (exp, 1))
7692 && integer_zerop (TREE_OPERAND (exp, 2))
7693 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7694 {
7695 if (ignore)
7696 {
7697 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7698 ro_modifier);
7699 return const0_rtx;
7700 }
7701
7702 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7703 if (GET_MODE (op0) == mode)
7704 return op0;
7705
7706 if (target == 0)
7707 target = gen_reg_rtx (mode);
7708 convert_move (target, op0, unsignedp);
7709 return target;
7710 }
7711
7712 /* Check for X ? A + B : A. If we have this, we can copy A to the
7713 output and conditionally add B. Similarly for unary operations.
7714 Don't do this if X has side-effects because those side effects
7715 might affect A or B and the "?" operation is a sequence point in
7716 ANSI. (operand_equal_p tests for side effects.) */
7717
7718 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7719 && operand_equal_p (TREE_OPERAND (exp, 2),
7720 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7721 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7722 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7723 && operand_equal_p (TREE_OPERAND (exp, 1),
7724 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7725 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7726 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7727 && operand_equal_p (TREE_OPERAND (exp, 2),
7728 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7729 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7730 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7731 && operand_equal_p (TREE_OPERAND (exp, 1),
7732 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7733 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7734
7735 /* If we are not to produce a result, we have no target. Otherwise,
7736 if a target was specified use it; it will not be used as an
7737 intermediate target unless it is safe. If no target, use a
7738 temporary. */
7739
7740 if (ignore)
7741 temp = 0;
7742 else if (original_target
7743 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7744 || (singleton && GET_CODE (original_target) == REG
7745 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7746 && original_target == var_rtx (singleton)))
7747 && GET_MODE (original_target) == mode
7748 #ifdef HAVE_conditional_move
7749 && (! can_conditionally_move_p (mode)
7750 || GET_CODE (original_target) == REG
7751 || TREE_ADDRESSABLE (type))
7752 #endif
7753 && ! (GET_CODE (original_target) == MEM
7754 && MEM_VOLATILE_P (original_target)))
7755 temp = original_target;
7756 else if (TREE_ADDRESSABLE (type))
7757 abort ();
7758 else
7759 temp = assign_temp (type, 0, 0, 1);
7760
7761 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7762 do the test of X as a store-flag operation, do this as
7763 A + ((X != 0) << log C). Similarly for other simple binary
7764 operators. Only do this for C == 1 if BRANCH_COST is low. */
7765 if (temp && singleton && binary_op
7766 && (TREE_CODE (binary_op) == PLUS_EXPR
7767 || TREE_CODE (binary_op) == MINUS_EXPR
7768 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7769 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7770 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7771 : integer_onep (TREE_OPERAND (binary_op, 1)))
7772 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7773 {
7774 rtx result;
7775 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7776 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7777 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7778 : xor_optab);
7779
7780 /* If we had X ? A : A + 1, do this as A + (X == 0).
7781
7782 We have to invert the truth value here and then put it
7783 back later if do_store_flag fails. We cannot simply copy
7784 TREE_OPERAND (exp, 0) to another variable and modify that
7785 because invert_truthvalue can modify the tree pointed to
7786 by its argument. */
7787 if (singleton == TREE_OPERAND (exp, 1))
7788 TREE_OPERAND (exp, 0)
7789 = invert_truthvalue (TREE_OPERAND (exp, 0));
7790
7791 result = do_store_flag (TREE_OPERAND (exp, 0),
7792 (safe_from_p (temp, singleton, 1)
7793 ? temp : NULL_RTX),
7794 mode, BRANCH_COST <= 1);
7795
7796 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7797 result = expand_shift (LSHIFT_EXPR, mode, result,
7798 build_int_2 (tree_log2
7799 (TREE_OPERAND
7800 (binary_op, 1)),
7801 0),
7802 (safe_from_p (temp, singleton, 1)
7803 ? temp : NULL_RTX), 0);
7804
7805 if (result)
7806 {
7807 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7808 return expand_binop (mode, boptab, op1, result, temp,
7809 unsignedp, OPTAB_LIB_WIDEN);
7810 }
7811 else if (singleton == TREE_OPERAND (exp, 1))
7812 TREE_OPERAND (exp, 0)
7813 = invert_truthvalue (TREE_OPERAND (exp, 0));
7814 }
7815
7816 do_pending_stack_adjust ();
7817 NO_DEFER_POP;
7818 op0 = gen_label_rtx ();
7819
7820 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7821 {
7822 if (temp != 0)
7823 {
7824 /* If the target conflicts with the other operand of the
7825 binary op, we can't use it. Also, we can't use the target
7826 if it is a hard register, because evaluating the condition
7827 might clobber it. */
7828 if ((binary_op
7829 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7830 || (GET_CODE (temp) == REG
7831 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7832 temp = gen_reg_rtx (mode);
7833 store_expr (singleton, temp, 0);
7834 }
7835 else
7836 expand_expr (singleton,
7837 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7838 if (singleton == TREE_OPERAND (exp, 1))
7839 jumpif (TREE_OPERAND (exp, 0), op0);
7840 else
7841 jumpifnot (TREE_OPERAND (exp, 0), op0);
7842
7843 start_cleanup_deferral ();
7844 if (binary_op && temp == 0)
7845 /* Just touch the other operand. */
7846 expand_expr (TREE_OPERAND (binary_op, 1),
7847 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7848 else if (binary_op)
7849 store_expr (build (TREE_CODE (binary_op), type,
7850 make_tree (type, temp),
7851 TREE_OPERAND (binary_op, 1)),
7852 temp, 0);
7853 else
7854 store_expr (build1 (TREE_CODE (unary_op), type,
7855 make_tree (type, temp)),
7856 temp, 0);
7857 op1 = op0;
7858 }
7859 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7860 comparison operator. If we have one of these cases, set the
7861 output to A, branch on A (cse will merge these two references),
7862 then set the output to FOO. */
7863 else if (temp
7864 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7865 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7866 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7867 TREE_OPERAND (exp, 1), 0)
7868 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7869 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7870 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7871 {
7872 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7873 temp = gen_reg_rtx (mode);
7874 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7875 jumpif (TREE_OPERAND (exp, 0), op0);
7876
7877 start_cleanup_deferral ();
7878 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7879 op1 = op0;
7880 }
7881 else if (temp
7882 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7883 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7884 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7885 TREE_OPERAND (exp, 2), 0)
7886 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7887 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7888 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7889 {
7890 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7891 temp = gen_reg_rtx (mode);
7892 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7893 jumpifnot (TREE_OPERAND (exp, 0), op0);
7894
7895 start_cleanup_deferral ();
7896 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7897 op1 = op0;
7898 }
7899 else
7900 {
7901 op1 = gen_label_rtx ();
7902 jumpifnot (TREE_OPERAND (exp, 0), op0);
7903
7904 start_cleanup_deferral ();
7905
7906 /* One branch of the cond can be void if it never returns. For
7907 example, A ? throw : E.  */
7908 if (temp != 0
7909 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7910 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7911 else
7912 expand_expr (TREE_OPERAND (exp, 1),
7913 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7914 end_cleanup_deferral ();
7915 emit_queue ();
7916 emit_jump_insn (gen_jump (op1));
7917 emit_barrier ();
7918 emit_label (op0);
7919 start_cleanup_deferral ();
7920 if (temp != 0
7921 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7922 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7923 else
7924 expand_expr (TREE_OPERAND (exp, 2),
7925 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7926 }
7927
7928 end_cleanup_deferral ();
7929
7930 emit_queue ();
7931 emit_label (op1);
7932 OK_DEFER_POP;
7933
7934 return temp;
7935 }
7936
7937 case TARGET_EXPR:
7938 {
7939 /* Something needs to be initialized, but we didn't know
7940 where that thing was when building the tree. For example,
7941 it could be the return value of a function, or a parameter
7942 to a function that is passed on the stack, or a temporary
7943 variable which must be passed by reference.
7944
7945 We guarantee that the expression will either be constructed
7946 or copied into our original target. */
7947
7948 tree slot = TREE_OPERAND (exp, 0);
7949 tree cleanups = NULL_TREE;
7950 tree exp1;
7951
7952 if (TREE_CODE (slot) != VAR_DECL)
7953 abort ();
7954
7955 if (! ignore)
7956 target = original_target;
7957
7958 /* Set this here so that if we get a target that refers to a
7959 register variable that's already been used, put_reg_into_stack
7960 knows that it should fix up those uses. */
7961 TREE_USED (slot) = 1;
7962
7963 if (target == 0)
7964 {
7965 if (DECL_RTL (slot) != 0)
7966 {
7967 target = DECL_RTL (slot);
7968 /* If we have already expanded the slot, don't do
7969 it again.  (mrs)  */
7970 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7971 return target;
7972 }
7973 else
7974 {
7975 target = assign_temp (type, 2, 0, 1);
7976 /* All temp slots at this level must not conflict. */
7977 preserve_temp_slots (target);
7978 DECL_RTL (slot) = target;
7979 if (TREE_ADDRESSABLE (slot))
7980 {
7981 TREE_ADDRESSABLE (slot) = 0;
7982 mark_addressable (slot);
7983 }
7984
7985 /* Since SLOT is not known to the called function
7986 to belong to its stack frame, we must build an explicit
7987 cleanup. This case occurs when we must build up a reference
7988 to pass the reference as an argument. In this case,
7989 it is very likely that such a reference need not be
7990 built here. */
7991
7992 if (TREE_OPERAND (exp, 2) == 0)
7993 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7994 cleanups = TREE_OPERAND (exp, 2);
7995 }
7996 }
7997 else
7998 {
7999 /* This case does occur when expanding a parameter which
8000 needs to be constructed on the stack. The target
8001 is the actual stack address that we want to initialize.
8002 The function we call will perform the cleanup in this case. */
8003
8004 /* If we have already assigned it space, use that space,
8005 not the target that we were passed, as our target
8006 parameter is only a hint. */
8007 if (DECL_RTL (slot) != 0)
8008 {
8009 target = DECL_RTL (slot);
8010 /* If we have already expanded the slot, don't do
8011 it again.  (mrs)  */
8012 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8013 return target;
8014 }
8015 else
8016 {
8017 DECL_RTL (slot) = target;
8018 /* If we must have an addressable slot, then make sure that
8019 the RTL that we just stored in slot is OK. */
8020 if (TREE_ADDRESSABLE (slot))
8021 {
8022 TREE_ADDRESSABLE (slot) = 0;
8023 mark_addressable (slot);
8024 }
8025 }
8026 }
8027
8028 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8029 /* Mark it as expanded. */
8030 TREE_OPERAND (exp, 1) = NULL_TREE;
8031
8032 store_expr (exp1, target, 0);
8033
8034 expand_decl_cleanup (NULL_TREE, cleanups);
8035
8036 return target;
8037 }
8038
8039 case INIT_EXPR:
8040 {
8041 tree lhs = TREE_OPERAND (exp, 0);
8042 tree rhs = TREE_OPERAND (exp, 1);
8043 tree noncopied_parts = 0;
8044 tree lhs_type = TREE_TYPE (lhs);
8045
8046 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8047 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8048 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8049 TYPE_NONCOPIED_PARTS (lhs_type));
8050 while (noncopied_parts != 0)
8051 {
8052 expand_assignment (TREE_VALUE (noncopied_parts),
8053 TREE_PURPOSE (noncopied_parts), 0, 0);
8054 noncopied_parts = TREE_CHAIN (noncopied_parts);
8055 }
8056 return temp;
8057 }
8058
8059 case MODIFY_EXPR:
8060 {
8061 /* If lhs is complex, expand calls in rhs before computing it.
8062 That's so we don't compute a pointer and save it over a call.
8063 If lhs is simple, compute it first so we can give it as a
8064 target if the rhs is just a call. This avoids an extra temp and copy
8065 and prevents a partial subsumption that makes bad code.
8066 Actually we could treat component_ref's of vars like vars. */
8067
8068 tree lhs = TREE_OPERAND (exp, 0);
8069 tree rhs = TREE_OPERAND (exp, 1);
8070 tree noncopied_parts = 0;
8071 tree lhs_type = TREE_TYPE (lhs);
8072
8073 temp = 0;
8074
8075 if (TREE_CODE (lhs) != VAR_DECL
8076 && TREE_CODE (lhs) != RESULT_DECL
8077 && TREE_CODE (lhs) != PARM_DECL
8078 && ! (TREE_CODE (lhs) == INDIRECT_REF
8079 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8080 preexpand_calls (exp);
8081
8082 /* Check for |= or &= of a bitfield of size one into another bitfield
8083 of size 1. In this case, (unless we need the result of the
8084 assignment) we can do this more efficiently with a
8085 test followed by an assignment, if necessary.
8086
8087 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8088 things change so we do, this code should be enhanced to
8089 support it. */
8090 if (ignore
8091 && TREE_CODE (lhs) == COMPONENT_REF
8092 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8093 || TREE_CODE (rhs) == BIT_AND_EXPR)
8094 && TREE_OPERAND (rhs, 0) == lhs
8095 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8096 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8097 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8098 {
8099 rtx label = gen_label_rtx ();
8100
8101 do_jump (TREE_OPERAND (rhs, 1),
8102 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8103 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8104 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8105 (TREE_CODE (rhs) == BIT_IOR_EXPR
8106 ? integer_one_node
8107 : integer_zero_node)),
8108 0, 0);
8109 do_pending_stack_adjust ();
8110 emit_label (label);
8111 return const0_rtx;
8112 }
8113
8114 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8115 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8116 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8117 TYPE_NONCOPIED_PARTS (lhs_type));
8118
8119 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8120 while (noncopied_parts != 0)
8121 {
8122 expand_assignment (TREE_PURPOSE (noncopied_parts),
8123 TREE_VALUE (noncopied_parts), 0, 0);
8124 noncopied_parts = TREE_CHAIN (noncopied_parts);
8125 }
8126 return temp;
8127 }
8128
8129 case RETURN_EXPR:
8130 if (!TREE_OPERAND (exp, 0))
8131 expand_null_return ();
8132 else
8133 expand_return (TREE_OPERAND (exp, 0));
8134 return const0_rtx;
8135
8136 case PREINCREMENT_EXPR:
8137 case PREDECREMENT_EXPR:
8138 return expand_increment (exp, 0, ignore);
8139
8140 case POSTINCREMENT_EXPR:
8141 case POSTDECREMENT_EXPR:
8142 /* Faster to treat as pre-increment if result is not used. */
8143 return expand_increment (exp, ! ignore, ignore);
8144
8145 case ADDR_EXPR:
8146 /* If nonzero, TEMP will be set to the address of something that might
8147 be a MEM corresponding to a stack slot. */
8148 temp = 0;
8149
8150 /* Are we taking the address of a nested function? */
8151 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8152 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8153 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8154 && ! TREE_STATIC (exp))
8155 {
8156 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8157 op0 = force_operand (op0, target);
8158 }
8159 /* If we are taking the address of something erroneous, just
8160 return a zero. */
8161 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8162 return const0_rtx;
8163 else
8164 {
8165 /* We make sure to pass const0_rtx down if we came in with
8166 ignore set, to avoid running the cleanups twice. */
8167 op0 = expand_expr (TREE_OPERAND (exp, 0),
8168 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8169 (modifier == EXPAND_INITIALIZER
8170 ? modifier : EXPAND_CONST_ADDRESS));
8171
8172 /* If we are going to ignore the result, OP0 will have been set
8173 to const0_rtx, so just return it. Don't get confused and
8174 think we are taking the address of the constant. */
8175 if (ignore)
8176 return op0;
8177
8178 op0 = protect_from_queue (op0, 0);
8179
8180 /* We would like the object in memory. If it is a constant, we can
8181 have it be statically allocated into memory. For a non-constant,
8182 we need to allocate some memory and store the value into it. */
8183
8184 if (CONSTANT_P (op0))
8185 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8186 op0);
8187 else if (GET_CODE (op0) == MEM)
8188 {
8189 mark_temp_addr_taken (op0);
8190 temp = XEXP (op0, 0);
8191 }
8192
8193 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8194 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8195 {
8196 /* If this object is in a register, it must not
8197 be BLKmode.  */
8198 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8199 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8200
8201 mark_temp_addr_taken (memloc);
8202 emit_move_insn (memloc, op0);
8203 op0 = memloc;
8204 }
8205
8206 if (GET_CODE (op0) != MEM)
8207 abort ();
8208
8209 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8210 {
8211 temp = XEXP (op0, 0);
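	  /* On targets where ptr_mode is narrower than Pmode, convert the
	     address to the ptr_mode this expression expects.  */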
8212 #ifdef POINTERS_EXTEND_UNSIGNED
8213 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8214 && mode == ptr_mode)
8215 temp = convert_memory_address (ptr_mode, temp);
8216 #endif
8217 return temp;
8218 }
8219
8220 op0 = force_operand (XEXP (op0, 0), target);
8221 }
8222
8223 if (flag_force_addr && GET_CODE (op0) != REG)
8224 op0 = force_reg (Pmode, op0);
8225
8226 if (GET_CODE (op0) == REG
8227 && ! REG_USERVAR_P (op0))
8228 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8229
8230 /* If we might have had a temp slot, add an equivalent address
8231 for it. */
8232 if (temp != 0)
8233 update_temp_slot_address (temp, op0);
8234
8235 #ifdef POINTERS_EXTEND_UNSIGNED
8236 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8237 && mode == ptr_mode)
8238 op0 = convert_memory_address (ptr_mode, op0);
8239 #endif
8240
8241 return op0;
8242
8243 case ENTRY_VALUE_EXPR:
8244 abort ();
8245
8246 /* COMPLEX type for Extended Pascal & Fortran */
8247 case COMPLEX_EXPR:
8248 {
8249 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8250 rtx insns;
8251
8252 /* Get the rtx code of the operands. */
8253 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8254 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8255
8256 if (! target)
8257 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8258
8259 start_sequence ();
8260
8261 /* Move the real (op0) and imaginary (op1) parts to their location. */
8262 emit_move_insn (gen_realpart (mode, target), op0);
8263 emit_move_insn (gen_imagpart (mode, target), op1);
8264
8265 insns = get_insns ();
8266 end_sequence ();
8267
8268 /* Complex construction should appear as a single unit. */
8269 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8270 each with a separate pseudo as destination.
8271 It's not correct for flow to treat them as a unit. */
8272 if (GET_CODE (target) != CONCAT)
8273 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8274 else
8275 emit_insns (insns);
8276
8277 return target;
8278 }
8279
8280 case REALPART_EXPR:
8281 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8282 return gen_realpart (mode, op0);
8283
8284 case IMAGPART_EXPR:
8285 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8286 return gen_imagpart (mode, op0);
8287
8288 case CONJ_EXPR:
8289 {
8290 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8291 rtx imag_t;
8292 rtx insns;
8293
8294 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8295
8296 if (! target)
8297 target = gen_reg_rtx (mode);
8298
8299 start_sequence ();
8300
8301 /* Store the realpart and the negated imagpart to target. */
8302 emit_move_insn (gen_realpart (partmode, target),
8303 gen_realpart (partmode, op0));
8304
8305 imag_t = gen_imagpart (partmode, target);
8306 temp = expand_unop (partmode, neg_optab,
8307 gen_imagpart (partmode, op0), imag_t, 0);
8308 if (temp != imag_t)
8309 emit_move_insn (imag_t, temp);
8310
8311 insns = get_insns ();
8312 end_sequence ();
8313
8314 /* Conjugate should appear as a single unit.
8315 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8316 each with a separate pseudo as destination.
8317 It's not correct for flow to treat them as a unit. */
8318 if (GET_CODE (target) != CONCAT)
8319 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8320 else
8321 emit_insns (insns);
8322
8323 return target;
8324 }
8325
8326 case TRY_CATCH_EXPR:
8327 {
8328 tree handler = TREE_OPERAND (exp, 1);
8329
8330 expand_eh_region_start ();
8331
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333
8334 expand_eh_region_end (handler);
8335
8336 return op0;
8337 }
8338
8339 case TRY_FINALLY_EXPR:
8340 {
8341 tree try_block = TREE_OPERAND (exp, 0);
8342 tree finally_block = TREE_OPERAND (exp, 1);
8343 rtx finally_label = gen_label_rtx ();
8344 rtx done_label = gen_label_rtx ();
8345 rtx return_link = gen_reg_rtx (Pmode);
8346 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8347 (tree) finally_label, (tree) return_link);
8348 TREE_SIDE_EFFECTS (cleanup) = 1;
8349
8350 /* Start a new binding layer that will keep track of all cleanup
8351 actions to be performed. */
8352 expand_start_bindings (2);
8353
8354 target_temp_slot_level = temp_slot_level;
8355
8356 expand_decl_cleanup (NULL_TREE, cleanup);
8357 op0 = expand_expr (try_block, target, tmode, modifier);
8358
8359 preserve_temp_slots (op0);
8360 expand_end_bindings (NULL_TREE, 0, 0);
8361 emit_jump (done_label);
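      /* The FINALLY block is emitted out of line here.  It is entered via
	 the GOTO_SUBROUTINE_EXPR cleanup registered above, which loads
	 RETURN_LINK, and control comes back through the indirect jump.  */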
8362 emit_label (finally_label);
8363 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8364 emit_indirect_jump (return_link);
8365 emit_label (done_label);
8366 return op0;
8367 }
8368
8369 case GOTO_SUBROUTINE_EXPR:
8370 {
8371 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8372 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8373 rtx return_address = gen_label_rtx ();
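      /* Save the place to come back to in RETURN_LINK, then jump to the
	 subroutine label.  */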
8374 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8375 emit_jump (subr);
8376 emit_label (return_address);
8377 return const0_rtx;
8378 }
8379
8380 case POPDCC_EXPR:
8381 {
8382 rtx dcc = get_dynamic_cleanup_chain ();
8383 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8384 return const0_rtx;
8385 }
8386
8387 case POPDHC_EXPR:
8388 {
8389 rtx dhc = get_dynamic_handler_chain ();
8390 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8391 return const0_rtx;
8392 }
8393
8394 case VA_ARG_EXPR:
8395 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8396
8397 default:
8398 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8399 }
8400
8401 /* Here to do an ordinary binary operator, generating an instruction
8402 from the optab already placed in `this_optab'. */
8403 binop:
8404 preexpand_calls (exp);
8405 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8406 subtarget = 0;
8407 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8409 binop2:
8410 temp = expand_binop (mode, this_optab, op0, op1, target,
8411 unsignedp, OPTAB_LIB_WIDEN);
8412 if (temp == 0)
8413 abort ();
8414 return temp;
8415 }
8416 \f
8417 /* Similar to expand_expr, except that we don't specify a target, target
8418 mode, or modifier and we return the alignment of the inner type. This is
8419 used in cases where it is not necessary to align the result to the
8420 alignment of its type as long as we know the alignment of the result, for
8421 example for comparisons of BLKmode values. */
8422
8423 static rtx
8424 expand_expr_unaligned (exp, palign)
8425 register tree exp;
8426 int *palign;
8427 {
8428 register rtx op0;
8429 tree type = TREE_TYPE (exp);
8430 register enum machine_mode mode = TYPE_MODE (type);
8431
8432 /* Default the alignment we return to that of the type. */
8433 *palign = TYPE_ALIGN (type);
8434
8435 /* The only case in which we do anything special is when the resulting mode
8436 is BLKmode. */
8437 if (mode != BLKmode)
8438 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8439
8440 switch (TREE_CODE (exp))
8441 {
8442 case CONVERT_EXPR:
8443 case NOP_EXPR:
8444 case NON_LVALUE_EXPR:
8445 /* Conversions between BLKmode values don't change the underlying
8446 alignment or value. */
8447 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8448 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8449 break;
8450
8451 case ARRAY_REF:
8452 /* Much of the code for this case is copied directly from expand_expr.
8453 We need to duplicate it here because we will do something different
8454 in the fall-through case, so we need to handle the same exceptions
8455 it does. */
8456 {
8457 tree array = TREE_OPERAND (exp, 0);
8458 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8459 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8460 tree index = TREE_OPERAND (exp, 1);
8461 tree index_type = TREE_TYPE (index);
8462 HOST_WIDE_INT i;
8463
8464 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8465 abort ();
8466
8467 /* Optimize the special-case of a zero lower bound.
8468
8469 We convert the low_bound to sizetype to avoid some problems
8470 with constant folding. (E.g. suppose the lower bound is 1,
8471 and its mode is QI. Without the conversion, (ARRAY
8472 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8473 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8474
8475 But sizetype isn't quite right either (especially if
8476 the lowbound is negative). FIXME */
8477
8478 if (! integer_zerop (low_bound))
8479 index = fold (build (MINUS_EXPR, index_type, index,
8480 convert (sizetype, low_bound)));
8481
8482 /* If this is a constant index into a constant array,
8483 just get the value from the array. Handle both the cases when
8484 we have an explicit constructor and when our operand is a variable
8485 that was declared const. */
8486
8487 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8488 {
8489 if (TREE_CODE (index) == INTEGER_CST
8490 && TREE_INT_CST_HIGH (index) == 0)
8491 {
8492 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8493
8494 i = TREE_INT_CST_LOW (index);
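	      /* Walk down the constructor's element list to the I'th element.  */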
8495 while (elem && i--)
8496 elem = TREE_CHAIN (elem);
8497 if (elem)
8498 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8499 palign);
8500 }
8501 }
8502
8503 else if (optimize >= 1
8504 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8505 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8506 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8507 {
8508 if (TREE_CODE (index) == INTEGER_CST)
8509 {
8510 tree init = DECL_INITIAL (array);
8511
8512 i = TREE_INT_CST_LOW (index);
8513 if (TREE_CODE (init) == CONSTRUCTOR)
8514 {
8515 tree elem = CONSTRUCTOR_ELTS (init);
8516
8517 while (elem
8518 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8519 elem = TREE_CHAIN (elem);
8520 if (elem)
8521 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8522 palign);
8523 }
8524 }
8525 }
8526 }
8527
8528 /* ... fall through ... */
8529
8530 case COMPONENT_REF:
8531 case BIT_FIELD_REF:
8532 /* If the operand is a CONSTRUCTOR, we can just extract the
8533 appropriate field if it is present. Don't do this if we have
8534 already written the data since we want to refer to that copy
8535 and varasm.c assumes that's what we'll do. */
8536 if (TREE_CODE (exp) != ARRAY_REF
8537 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8538 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8539 {
8540 tree elt;
8541
8542 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8543 elt = TREE_CHAIN (elt))
8544 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8545 /* Note that unlike the case in expand_expr, we know this is
8546 BLKmode and hence not an integer. */
8547 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8548 }
8549
8550 {
8551 enum machine_mode mode1;
8552 int bitsize;
8553 int bitpos;
8554 tree offset;
8555 int volatilep = 0;
8556 int alignment;
8557 int unsignedp;
8558 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8559 &mode1, &unsignedp, &volatilep,
8560 &alignment);
8561
8562 /* If we got back the original object, something is wrong. Perhaps
8563 we are evaluating an expression too early. In any event, don't
8564 infinitely recurse. */
8565 if (tem == exp)
8566 abort ();
8567
8568 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8569
8570 /* If this is a constant, put it into a register if it is a
8571 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8572 if (CONSTANT_P (op0))
8573 {
8574 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8575
8576 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8577 && offset == 0)
8578 op0 = force_reg (inner_mode, op0);
8579 else
8580 op0 = validize_mem (force_const_mem (inner_mode, op0));
8581 }
8582
8583 if (offset != 0)
8584 {
8585 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8586
8587 /* If this object is in a register, put it into memory.
8588 This case can't occur in C, but can in Ada if we have
8589 unchecked conversion of an expression from a scalar type to
8590 an array or record type. */
8591 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8592 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8593 {
8594 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8595
8596 mark_temp_addr_taken (memloc);
8597 emit_move_insn (memloc, op0);
8598 op0 = memloc;
8599 }
8600
8601 if (GET_CODE (op0) != MEM)
8602 abort ();
8603
8604 if (GET_MODE (offset_rtx) != ptr_mode)
8605 {
8606 #ifdef POINTERS_EXTEND_UNSIGNED
8607 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8608 #else
8609 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8610 #endif
8611 }
8612
8613 op0 = change_address (op0, VOIDmode,
8614 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8615 force_reg (ptr_mode,
8616 offset_rtx)));
8617 }
8618
8619 /* Don't forget about volatility even if this is a bitfield. */
8620 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8621 {
8622 op0 = copy_rtx (op0);
8623 MEM_VOLATILE_P (op0) = 1;
8624 }
8625
8626 /* Check the access. */
8627 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8628 {
8629 rtx to;
8630 int size;
8631
8632 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8633 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8634
8635 /* Check the access rights of the pointer. */
8636 if (size > BITS_PER_UNIT)
8637 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8638 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8639 TYPE_MODE (sizetype),
8640 GEN_INT (MEMORY_USE_RO),
8641 TYPE_MODE (integer_type_node));
8642 }
8643
8644 /* Get a reference to just this component. */
8645 op0 = change_address (op0, mode1,
8646 plus_constant (XEXP (op0, 0),
8647 (bitpos / BITS_PER_UNIT)));
8648
8649 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8650
8651 /* Adjust the alignment in case the bit position is not
8652 a multiple of the alignment of the inner object. */
8653 while (bitpos % alignment != 0)
8654 alignment >>= 1;
8655
8656 if (GET_CODE (XEXP (op0, 0)) == REG)
8657 mark_reg_pointer (XEXP (op0, 0), alignment);
8658
8659 MEM_IN_STRUCT_P (op0) = 1;
8660 MEM_VOLATILE_P (op0) |= volatilep;
8661
8662 *palign = alignment;
8663 return op0;
8664 }
8665
8666 default:
8667 break;
8668
8669 }
8670
8671 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8672 }
8673 \f
8674 /* Return the tree node and offset if a given argument corresponds to
8675 a string constant. */
8676
8677 tree
8678 string_constant (arg, ptr_offset)
8679 tree arg;
8680 tree *ptr_offset;
8681 {
8682 STRIP_NOPS (arg);
8683
8684 if (TREE_CODE (arg) == ADDR_EXPR
8685 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8686 {
8687 *ptr_offset = integer_zero_node;
8688 return TREE_OPERAND (arg, 0);
8689 }
8690 else if (TREE_CODE (arg) == PLUS_EXPR)
8691 {
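/* Handle the address of a string constant plus an offset, with the
operands appearing in either order.  */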
8692 tree arg0 = TREE_OPERAND (arg, 0);
8693 tree arg1 = TREE_OPERAND (arg, 1);
8694
8695 STRIP_NOPS (arg0);
8696 STRIP_NOPS (arg1);
8697
8698 if (TREE_CODE (arg0) == ADDR_EXPR
8699 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8700 {
8701 *ptr_offset = arg1;
8702 return TREE_OPERAND (arg0, 0);
8703 }
8704 else if (TREE_CODE (arg1) == ADDR_EXPR
8705 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8706 {
8707 *ptr_offset = arg0;
8708 return TREE_OPERAND (arg1, 0);
8709 }
8710 }
8711
8712 return 0;
8713 }
8714 \f
8715 /* Expand code for a post- or pre- increment or decrement
8716 and return the RTX for the result.
8717 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8718
8719 static rtx
8720 expand_increment (exp, post, ignore)
8721 register tree exp;
8722 int post, ignore;
8723 {
8724 register rtx op0, op1;
8725 register rtx temp, value;
8726 register tree incremented = TREE_OPERAND (exp, 0);
8727 optab this_optab = add_optab;
8728 int icode;
8729 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8730 int op0_is_copy = 0;
8731 int single_insn = 0;
8732 /* 1 means we can't store into OP0 directly,
8733 because it is a subreg narrower than a word,
8734 and we don't dare clobber the rest of the word. */
8735 int bad_subreg = 0;
8736
8737 /* Stabilize any component ref that might need to be
8738 evaluated more than once below. */
8739 if (!post
8740 || TREE_CODE (incremented) == BIT_FIELD_REF
8741 || (TREE_CODE (incremented) == COMPONENT_REF
8742 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8743 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8744 incremented = stabilize_reference (incremented);
8745 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8746 ones into save exprs so that they don't accidentally get evaluated
8747 more than once by the code below. */
8748 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8749 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8750 incremented = save_expr (incremented);
8751
8752 /* Compute the operands as RTX.
8753 Note whether OP0 is the actual lvalue or a copy of it:
8754 I believe it is a copy iff it is a register or subreg
8755 and insns were generated in computing it. */
8756
8757 temp = get_last_insn ();
8758 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8759
8760 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8761 in place but instead must do sign- or zero-extension during assignment,
8762 so we copy it into a new register and let the code below use it as
8763 a copy.
8764
8765 Note that we can safely modify this SUBREG since it is known not to be
8766 shared (it was made by the expand_expr call above). */
8767
8768 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8769 {
8770 if (post)
8771 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8772 else
8773 bad_subreg = 1;
8774 }
8775 else if (GET_CODE (op0) == SUBREG
8776 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8777 {
8778 /* We cannot increment this SUBREG in place. If we are
8779 post-incrementing, get a copy of the old value. Otherwise,
8780 just mark that we cannot increment in place. */
8781 if (post)
8782 op0 = copy_to_reg (op0);
8783 else
8784 bad_subreg = 1;
8785 }
8786
8787 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8788 && temp != get_last_insn ());
8789 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8790 EXPAND_MEMORY_USE_BAD);
8791
8792 /* Decide whether incrementing or decrementing. */
8793 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8794 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8795 this_optab = sub_optab;
8796
8797 /* Convert decrement by a constant into a negative increment. */
8798 if (this_optab == sub_optab
8799 && GET_CODE (op1) == CONST_INT)
8800 {
8801 op1 = GEN_INT (- INTVAL (op1));
8802 this_optab = add_optab;
8803 }
8804
8805 /* For a preincrement, see if we can do this with a single instruction. */
8806 if (!post)
8807 {
8808 icode = (int) this_optab->handlers[(int) mode].insn_code;
8809 if (icode != (int) CODE_FOR_nothing
8810 /* Make sure that OP0 is valid for operands 0 and 1
8811 of the insn we want to queue. */
8812 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8813 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8814 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8815 single_insn = 1;
8816 }
8817
8818 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8819 then we cannot just increment OP0. We must therefore contrive to
8820 increment the original value. Then, for postincrement, we can return
8821 OP0 since it is a copy of the old value. For preincrement, expand here
8822 unless we can do it with a single insn.
8823
8824 Likewise if storing directly into OP0 would clobber high bits
8825 we need to preserve (bad_subreg). */
8826 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8827 {
8828 /* This is the easiest way to increment the value wherever it is.
8829 Problems with multiple evaluation of INCREMENTED are prevented
8830 because either (1) it is a component_ref or preincrement,
8831 in which case it was stabilized above, or (2) it is an array_ref
8832 with constant index in an array in a register, which is
8833 safe to reevaluate. */
8834 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8835 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8836 ? MINUS_EXPR : PLUS_EXPR),
8837 TREE_TYPE (exp),
8838 incremented,
8839 TREE_OPERAND (exp, 1));
8840
8841 while (TREE_CODE (incremented) == NOP_EXPR
8842 || TREE_CODE (incremented) == CONVERT_EXPR)
8843 {
8844 newexp = convert (TREE_TYPE (incremented), newexp);
8845 incremented = TREE_OPERAND (incremented, 0);
8846 }
8847
8848 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8849 return post ? op0 : temp;
8850 }
8851
8852 if (post)
8853 {
8854 /* We have a true reference to the value in OP0.
8855 If there is an insn to add or subtract in this mode, queue it.
8856 Queueing the increment insn avoids the register shuffling
8857 that often results if we must increment now and first save
8858 the old value for subsequent use. */
8859
8860 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8861 op0 = stabilize (op0);
8862 #endif
8863
8864 icode = (int) this_optab->handlers[(int) mode].insn_code;
8865 if (icode != (int) CODE_FOR_nothing
8866 /* Make sure that OP0 is valid for operands 0 and 1
8867 of the insn we want to queue. */
8868 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8869 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8870 {
8871 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8872 op1 = force_reg (mode, op1);
8873
8874 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8875 }
8876 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8877 {
8878 rtx addr = (general_operand (XEXP (op0, 0), mode)
8879 ? force_reg (Pmode, XEXP (op0, 0))
8880 : copy_to_reg (XEXP (op0, 0)));
8881 rtx temp, result;
8882
8883 op0 = change_address (op0, VOIDmode, addr);
8884 temp = force_reg (GET_MODE (op0), op0);
8885 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8886 op1 = force_reg (mode, op1);
8887
8888 /* The increment queue is LIFO, so we have to `queue'
8889 the instructions in reverse order. */
8890 enqueue_insn (op0, gen_move_insn (op0, temp));
8891 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8892 return result;
8893 }
8894 }
8895
8896 /* Preincrement, or we can't increment with one simple insn. */
8897 if (post)
8898 /* Save a copy of the value before inc or dec, to return it later. */
8899 temp = value = copy_to_reg (op0);
8900 else
8901 /* Arrange to return the incremented value. */
8902 /* Copy the rtx because expand_binop will protect from the queue,
8903 and the results of that would be invalid for us to return
8904 if our caller does emit_queue before using our result. */
8905 temp = copy_rtx (value = op0);
8906
8907 /* Increment however we can. */
8908 op1 = expand_binop (mode, this_optab, value, op1,
8909 current_function_check_memory_usage ? NULL_RTX : op0,
8910 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8911 /* Make sure the value is stored into OP0. */
8912 if (op1 != op0)
8913 emit_move_insn (op0, op1);
8914
8915 return temp;
8916 }
8917 \f
8918 /* Expand all function calls contained within EXP, innermost ones first.
8919 But don't look within expressions that have sequence points.
8920 For each CALL_EXPR, record the rtx for its value
8921 in the CALL_EXPR_RTL field. */
8922
8923 static void
8924 preexpand_calls (exp)
8925 tree exp;
8926 {
8927 register int nops, i;
8928 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8929
8930 if (! do_preexpand_calls)
8931 return;
8932
8933 /* Only expressions and references can contain calls. */
8934
8935 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8936 return;
8937
8938 switch (TREE_CODE (exp))
8939 {
8940 case CALL_EXPR:
8941 /* Do nothing if already expanded. */
8942 if (CALL_EXPR_RTL (exp) != 0
8943 /* Do nothing if the call returns a variable-sized object. */
8944 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8945 /* Do nothing to built-in functions. */
8946 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8947 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8948 == FUNCTION_DECL)
8949 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8950 return;
8951
8952 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8953 return;
8954
8955 case COMPOUND_EXPR:
8956 case COND_EXPR:
8957 case TRUTH_ANDIF_EXPR:
8958 case TRUTH_ORIF_EXPR:
8959 /* If we find one of these, then we can be sure
8960 the stack adjustment will be done for it (since it makes jumps).
8961 Do it now, so that if this is inside an argument
8962 of a function, we don't get the stack adjustment
8963 after some other args have already been pushed. */
8964 do_pending_stack_adjust ();
8965 return;
8966
8967 case BLOCK:
8968 case RTL_EXPR:
8969 case WITH_CLEANUP_EXPR:
8970 case CLEANUP_POINT_EXPR:
8971 case TRY_CATCH_EXPR:
8972 return;
8973
8974 case SAVE_EXPR:
8975 if (SAVE_EXPR_RTL (exp) != 0)
8976 return;
8977
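/* ... fall through ... */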
8978 default:
8979 break;
8980 }
8981
8982 nops = tree_code_length[(int) TREE_CODE (exp)];
8983 for (i = 0; i < nops; i++)
8984 if (TREE_OPERAND (exp, i) != 0)
8985 {
8986 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8987 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8988 It doesn't happen before the call is made. */
8989 ;
8990 else
8991 {
8992 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8993 if (type == 'e' || type == '<' || type == '1' || type == '2'
8994 || type == 'r')
8995 preexpand_calls (TREE_OPERAND (exp, i));
8996 }
8997 }
8998 }
8999 \f
9000 /* At the start of a function, record that we have no previously-pushed
9001 arguments waiting to be popped. */
9002
9003 void
9004 init_pending_stack_adjust ()
9005 {
9006 pending_stack_adjust = 0;
9007 }
9008
9009 /* When exiting from a function, if safe, clear out any pending stack adjust
9010 so the adjustment won't get done.
9011
9012 Note, if the current function calls alloca, then it must have a
9013 frame pointer regardless of the value of flag_omit_frame_pointer. */
9014
9015 void
9016 clear_pending_stack_adjust ()
9017 {
9018 #ifdef EXIT_IGNORE_STACK
9019 if (optimize > 0
9020 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9021 && EXIT_IGNORE_STACK
9022 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9023 && ! flag_inline_functions)
9024 pending_stack_adjust = 0;
9025 #endif
9026 }
9027
9028 /* Pop any previously-pushed arguments that have not been popped yet. */
9029
9030 void
9031 do_pending_stack_adjust ()
9032 {
9033 if (inhibit_defer_pop == 0)
9034 {
9035 if (pending_stack_adjust != 0)
9036 adjust_stack (GEN_INT (pending_stack_adjust));
9037 pending_stack_adjust = 0;
9038 }
9039 }
9040 \f
9041 /* Expand conditional expressions. */
9042
9043 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9044 LABEL is an rtx of code CODE_LABEL, in this function and all the
9045 functions here. */
9046
9047 void
9048 jumpifnot (exp, label)
9049 tree exp;
9050 rtx label;
9051 {
9052 do_jump (exp, label, NULL_RTX);
9053 }
9054
9055 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9056
9057 void
9058 jumpif (exp, label)
9059 tree exp;
9060 rtx label;
9061 {
9062 do_jump (exp, NULL_RTX, label);
9063 }
9064
9065 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9066 the result is zero, or IF_TRUE_LABEL if the result is one.
9067 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9068 meaning fall through in that case.
9069
9070 do_jump always does any pending stack adjust except when it does not
9071 actually perform a jump. An example where there is no jump
9072 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9073
9074 This function is responsible for optimizing cases such as
9075 &&, || and comparison operators in EXP. */
9076
9077 void
9078 do_jump (exp, if_false_label, if_true_label)
9079 tree exp;
9080 rtx if_false_label, if_true_label;
9081 {
9082 register enum tree_code code = TREE_CODE (exp);
9083 /* Some cases need to create a label to jump to
9084 in order to properly fall through.
9085 These cases set DROP_THROUGH_LABEL nonzero. */
9086 rtx drop_through_label = 0;
9087 rtx temp;
9088 int i;
9089 tree type;
9090 enum machine_mode mode;
9091
9092 #ifdef MAX_INTEGER_COMPUTATION_MODE
9093 check_max_integer_computation_mode (exp);
9094 #endif
9095
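/* Emit any increments already queued before we expand the condition. */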
9096 emit_queue ();
9097
9098 switch (code)
9099 {
9100 case ERROR_MARK:
9101 break;
9102
9103 case INTEGER_CST:
9104 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9105 if (temp)
9106 emit_jump (temp);
9107 break;
9108
9109 #if 0
9110 /* This is not true with #pragma weak */
9111 case ADDR_EXPR:
9112 /* The address of something can never be zero. */
9113 if (if_true_label)
9114 emit_jump (if_true_label);
9115 break;
9116 #endif
9117
9118 case NOP_EXPR:
9119 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9120 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9121 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9122 goto normal;
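/* ... fall through ... */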
9123 case CONVERT_EXPR:
9124 /* If we are narrowing the operand, we have to do the compare in the
9125 narrower mode. */
9126 if ((TYPE_PRECISION (TREE_TYPE (exp))
9127 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9128 goto normal;
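/* ... fall through ... */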
9129 case NON_LVALUE_EXPR:
9130 case REFERENCE_EXPR:
9131 case ABS_EXPR:
9132 case NEGATE_EXPR:
9133 case LROTATE_EXPR:
9134 case RROTATE_EXPR:
9135 /* These cannot change zero->non-zero or vice versa. */
9136 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9137 break;
9138
9139 case WITH_RECORD_EXPR:
9140 /* Put the object on the placeholder list, recurse through our first
9141 operand, and pop the list. */
9142 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9143 placeholder_list);
9144 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9145 placeholder_list = TREE_CHAIN (placeholder_list);
9146 break;
9147
9148 #if 0
9149 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9150 a test and can be longer if the test is eliminated. */
9151 case PLUS_EXPR:
9152 /* Reduce to minus. */
9153 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9154 TREE_OPERAND (exp, 0),
9155 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9156 TREE_OPERAND (exp, 1))));
9157 /* Process as MINUS. */
9158 #endif
9159
9160 case MINUS_EXPR:
9161 /* Non-zero iff operands of minus differ. */
9162 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9163 TREE_OPERAND (exp, 0),
9164 TREE_OPERAND (exp, 1)),
9165 NE, NE, if_false_label, if_true_label);
9166 break;
9167
9168 case BIT_AND_EXPR:
9169 /* If we are AND'ing with a small constant, do this comparison in the
9170 smallest type that fits. If the machine doesn't have comparisons
9171 that small, it will be converted back to the wider comparison.
9172 This helps if we are testing the sign bit of a narrower object.
9173 combine can't do this for us because it can't know whether a
9174 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9175
9176 if (! SLOW_BYTE_ACCESS
9177 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9178 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9179 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9180 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9181 && (type = type_for_mode (mode, 1)) != 0
9182 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9183 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9184 != CODE_FOR_nothing))
9185 {
9186 do_jump (convert (type, exp), if_false_label, if_true_label);
9187 break;
9188 }
9189 goto normal;
9190
9191 case TRUTH_NOT_EXPR:
9192 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9193 break;
9194
9195 case TRUTH_ANDIF_EXPR:
9196 if (if_false_label == 0)
9197 if_false_label = drop_through_label = gen_label_rtx ();
9198 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9199 start_cleanup_deferral ();
9200 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9201 end_cleanup_deferral ();
9202 break;
9203
9204 case TRUTH_ORIF_EXPR:
9205 if (if_true_label == 0)
9206 if_true_label = drop_through_label = gen_label_rtx ();
9207 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9208 start_cleanup_deferral ();
9209 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9210 end_cleanup_deferral ();
9211 break;
9212
9213 case COMPOUND_EXPR:
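/* Evaluate the first operand only for its side effects, then jump
according to the value of the second operand.  */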
9214 push_temp_slots ();
9215 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9216 preserve_temp_slots (NULL_RTX);
9217 free_temp_slots ();
9218 pop_temp_slots ();
9219 emit_queue ();
9220 do_pending_stack_adjust ();
9221 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9222 break;
9223
9224 case COMPONENT_REF:
9225 case BIT_FIELD_REF:
9226 case ARRAY_REF:
9227 {
9228 int bitsize, bitpos, unsignedp;
9229 enum machine_mode mode;
9230 tree type;
9231 tree offset;
9232 int volatilep = 0;
9233 int alignment;
9234
9235 /* Get description of this reference. We don't actually care
9236 about the underlying object here. */
9237 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9238 &mode, &unsignedp, &volatilep,
9239 &alignment);
9240
9241 type = type_for_size (bitsize, unsignedp);
9242 if (! SLOW_BYTE_ACCESS
9243 && type != 0 && bitsize >= 0
9244 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9245 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9246 != CODE_FOR_nothing))
9247 {
9248 do_jump (convert (type, exp), if_false_label, if_true_label);
9249 break;
9250 }
9251 goto normal;
9252 }
9253
9254 case COND_EXPR:
9255 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9256 if (integer_onep (TREE_OPERAND (exp, 1))
9257 && integer_zerop (TREE_OPERAND (exp, 2)))
9258 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9259
9260 else if (integer_zerop (TREE_OPERAND (exp, 1))
9261 && integer_onep (TREE_OPERAND (exp, 2)))
9262 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9263
9264 else
9265 {
9266 register rtx label1 = gen_label_rtx ();
9267 drop_through_label = gen_label_rtx ();
9268
9269 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9270
9271 start_cleanup_deferral ();
9272 /* Now the THEN-expression. */
9273 do_jump (TREE_OPERAND (exp, 1),
9274 if_false_label ? if_false_label : drop_through_label,
9275 if_true_label ? if_true_label : drop_through_label);
9276 /* In case the do_jump just above never jumps. */
9277 do_pending_stack_adjust ();
9278 emit_label (label1);
9279
9280 /* Now the ELSE-expression. */
9281 do_jump (TREE_OPERAND (exp, 2),
9282 if_false_label ? if_false_label : drop_through_label,
9283 if_true_label ? if_true_label : drop_through_label);
9284 end_cleanup_deferral ();
9285 }
9286 break;
9287
9288 case EQ_EXPR:
9289 {
9290 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9291
9292 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9293 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9294 {
9295 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9296 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9297 do_jump
9298 (fold
9299 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9300 fold (build (EQ_EXPR, TREE_TYPE (exp),
9301 fold (build1 (REALPART_EXPR,
9302 TREE_TYPE (inner_type),
9303 exp0)),
9304 fold (build1 (REALPART_EXPR,
9305 TREE_TYPE (inner_type),
9306 exp1)))),
9307 fold (build (EQ_EXPR, TREE_TYPE (exp),
9308 fold (build1 (IMAGPART_EXPR,
9309 TREE_TYPE (inner_type),
9310 exp0)),
9311 fold (build1 (IMAGPART_EXPR,
9312 TREE_TYPE (inner_type),
9313 exp1)))))),
9314 if_false_label, if_true_label);
9315 }
9316
9317 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9318 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9319
9320 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9321 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9322 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9323 else
9324 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9325 break;
9326 }
9327
9328 case NE_EXPR:
9329 {
9330 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9331
9332 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9333 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9334 {
9335 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9336 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9337 do_jump
9338 (fold
9339 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9340 fold (build (NE_EXPR, TREE_TYPE (exp),
9341 fold (build1 (REALPART_EXPR,
9342 TREE_TYPE (inner_type),
9343 exp0)),
9344 fold (build1 (REALPART_EXPR,
9345 TREE_TYPE (inner_type),
9346 exp1)))),
9347 fold (build (NE_EXPR, TREE_TYPE (exp),
9348 fold (build1 (IMAGPART_EXPR,
9349 TREE_TYPE (inner_type),
9350 exp0)),
9351 fold (build1 (IMAGPART_EXPR,
9352 TREE_TYPE (inner_type),
9353 exp1)))))),
9354 if_false_label, if_true_label);
9355 }
9356
9357 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9358 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9359
9360 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9361 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9362 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9363 else
9364 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9365 break;
9366 }
9367
9368 case LT_EXPR:
9369 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9370 if (GET_MODE_CLASS (mode) == MODE_INT
9371 && ! can_compare_p (mode, ccp_jump))
9372 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9373 else
9374 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9375 break;
9376
9377 case LE_EXPR:
9378 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9379 if (GET_MODE_CLASS (mode) == MODE_INT
9380 && ! can_compare_p (mode, ccp_jump))
9381 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9382 else
9383 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9384 break;
9385
9386 case GT_EXPR:
9387 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9388 if (GET_MODE_CLASS (mode) == MODE_INT
9389 && ! can_compare_p (mode, ccp_jump))
9390 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9391 else
9392 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9393 break;
9394
9395 case GE_EXPR:
9396 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9397 if (GET_MODE_CLASS (mode) == MODE_INT
9398 && ! can_compare_p (mode, ccp_jump))
9399 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9400 else
9401 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9402 break;
9403
9404 default:
9405 normal:
9406 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9407 #if 0
9408 /* This is not needed any more and causes poor code since it causes
9409 comparisons and tests from non-SI objects to have different code
9410 sequences. */
9411 /* Copy to register to avoid generating bad insns by cse
9412 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9413 if (!cse_not_expected && GET_CODE (temp) == MEM)
9414 temp = copy_to_reg (temp);
9415 #endif
9416 do_pending_stack_adjust ();
9417 /* Do any postincrements in the expression that was tested. */
9418 emit_queue ();
9419
9420 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9421 {
9422 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9423 if (target)
9424 emit_jump (target);
9425 }
9426 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9427 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9428 /* Note swapping the labels gives us not-equal. */
9429 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9430 else if (GET_MODE (temp) != VOIDmode)
9431 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9432 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9433 GET_MODE (temp), NULL_RTX, 0,
9434 if_false_label, if_true_label);
9435 else
9436 abort ();
9437 }
9438
9439 if (drop_through_label)
9440 {
9441 /* If do_jump produces code that might be jumped around,
9442 do any stack adjusts from that code, before the place
9443 where control merges in. */
9444 do_pending_stack_adjust ();
9445 emit_label (drop_through_label);
9446 }
9447 }
9448 \f
9449 /* Given a comparison expression EXP for values too wide to be compared
9450 with one insn, test the comparison and jump to the appropriate label.
9451 The code of EXP is ignored; we always test GT if SWAP is 0,
9452 and LT if SWAP is 1. */
9453
9454 static void
9455 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9456 tree exp;
9457 int swap;
9458 rtx if_false_label, if_true_label;
9459 {
9460 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9461 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9462 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9463 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9464
9465 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9466 }
9467
9468 /* Compare OP0 with OP1, word at a time, in mode MODE.
9469 UNSIGNEDP says to do unsigned comparison.
9470 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9471
9472 void
9473 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9474 enum machine_mode mode;
9475 int unsignedp;
9476 rtx op0, op1;
9477 rtx if_false_label, if_true_label;
9478 {
9479 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9480 rtx drop_through_label = 0;
9481 int i;
9482
9483 if (! if_true_label || ! if_false_label)
9484 drop_through_label = gen_label_rtx ();
9485 if (! if_true_label)
9486 if_true_label = drop_through_label;
9487 if (! if_false_label)
9488 if_false_label = drop_through_label;
9489
9490 /* Compare a word at a time, high order first. */
9491 for (i = 0; i < nwords; i++)
9492 {
9493 rtx op0_word, op1_word;
9494
9495 if (WORDS_BIG_ENDIAN)
9496 {
9497 op0_word = operand_subword_force (op0, i, mode);
9498 op1_word = operand_subword_force (op1, i, mode);
9499 }
9500 else
9501 {
9502 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9503 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9504 }
9505
9506 /* All but the high-order word must be compared as unsigned. */
9507 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9508 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9509 NULL_RTX, if_true_label);
9510
9511 /* Consider lower words only if these are equal. */
9512 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9513 NULL_RTX, 0, NULL_RTX, if_false_label);
9514 }
9515
9516 if (if_false_label)
9517 emit_jump (if_false_label);
9518 if (drop_through_label)
9519 emit_label (drop_through_label);
9520 }
9521
9522 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9523 with one insn, test the comparison and jump to the appropriate label. */
9524
9525 static void
9526 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9527 tree exp;
9528 rtx if_false_label, if_true_label;
9529 {
9530 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9531 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9532 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9533 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9534 int i;
9535 rtx drop_through_label = 0;
9536
9537 if (! if_false_label)
9538 drop_through_label = if_false_label = gen_label_rtx ();
9539
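/* Jump to IF_FALSE_LABEL as soon as any pair of corresponding words
differs.  */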
9540 for (i = 0; i < nwords; i++)
9541 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9542 operand_subword_force (op1, i, mode),
9543 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9544 word_mode, NULL_RTX, 0, if_false_label,
9545 NULL_RTX);
9546
9547 if (if_true_label)
9548 emit_jump (if_true_label);
9549 if (drop_through_label)
9550 emit_label (drop_through_label);
9551 }
9552 \f
9553 /* Jump according to whether OP0 is 0.
9554 We assume that OP0 has an integer mode that is too wide
9555 for the available compare insns. */
9556
9557 void
9558 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9559 rtx op0;
9560 rtx if_false_label, if_true_label;
9561 {
9562 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9563 rtx part;
9564 int i;
9565 rtx drop_through_label = 0;
9566
9567 /* The fastest way of doing this comparison on almost any machine is to
9568 "or" all the words and compare the result. If all have to be loaded
9569 from memory and this is a very wide item, it's possible this may
9570 be slower, but that's highly unlikely. */
9571
9572 part = gen_reg_rtx (word_mode);
9573 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9574 for (i = 1; i < nwords && part != 0; i++)
9575 part = expand_binop (word_mode, ior_optab, part,
9576 operand_subword_force (op0, i, GET_MODE (op0)),
9577 part, 1, OPTAB_WIDEN);
9578
9579 if (part != 0)
9580 {
9581 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9582 NULL_RTX, 0, if_false_label, if_true_label);
9583
9584 return;
9585 }
9586
9587 /* If we couldn't do the "or" simply, do this with a series of compares. */
9588 if (! if_false_label)
9589 drop_through_label = if_false_label = gen_label_rtx ();
9590
9591 for (i = 0; i < nwords; i++)
9592 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9593 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9594 if_false_label, NULL_RTX);
9595
9596 if (if_true_label)
9597 emit_jump (if_true_label);
9598
9599 if (drop_through_label)
9600 emit_label (drop_through_label);
9601 }
9602 \f
9603 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9604 (OP0 and OP1 are rtx values that have already been computed)
9605 and set (CC0) according to the result.
9606 The decision as to signed or unsigned comparison must be made by the caller.
9607
9608 We force a stack adjustment unless there are currently
9609 things pushed on the stack that aren't yet used.
9610
9611 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9612 compared.
9613
9614 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9615 size of MODE should be used. */
9616
9617 rtx
9618 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9619 register rtx op0, op1;
9620 enum rtx_code code;
9621 int unsignedp;
9622 enum machine_mode mode;
9623 rtx size;
9624 int align;
9625 {
9626 rtx tem;
9627
9628 /* If one operand is constant, make it the second one. Only do this
9629 if the other operand is not constant as well. */
9630
9631 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9632 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9633 {
9634 tem = op0;
9635 op0 = op1;
9636 op1 = tem;
9637 code = swap_condition (code);
9638 }
9639
9640 if (flag_force_mem)
9641 {
9642 op0 = force_not_mem (op0);
9643 op1 = force_not_mem (op1);
9644 }
9645
9646 do_pending_stack_adjust ();
9647
9648 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9649 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9650 return tem;
9651
9652 #if 0
9653 /* There's no need to do this now that combine.c can eliminate lots of
9654 sign extensions. This can be less efficient in certain cases on other
9655 machines. */
9656
9657 /* If this is a signed equality comparison, we can do it as an
9658 unsigned comparison since zero-extension is cheaper than sign
9659 extension and comparisons with zero are done as unsigned. This is
9660 the case even on machines that can do fast sign extension, since
9661 zero-extension is easier to combine with other operations than
9662 sign-extension is. If we are comparing against a constant, we must
9663 convert it to what it would look like unsigned. */
9664 if ((code == EQ || code == NE) && ! unsignedp
9665 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9666 {
9667 if (GET_CODE (op1) == CONST_INT
9668 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9669 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9670 unsignedp = 1;
9671 }
9672 #endif
9673
9674 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9675
9676 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9677 }
9678
9679 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9680 The decision as to signed or unsigned comparison must be made by the caller.
9681
9682 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9683 compared.
9684
9685 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9686 size of MODE should be used. */
9687
9688 void
9689 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9690 if_false_label, if_true_label)
9691 register rtx op0, op1;
9692 enum rtx_code code;
9693 int unsignedp;
9694 enum machine_mode mode;
9695 rtx size;
9696 int align;
9697 rtx if_false_label, if_true_label;
9698 {
9699 rtx tem;
9700 int dummy_true_label = 0;
9701
9702 /* Reverse the comparison if that is safe and we want to jump if it is
9703 false. */
9704 if (! if_true_label && ! FLOAT_MODE_P (mode))
9705 {
9706 if_true_label = if_false_label;
9707 if_false_label = 0;
9708 code = reverse_condition (code);
9709 }
9710
9711 /* If one operand is constant, make it the second one. Only do this
9712 if the other operand is not constant as well. */
9713
9714 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9715 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9716 {
9717 tem = op0;
9718 op0 = op1;
9719 op1 = tem;
9720 code = swap_condition (code);
9721 }
9722
9723 if (flag_force_mem)
9724 {
9725 op0 = force_not_mem (op0);
9726 op1 = force_not_mem (op1);
9727 }
9728
9729 do_pending_stack_adjust ();
9730
9731 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9732 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9733 {
9734 if (tem == const_true_rtx)
9735 {
9736 if (if_true_label)
9737 emit_jump (if_true_label);
9738 }
9739 else
9740 {
9741 if (if_false_label)
9742 emit_jump (if_false_label);
9743 }
9744 return;
9745 }
9746
9747 #if 0
9748 /* There's no need to do this now that combine.c can eliminate lots of
9749 sign extensions. This can be less efficient in certain cases on other
9750 machines. */
9751
9752 /* If this is a signed equality comparison, we can do it as an
9753 unsigned comparison since zero-extension is cheaper than sign
9754 extension and comparisons with zero are done as unsigned. This is
9755 the case even on machines that can do fast sign extension, since
9756 zero-extension is easier to combine with other operations than
9757 sign-extension is. If we are comparing against a constant, we must
9758 convert it to what it would look like unsigned. */
9759 if ((code == EQ || code == NE) && ! unsignedp
9760 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9761 {
9762 if (GET_CODE (op1) == CONST_INT
9763 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9764 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9765 unsignedp = 1;
9766 }
9767 #endif
9768
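/* If the caller supplied no true label, invent one; it is emitted just
after the jump to IF_FALSE_LABEL below, so a true comparison simply
skips that jump.  */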
9769 if (! if_true_label)
9770 {
9771 dummy_true_label = 1;
9772 if_true_label = gen_label_rtx ();
9773 }
9774
9775 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9776 if_true_label);
9777
9778 if (if_false_label)
9779 emit_jump (if_false_label);
9780 if (dummy_true_label)
9781 emit_label (if_true_label);
9782 }
9783
9784 /* Generate code for a comparison expression EXP (including code to compute
9785 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9786 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9787 generated code will drop through.
9788 SIGNED_CODE should be the rtx operation for this comparison for
9789 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9790
9791 We force a stack adjustment unless there are currently
9792 things pushed on the stack that aren't yet used. */
9793
9794 static void
9795 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9796 if_true_label)
9797 register tree exp;
9798 enum rtx_code signed_code, unsigned_code;
9799 rtx if_false_label, if_true_label;
9800 {
9801 int align0, align1;
9802 register rtx op0, op1;
9803 register tree type;
9804 register enum machine_mode mode;
9805 int unsignedp;
9806 enum rtx_code code;
9807
9808 /* Don't crash if the comparison was erroneous. */
9809 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9810 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9811 return;
9812
9813 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9814 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9815 mode = TYPE_MODE (type);
9816 unsignedp = TREE_UNSIGNED (type);
9817 code = unsignedp ? unsigned_code : signed_code;
9818
9819 #ifdef HAVE_canonicalize_funcptr_for_compare
9820 /* If function pointers need to be "canonicalized" before they can
9821 be reliably compared, then canonicalize them. */
9822 if (HAVE_canonicalize_funcptr_for_compare
9823 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9824 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9825 == FUNCTION_TYPE))
9826 {
9827 rtx new_op0 = gen_reg_rtx (mode);
9828
9829 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9830 op0 = new_op0;
9831 }
9832
9833 if (HAVE_canonicalize_funcptr_for_compare
9834 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9835 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9836 == FUNCTION_TYPE))
9837 {
9838 rtx new_op1 = gen_reg_rtx (mode);
9839
9840 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9841 op1 = new_op1;
9842 }
9843 #endif
9844
9845 /* Do any postincrements in the expression that was tested. */
9846 emit_queue ();
9847
9848 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9849 ((mode == BLKmode)
9850 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9851 MIN (align0, align1) / BITS_PER_UNIT,
9852 if_false_label, if_true_label);
9853 }
9854 \f
9855 /* Generate code to calculate EXP using a store-flag instruction
9856 and return an rtx for the result. EXP is either a comparison
9857 or a TRUTH_NOT_EXPR whose operand is a comparison.
9858
9859 If TARGET is nonzero, store the result there if convenient.
9860
9861 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9862 cheap.
9863
9864 Return zero if there is no suitable set-flag instruction
9865 available on this machine.
9866
9867 Once expand_expr has been called on the arguments of the comparison,
9868 we are committed to doing the store flag, since it is not safe to
9869 re-evaluate the expression. We emit the store-flag insn by calling
9870 emit_store_flag, but only expand the arguments if we have a reason
9871 to believe that emit_store_flag will be successful. If we think that
9872 it will, but it isn't, we have to simulate the store-flag with a
9873 set/jump/set sequence. */
9874
9875 static rtx
9876 do_store_flag (exp, target, mode, only_cheap)
9877 tree exp;
9878 rtx target;
9879 enum machine_mode mode;
9880 int only_cheap;
9881 {
9882 enum rtx_code code;
9883 tree arg0, arg1, type;
9884 tree tem;
9885 enum machine_mode operand_mode;
9886 int invert = 0;
9887 int unsignedp;
9888 rtx op0, op1;
9889 enum insn_code icode;
9890 rtx subtarget = target;
9891 rtx result, label;
9892
9893 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9894 result at the end. We can't simply invert the test since it would
9895 have already been inverted if it were valid. This case occurs for
9896 some floating-point comparisons. */
9897
9898 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9899 invert = 1, exp = TREE_OPERAND (exp, 0);
9900
9901 arg0 = TREE_OPERAND (exp, 0);
9902 arg1 = TREE_OPERAND (exp, 1);
9903 type = TREE_TYPE (arg0);
9904 operand_mode = TYPE_MODE (type);
9905 unsignedp = TREE_UNSIGNED (type);
9906
9907 /* We won't bother with BLKmode store-flag operations because it would mean
9908 passing a lot of information to emit_store_flag. */
9909 if (operand_mode == BLKmode)
9910 return 0;
9911
9912 /* We won't bother with store-flag operations involving function pointers
9913 when function pointers must be canonicalized before comparisons. */
9914 #ifdef HAVE_canonicalize_funcptr_for_compare
9915 if (HAVE_canonicalize_funcptr_for_compare
9916 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9917 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9918 == FUNCTION_TYPE))
9919 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9920 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9921 == FUNCTION_TYPE))))
9922 return 0;
9923 #endif
9924
9925 STRIP_NOPS (arg0);
9926 STRIP_NOPS (arg1);
9927
9928 /* Get the rtx comparison code to use. We know that EXP is a comparison
9929 operation of some type. Some comparisons against 1 and -1 can be
9930 converted to comparisons with zero. Do so here so that the tests
9931 below will be aware that we have a comparison with zero. These
9932 tests will not catch constants in the first operand, but constants
9933 are rarely passed as the first operand. */
9934
9935 switch (TREE_CODE (exp))
9936 {
9937 case EQ_EXPR:
9938 code = EQ;
9939 break;
9940 case NE_EXPR:
9941 code = NE;
9942 break;
9943 case LT_EXPR:
9944 if (integer_onep (arg1))
9945 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9946 else
9947 code = unsignedp ? LTU : LT;
9948 break;
9949 case LE_EXPR:
9950 if (! unsignedp && integer_all_onesp (arg1))
9951 arg1 = integer_zero_node, code = LT;
9952 else
9953 code = unsignedp ? LEU : LE;
9954 break;
9955 case GT_EXPR:
9956 if (! unsignedp && integer_all_onesp (arg1))
9957 arg1 = integer_zero_node, code = GE;
9958 else
9959 code = unsignedp ? GTU : GT;
9960 break;
9961 case GE_EXPR:
9962 if (integer_onep (arg1))
9963 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9964 else
9965 code = unsignedp ? GEU : GE;
9966 break;
9967 default:
9968 abort ();
9969 }
9970
9971 /* Put a constant second. */
9972 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9973 {
9974 tem = arg0; arg0 = arg1; arg1 = tem;
9975 code = swap_condition (code);
9976 }
9977
9978 /* If this is an equality or inequality test of a single bit, we can
9979 do this by shifting the bit being tested to the low-order bit and
9980 masking the result with the constant 1. If the condition was EQ,
9981 we xor it with 1. This does not require an scc insn and is faster
9982 than an scc insn even if we have it. */
9983
9984 if ((code == NE || code == EQ)
9985 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9986 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9987 {
9988 tree inner = TREE_OPERAND (arg0, 0);
9989 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9990 int ops_unsignedp;
9991
9992 /* If INNER is a right shift of a constant and it plus BITNUM does
9993 not overflow, adjust BITNUM and INNER. */
9994
9995 if (TREE_CODE (inner) == RSHIFT_EXPR
9996 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9997 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9998 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9999 < TYPE_PRECISION (type)))
10000 {
10001 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10002 inner = TREE_OPERAND (inner, 0);
10003 }
10004
10005 /* If we are going to be able to omit the AND below, we must do our
10006 operations as unsigned. If we must use the AND, we have a choice.
10007 Normally unsigned is faster, but for some machines signed is. */
10008 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10009 #ifdef LOAD_EXTEND_OP
10010 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10011 #else
10012 : 1
10013 #endif
10014 );
10015
10016 if (subtarget == 0 || GET_CODE (subtarget) != REG
10017 || GET_MODE (subtarget) != operand_mode
10018 || ! safe_from_p (subtarget, inner, 1))
10019 subtarget = 0;
10020
10021 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10022
10023 if (bitnum != 0)
10024 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10025 size_int (bitnum), subtarget, ops_unsignedp);
10026
10027 if (GET_MODE (op0) != mode)
10028 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10029
10030 if ((code == EQ && ! invert) || (code == NE && invert))
10031 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10032 ops_unsignedp, OPTAB_LIB_WIDEN);
10033
10034 /* Put the AND last so it can combine with more things. */
10035 if (bitnum != TYPE_PRECISION (type) - 1)
10036 op0 = expand_and (op0, const1_rtx, subtarget);
10037
10038 return op0;
10039 }
10040
10041 /* Now see if we are likely to be able to do this. Return if not. */
10042 if (! can_compare_p (operand_mode, ccp_store_flag))
10043 return 0;
10044 icode = setcc_gen_code[(int) code];
10045 if (icode == CODE_FOR_nothing
10046 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10047 {
10048 /* We can only do this if it is one of the special cases that
10049 can be handled without an scc insn. */
10050 if ((code == LT && integer_zerop (arg1))
10051 || (! only_cheap && code == GE && integer_zerop (arg1)))
10052 ;
10053 else if (BRANCH_COST >= 0
10054 && ! only_cheap && (code == NE || code == EQ)
10055 && TREE_CODE (type) != REAL_TYPE
10056 && ((abs_optab->handlers[(int) operand_mode].insn_code
10057 != CODE_FOR_nothing)
10058 || (ffs_optab->handlers[(int) operand_mode].insn_code
10059 != CODE_FOR_nothing)))
10060 ;
10061 else
10062 return 0;
10063 }
10064
10065 preexpand_calls (exp);
10066 if (subtarget == 0 || GET_CODE (subtarget) != REG
10067 || GET_MODE (subtarget) != operand_mode
10068 || ! safe_from_p (subtarget, arg1, 1))
10069 subtarget = 0;
10070
10071 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10072 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10073
10074 if (target == 0)
10075 target = gen_reg_rtx (mode);
10076
10077 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10078 because, if emit_store_flag does anything at all, it will succeed and
10079 OP0 and OP1 will not be used subsequently. */
10080
10081 result = emit_store_flag (target, code,
10082 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10083 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10084 operand_mode, unsignedp, 1);
10085
10086 if (result)
10087 {
10088 if (invert)
10089 result = expand_binop (mode, xor_optab, result, const1_rtx,
10090 result, 0, OPTAB_LIB_WIDEN);
10091 return result;
10092 }
10093
10094 /* If this failed, we have to do this with set/compare/jump/set code. */
10095 if (GET_CODE (target) != REG
10096 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10097 target = gen_reg_rtx (GET_MODE (target));
10098
10099 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10100 result = compare_from_rtx (op0, op1, code, unsignedp,
10101 operand_mode, NULL_RTX, 0);
10102 if (GET_CODE (result) == CONST_INT)
10103 return (((result == const0_rtx && ! invert)
10104 || (result != const0_rtx && invert))
10105 ? const0_rtx : const1_rtx);
10106
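/* TARGET already holds the result for a true comparison; branch around
the store of the opposite value when the condition holds.  */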
10107 label = gen_label_rtx ();
10108 if (bcc_gen_fctn[(int) code] == 0)
10109 abort ();
10110
10111 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10112 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10113 emit_label (label);
10114
10115 return target;
10116 }
10117 \f
10118 /* Generate a tablejump instruction (used for switch statements). */
10119
10120 #ifdef HAVE_tablejump
10121
10122 /* INDEX is the value being switched on, with the lowest value
10123 in the table already subtracted.
10124 MODE is its expected mode (needed if INDEX is constant).
10125 RANGE is the length of the jump table.
10126 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10127
10128 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10129 index value is out of range. */
10130
10131 void
10132 do_tablejump (index, mode, range, table_label, default_label)
10133 rtx index, range, table_label, default_label;
10134 enum machine_mode mode;
10135 {
10136 register rtx temp, vector;
10137
10138 /* Do an unsigned comparison (in the proper mode) between the index
10139 expression and the value which represents the length of the range.
10140 Since we just finished subtracting the lower bound of the range
10141 from the index expression, this comparison allows us to simultaneously
10142 check that the original index expression value is both greater than
10143 or equal to the minimum value of the range and less than or equal to
10144 the maximum value of the range. */
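/* For instance, if the case values originally ran from 3 to 10, an
original index of 2 has wrapped around to a very large unsigned value
after the subtraction, so it fails the same unsigned test that rejects
values above 10.  */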
10145
10146 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10147 0, default_label);
10148
10149 /* If index is in range, it must fit in Pmode.
10150 Convert to Pmode so we can index with it. */
10151 if (mode != Pmode)
10152 index = convert_to_mode (Pmode, index, 1);
10153
10154 /* Don't let a MEM slip through, because then INDEX that comes
10155 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10156 and break_out_memory_refs will go to work on it and mess it up. */
10157 #ifdef PIC_CASE_VECTOR_ADDRESS
10158 if (flag_pic && GET_CODE (index) != REG)
10159 index = copy_to_mode_reg (Pmode, index);
10160 #endif
10161
10162 /* If flag_force_addr were to affect this address
10163 it could interfere with the tricky assumptions made
10164 about addresses that contain label-refs,
10165 which may be valid only very near the tablejump itself. */
10166 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10167 GET_MODE_SIZE, because this indicates how large insns are. The other
10168 uses should all be Pmode, because they are addresses. This code
10169 could fail if addresses and insns are not the same size. */
10170 index = gen_rtx_PLUS (Pmode,
10171 gen_rtx_MULT (Pmode, index,
10172 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10173 gen_rtx_LABEL_REF (Pmode, table_label));
10174 #ifdef PIC_CASE_VECTOR_ADDRESS
10175 if (flag_pic)
10176 index = PIC_CASE_VECTOR_ADDRESS (index);
10177 else
10178 #endif
10179 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10180 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10181 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10182 RTX_UNCHANGING_P (vector) = 1;
10183 convert_move (temp, vector, 0);
10184
10185 emit_jump_insn (gen_tablejump (temp, table_label));
10186
10187 /* If we are generating PIC code or if the table is PC-relative, the
10188 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10189 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10190 emit_barrier ();
10191 }
10192
10193 #endif /* HAVE_tablejump */