/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#include "ggc.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor	PROTO((tree, rtx, int, int, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int readonly_fields_p	PROTO((tree));
static rtx expand_expr_unaligned PROTO((tree, int *));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns	\
				       (SIZE, ALIGN) < MOVE_RATIO)
#endif
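
/* Illustration: with the default MOVE_RATIO of 15 and (say) 4-byte
   registers, a 16-byte word-aligned structure copy takes 4 single-word
   move insns, so MOVE_BY_PIECES_P (16, 4) is true and the copy is
   expanded inline instead of through a movstr pattern or a libcall.
   The 4-byte register size is only an example; the real values come
   from the target description.  */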

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
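
/* As an example of the protocol: when expanding the C expression
   "a[i++]", the increment of "i" is queued with enqueue_insn, and the
   QUEUED rtx stands in for "i" wherever its pre-increment value is
   needed; emit_queue later emits the deferred add.  This sketches the
   intended use, not the exact rtl any particular front end emits.  */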

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
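      /* CEIL, from system.h, rounds up: CEIL (a, b) is (a + b - 1) / b.
	 E.g. widening to an 8-byte mode with 4-byte words needs
	 CEIL (8, 4) == 2 words; the sizes here are just an example.  */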

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}
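      /* The arithmetic right shift above replicates the sign bit: a
	 32-bit lowpart holding -5 (0xfffffffb) shifted right by 31
	 yields all ones, which then fills every remaining word.  The
	 32-bit width is illustrative.  */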

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
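  /* For instance, with a 16-bit OLDMODE and x == (const_int -1), the
     masking above leaves val == 0xffff, so the result represents 65535
     with a zero high word rather than sign-extended all-ones.  The
     16-bit width is just an example.  */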

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;
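	  /* Worked example, assuming an 8-bit OLDMODE: x == 0x80 gives
	     val == 128 after masking; for a signed conversion the top
	     bit is set, so the sign bits are restored and val becomes
	     -128.  */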

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
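
/* For example, a target whose block-move patterns can handle 8 bytes
   at a time but whose ordinary scalar moves handle only 4 might define
   MOVE_MAX as 8 and MOVE_MAX_PIECES as 4.  These numbers are purely
   illustrative.  */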

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
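
/* As an illustration, copying len == 7 with a widest integer mode of
   4 bytes emits one SImode move, then one HImode move, then one QImode
   move, leaving data.len == 0.  The particular modes depend on the
   target; these are just examples.  */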

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
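
/* This count mirrors the loop in move_by_pieces: e.g. for l == 7 with
   aligned 4-byte accesses available, n_insns is 7/4 + 3/2 + 1/1 == 3,
   under the same illustrative mode sizes as above.  */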

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
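
/* To recap the strategy above: a constant-size block small enough for
   MOVE_BY_PIECES_P is copied inline a piece at a time; otherwise each
   movstrM pattern is tried from narrowest to widest; only if none
   matches do we emit a real call to memcpy (or bcopy without
   TARGET_MEM_FUNCTIONS).  */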
1751 \f
1752 /* Copy all or part of a value X into registers starting at REGNO.
1753 The number of registers to be filled is NREGS. */
1754
1755 void
1756 move_block_to_reg (regno, x, nregs, mode)
1757 int regno;
1758 rtx x;
1759 int nregs;
1760 enum machine_mode mode;
1761 {
1762 int i;
1763 #ifdef HAVE_load_multiple
1764 rtx pat;
1765 rtx last;
1766 #endif
1767
1768 if (nregs == 0)
1769 return;
1770
1771 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1772 x = validize_mem (force_const_mem (mode, x));
1773
1774 /* See if the machine can do this with a load multiple insn. */
1775 #ifdef HAVE_load_multiple
1776 if (HAVE_load_multiple)
1777 {
1778 last = get_last_insn ();
1779 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1780 GEN_INT (nregs));
1781 if (pat)
1782 {
1783 emit_insn (pat);
1784 return;
1785 }
1786 else
1787 delete_insns_since (last);
1788 }
1789 #endif
1790
1791 for (i = 0; i < nregs; i++)
1792 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1793 operand_subword_force (x, i, mode));
1794 }
1795
1796 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1797 The number of registers to be filled is NREGS. SIZE indicates the number
1798 of bytes in the object X. */
1799
1800
1801 void
1802 move_block_from_reg (regno, x, nregs, size)
1803 int regno;
1804 rtx x;
1805 int nregs;
1806 int size;
1807 {
1808 int i;
1809 #ifdef HAVE_store_multiple
1810 rtx pat;
1811 rtx last;
1812 #endif
1813 enum machine_mode mode;
1814
1815 /* If SIZE is that of a mode no bigger than a word, just use that
1816 mode's store operation. */
1817 if (size <= UNITS_PER_WORD
1818 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1819 {
1820 emit_move_insn (change_address (x, mode, NULL),
1821 gen_rtx_REG (mode, regno));
1822 return;
1823 }
1824
1825 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1826 to the left before storing to memory. Note that the previous test
1827 doesn't handle all cases (e.g. SIZE == 3). */
1828 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1829 {
1830 rtx tem = operand_subword (x, 0, 1, BLKmode);
1831 rtx shift;
1832
1833 if (tem == 0)
1834 abort ();
1835
1836 shift = expand_shift (LSHIFT_EXPR, word_mode,
1837 gen_rtx_REG (word_mode, regno),
1838 build_int_2 ((UNITS_PER_WORD - size)
1839 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1840 emit_move_insn (tem, shift);
1841 return;
1842 }
1843
1844 /* See if the machine can do this with a store multiple insn. */
1845 #ifdef HAVE_store_multiple
1846 if (HAVE_store_multiple)
1847 {
1848 last = get_last_insn ();
1849 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1850 GEN_INT (nregs));
1851 if (pat)
1852 {
1853 emit_insn (pat);
1854 return;
1855 }
1856 else
1857 delete_insns_since (last);
1858 }
1859 #endif
1860
1861 for (i = 0; i < nregs; i++)
1862 {
1863 rtx tem = operand_subword (x, i, 1, BLKmode);
1864
1865 if (tem == 0)
1866 abort ();
1867
1868 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1869 }
1870 }
1871
1872 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1873 registers represented by a PARALLEL. SSIZE represents the total size of
1874 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1875 SRC in bits. */
1876 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1877 the balance will be in what would be the low-order memory addresses, i.e.
1878 left justified for big endian, right justified for little endian. This
1879 happens to be true for the targets currently using this support. If this
1880 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1881 would be needed. */
1882
1883 void
1884 emit_group_load (dst, orig_src, ssize, align)
1885 rtx dst, orig_src;
1886 int align, ssize;
1887 {
1888 rtx *tmps, src;
1889 int start, i;
1890
1891 if (GET_CODE (dst) != PARALLEL)
1892 abort ();
1893
1894 /* Check for a NULL entry, used to indicate that the parameter goes
1895 both on the stack and in registers. */
1896 if (XEXP (XVECEXP (dst, 0, 0), 0))
1897 start = 0;
1898 else
1899 start = 1;
1900
1901 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1902
1903 /* If we won't be loading directly from memory, protect the real source
1904 from strange tricks we might play. */
1905 src = orig_src;
1906 if (GET_CODE (src) != MEM)
1907 {
1908 if (GET_CODE (src) == VOIDmode)
1909 src = gen_reg_rtx (GET_MODE (dst));
1910 else
1911 src = gen_reg_rtx (GET_MODE (orig_src));
1912 emit_move_insn (src, orig_src);
1913 }
1914
1915 /* Process the pieces. */
1916 for (i = start; i < XVECLEN (dst, 0); i++)
1917 {
1918 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1919 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1920 int bytelen = GET_MODE_SIZE (mode);
1921 int shift = 0;
1922
1923 /* Handle trailing fragments that run over the size of the struct. */
1924 if (ssize >= 0 && bytepos + bytelen > ssize)
1925 {
1926 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1927 bytelen = ssize - bytepos;
1928 if (bytelen <= 0)
1929 abort ();
1930 }
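/* For example, an SImode piece at byte 4 of a 6-byte struct has
BYTELEN trimmed from 4 to 2 and SHIFT set to (4 - 2) * 8 == 16. */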
1931
1932 /* Optimize the access just a bit. */
1933 if (GET_CODE (src) == MEM
1934 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1935 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 {
1938 tmps[i] = gen_reg_rtx (mode);
1939 emit_move_insn (tmps[i],
1940 change_address (src, mode,
1941 plus_constant (XEXP (src, 0),
1942 bytepos)));
1943 }
1944 else if (GET_CODE (src) == CONCAT)
1945 {
1946 if (bytepos == 0
1947 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1948 tmps[i] = XEXP (src, 0);
1949 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1951 tmps[i] = XEXP (src, 1);
1952 else
1953 abort ();
1954 }
1955 else
1956 {
1957 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1958 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, align, ssize);
1960 }
1961
1962 if (BYTES_BIG_ENDIAN && shift)
1963 {
1964 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1965 tmps[i], 0, OPTAB_WIDEN);
1966 }
1967 }
1968 emit_queue ();
1969
1970 /* Copy the extracted pieces into the proper (probable) hard regs. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1972 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1973 }
1974
1975 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1976 registers represented by a PARALLEL. SSIZE represents the total size of
1977 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1978
1979 void
1980 emit_group_store (orig_dst, src, ssize, align)
1981 rtx orig_dst, src;
1982 int ssize, align;
1983 {
1984 rtx *tmps, dst;
1985 int start, i;
1986
1987 if (GET_CODE (src) != PARALLEL)
1988 abort ();
1989
1990 /* Check for a NULL entry, used to indicate that the parameter goes
1991 both on the stack and in registers. */
1992 if (XEXP (XVECEXP (src, 0, 0), 0))
1993 start = 0;
1994 else
1995 start = 1;
1996
1997 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
1998
1999 /* Copy the (probable) hard regs into pseudos. */
2000 for (i = start; i < XVECLEN (src, 0); i++)
2001 {
2002 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2003 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2004 emit_move_insn (tmps[i], reg);
2005 }
2006 emit_queue ();
2007
2008 /* If we won't be storing directly into memory, protect the real destination
2009 from strange tricks we might play. */
2010 dst = orig_dst;
2011 if (GET_CODE (dst) == PARALLEL)
2012 {
2013 rtx temp;
2014
2015 /* We can get a PARALLEL dst if there is a conditional expression in
2016 a return statement. In that case, the dst and src are the same,
2017 so no action is necessary. */
2018 if (rtx_equal_p (dst, src))
2019 return;
2020
2021 /* It is unclear if we can ever reach here, but we may as well handle
2022 it. Allocate a temporary, and split this into a store/load to/from
2023 the temporary. */
2024
2025 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2026 emit_group_store (temp, src, ssize, align);
2027 emit_group_load (dst, temp, ssize, align);
2028 return;
2029 }
2030 else if (GET_CODE (dst) != MEM)
2031 {
2032 dst = gen_reg_rtx (GET_MODE (orig_dst));
2033 /* Make life a bit easier for combine. */
2034 emit_move_insn (dst, const0_rtx);
2035 }
2036 else if (! MEM_IN_STRUCT_P (dst))
2037 {
2038 /* store_bit_field requires that memory operations have
2039 mem_in_struct_p set; we might not. */
2040
2041 dst = copy_rtx (orig_dst);
2042 MEM_SET_IN_STRUCT_P (dst, 1);
2043 }
2044
2045 /* Process the pieces. */
2046 for (i = start; i < XVECLEN (src, 0); i++)
2047 {
2048 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2049 enum machine_mode mode = GET_MODE (tmps[i]);
2050 int bytelen = GET_MODE_SIZE (mode);
2051
2052 /* Handle trailing fragments that run over the size of the struct. */
2053 if (ssize >= 0 && bytepos + bytelen > ssize)
2054 {
2055 if (BYTES_BIG_ENDIAN)
2056 {
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2059 tmps[i], 0, OPTAB_WIDEN);
2060 }
2061 bytelen = ssize - bytepos;
2062 }
2063
2064 /* Optimize the access just a bit. */
2065 if (GET_CODE (dst) == MEM
2066 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2067 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2068 && bytelen == GET_MODE_SIZE (mode))
2069 {
2070 emit_move_insn (change_address (dst, mode,
2071 plus_constant (XEXP (dst, 0),
2072 bytepos)),
2073 tmps[i]);
2074 }
2075 else
2076 {
2077 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2079 }
2080 }
2081 emit_queue ();
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2086 }
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment.
2096 */
2097
2098 rtx
2099 copy_blkmode_from_reg (tgtblk, srcreg, type)
2100 rtx tgtblk;
2101 rtx srcreg;
2102 tree type;
2103 {
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2114 }
2115
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2122
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2129 * BITS_PER_UNIT));
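/* For example, a 6-byte structure on a 32-bit big-endian machine
leaves only 2 significant bytes in its second word, so the
correction is 32 - 2 * 8 == 16 bits. */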
2130
2131 /* Copy the structure BITSIZE bits at a time.
2132
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2139 {
2140
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2148 BLKmode);
2149
2150 /* We need a new destination operand each time bitpos is on
2151 a word boundary. */
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154
2155 /* Use xbitpos for the source extraction (right justified) and
2156 bitpos for the destination store (left justified). */
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2161 word_mode,
2162 bitsize / BITS_PER_UNIT,
2163 BITS_PER_WORD),
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 }
2166 return tgtblk;
2167 }
2168
2169
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2172
2173 void
2174 use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2176 {
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort ();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 }
2185
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2188
2189 void
2190 use_regs (call_fusage, regno, nregs)
2191 rtx *call_fusage;
2192 int regno;
2193 int nregs;
2194 {
2195 int i;
2196
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2202 }
2203
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207
2208 void
2209 use_group_regs (call_fusage, regs)
2210 rtx *call_fusage;
2211 rtx regs;
2212 {
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
2225 }
2226 \f
2227 /* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
2229 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2230 we can assume. */
2231
2232 static void
2233 clear_by_pieces (to, len, align)
2234 rtx to;
2235 int len, align;
2236 {
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2242
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2249
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2255
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2257
2258 /* If copying requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2263 {
2264 /* Determine the main mode we'll be using. */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2269
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2271 {
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2275 }
2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2281 }
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2284 }
2285
2286 if (! SLOW_UNALIGNED_ACCESS
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2289
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
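/* For example, assuming MOVE_MAX_PIECES is at least 4 and the
alignment permits SImode stores, a 7-byte block is cleared with one
SImode store, then one HImode store, then one QImode store. */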
2292
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2306 GET_MODE_SIZE (mode)))
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 if (data.len != 0)
2314 abort ();
2315 }
2316
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2320
2321 static void
2322 clear_by_pieces_1 (genfun, mode, data)
2323 rtx (*genfun) PROTO ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2326 {
2327 register int size = GET_MODE_SIZE (mode);
2328 register rtx to1;
2329
2330 while (data->len >= size)
2331 {
2332 if (data->reverse) data->offset -= size;
2333
2334 to1 = (data->autinc_to
2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2338 data->offset))));
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340
2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343
2344 emit_insn ((*genfun) (to1, const0_rtx));
2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347
2348 if (! data->reverse) data->offset += size;
2349
2350 data->len -= size;
2351 }
2352 }
2353 \f
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2356 the maximum alignment we can assume, measured in bytes.
2357
2358 If we call a function that returns the length of the block, return it. */
2359
2360 rtx
2361 clear_storage (object, size, align)
2362 rtx object;
2363 rtx size;
2364 int align;
2365 {
2366 #ifdef TARGET_MEM_FUNCTIONS
2367 static tree fn;
2368 tree call_expr, arg_list;
2369 #endif
2370 rtx retval = 0;
2371
2372 if (GET_MODE (object) == BLKmode)
2373 {
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2376
2377 if (GET_CODE (size) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2380
2381 else
2382 {
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2386
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2389
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2392 {
2393 enum insn_code code = clrstr_optab[(int) mode];
2394 insn_operand_predicate_fn pred;
2395
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2403 <= (GET_MODE_MASK (mode) >> 1)))
2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2409 {
2410 rtx op1;
2411 rtx last = get_last_insn ();
2412 rtx pat;
2413
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2418
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2420 if (pat)
2421 {
2422 emit_insn (pat);
2423 return 0;
2424 }
2425 else
2426 delete_insns_since (last);
2427 }
2428 }
2429
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2431
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2435
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2439
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2443 emit_queue.
2444
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2451
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2454 #else
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2458 #endif
2459
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2464
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2467
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2470 incorrect code.
2471
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
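/* In effect we expand the equivalent of the C call
memset ((void *) object, 0, size)
so that the target's ordinary calling conventions are used. */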
2474 if (fn == NULL_TREE)
2475 {
2476 tree fntype;
2477
2478 /* This was copied from except.c; I don't know if all this is
2479 necessary in this context or not. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2492 pop_obstacks ();
2493 }
2494
2495 /* We need to make an argument list for the function call.
2496
2497 memset has three arguments: the first is a void * address, the
2498 second an integer with the initialization value, and the last a
2499 size_t byte count for the copy. */
2500 arg_list
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2503 object));
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2517
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2519 #else
2520 emit_library_call (bzero_libfunc, 0,
2521 VOIDmode, 2, object, Pmode, size,
2522 TYPE_MODE (integer_type_node));
2523 #endif
2524 }
2525 }
2526 else
2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528
2529 return retval;
2530 }
2531
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2536
2537 Return the last instruction emitted. */
2538
2539 rtx
2540 emit_move_insn (x, y)
2541 rtx x, y;
2542 {
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2547
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2549 abort ();
2550
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 ;
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2556
2557 /* If X or Y are memory references, verify that their addresses are valid
2558 for the machine. */
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2565
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2568 || (flag_force_addr
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2571
2572 if (mode == BLKmode)
2573 abort ();
2574
2575 return emit_move_insn_1 (x, y);
2576 }
2577
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2581
2582 rtx
2583 emit_move_insn_1 (x, y)
2584 rtx x, y;
2585 {
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2589 int i;
2590
2591 if (mode >= MAX_MACHINE_MODE)
2592 abort ();
2593
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2595 return
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2601 * BITS_PER_UNIT),
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2604 0))
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2607 {
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
2610
2611 /* If this is a stack push, push the highpart first, so it
2612 will be in the argument order.
2613
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2617 {
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2623 gen_imagpart (submode, y)));
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2626 gen_realpart (submode, y)));
2627 #else
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_realpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_imagpart (submode, y)));
2634 #endif
2635 }
2636 else
2637 {
2638 rtx realpart_x, realpart_y;
2639 rtx imagpart_x, imagpart_y;
2640
2641 /* If this is a complex value with each part being smaller than a
2642 word, the usual calling sequence will likely pack the pieces into
2643 a single register. Unfortunately, SUBREG of hard registers only
2644 deals in terms of words, so we have a problem converting input
2645 arguments to the CONCAT of two registers that is used elsewhere
2646 for complex values. If this is before reload, we can copy it into
2647 memory and reload. FIXME, we should see about using extract and
2648 insert on integer registers, but complex short and complex char
2649 variables should be rarely used. */
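/* For instance, a complex char value is 2 * 8 == 16 bits wide
(assuming 8-bit units), so REG_MODE below would be HImode and the
value is bounced through a 16-bit stack temporary. */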
2650 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2651 && (reload_in_progress | reload_completed) == 0)
2652 {
2653 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2654 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2655
2656 if (packed_dest_p || packed_src_p)
2657 {
2658 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2659 ? MODE_FLOAT : MODE_INT);
2660
2661 enum machine_mode reg_mode =
2662 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2663
2664 if (reg_mode != BLKmode)
2665 {
2666 rtx mem = assign_stack_temp (reg_mode,
2667 GET_MODE_SIZE (mode), 0);
2668
2669 rtx cmem = change_address (mem, mode, NULL_RTX);
2670
2671 cfun->cannot_inline = "function uses short complex types";
2672
2673 if (packed_dest_p)
2674 {
2675 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2676 emit_move_insn_1 (cmem, y);
2677 return emit_move_insn_1 (sreg, mem);
2678 }
2679 else
2680 {
2681 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2682 emit_move_insn_1 (mem, sreg);
2683 return emit_move_insn_1 (x, cmem);
2684 }
2685 }
2686 }
2687 }
2688
2689 realpart_x = gen_realpart (submode, x);
2690 realpart_y = gen_realpart (submode, y);
2691 imagpart_x = gen_imagpart (submode, x);
2692 imagpart_y = gen_imagpart (submode, y);
2693
2694 /* Show the output dies here. This is necessary for SUBREGs
2695 of pseudos since we cannot track their lifetimes correctly;
2696 hard regs shouldn't appear here except as return values.
2697 We never want to emit such a clobber after reload. */
2698 if (x != y
2699 && ! (reload_in_progress || reload_completed)
2700 && (GET_CODE (realpart_x) == SUBREG
2701 || GET_CODE (imagpart_x) == SUBREG))
2702 {
2703 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2704 }
2705
2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2707 (realpart_x, realpart_y));
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (imagpart_x, imagpart_y));
2710 }
2711
2712 return get_last_insn ();
2713 }
2714
2715 /* This will handle any multi-word mode that lacks a move_insn pattern.
2716 However, you will get better code if you define such patterns,
2717 even if they must turn into multiple assembler instructions. */
2718 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2719 {
2720 rtx last_insn = 0;
2721 rtx seq;
2722 int need_clobber;
2723
2724 #ifdef PUSH_ROUNDING
2725
2726 /* If X is a push on the stack, do the push now and replace
2727 X with a reference to the stack pointer. */
2728 if (push_operand (x, GET_MODE (x)))
2729 {
2730 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2731 x = change_address (x, VOIDmode, stack_pointer_rtx);
2732 }
2733 #endif
2734
2735 start_sequence ();
2736
2737 need_clobber = 0;
2738 for (i = 0;
2739 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2740 i++)
2741 {
2742 rtx xpart = operand_subword (x, i, 1, mode);
2743 rtx ypart = operand_subword (y, i, 1, mode);
2744
2745 /* If we can't get a part of Y, put Y into memory if it is a
2746 constant. Otherwise, force it into a register. If we still
2747 can't get a part of Y, abort. */
2748 if (ypart == 0 && CONSTANT_P (y))
2749 {
2750 y = force_const_mem (mode, y);
2751 ypart = operand_subword (y, i, 1, mode);
2752 }
2753 else if (ypart == 0)
2754 ypart = operand_subword_force (y, i, mode);
2755
2756 if (xpart == 0 || ypart == 0)
2757 abort ();
2758
2759 need_clobber |= (GET_CODE (xpart) == SUBREG);
2760
2761 last_insn = emit_move_insn (xpart, ypart);
2762 }
2763
2764 seq = gen_sequence ();
2765 end_sequence ();
2766
2767 /* Show the output dies here. This is necessary for SUBREGs
2768 of pseudos since we cannot track their lifetimes correctly;
2769 hard regs shouldn't appear here except as return values.
2770 We never want to emit such a clobber after reload. */
2771 if (x != y
2772 && ! (reload_in_progress || reload_completed)
2773 && need_clobber != 0)
2774 {
2775 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2776 }
2777
2778 emit_insn (seq);
2779
2780 return last_insn;
2781 }
2782 else
2783 abort ();
2784 }
2785 \f
2786 /* Pushing data onto the stack. */
2787
2788 /* Push a block of length SIZE (perhaps variable)
2789 and return an rtx to address the beginning of the block.
2790 Note that it is not possible for the value returned to be a QUEUED.
2791 The value may be virtual_outgoing_args_rtx.
2792
2793 EXTRA is the number of bytes of padding to push in addition to SIZE.
2794 BELOW nonzero means this padding comes at low addresses;
2795 otherwise, the padding comes at high addresses. */
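/* For instance, push_block (GEN_INT (16), 0, 0) just adjusts the
stack pointer to make room for 16 bytes and returns an rtx
addressing the start of that block. */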
2796
2797 rtx
2798 push_block (size, extra, below)
2799 rtx size;
2800 int extra, below;
2801 {
2802 register rtx temp;
2803
2804 size = convert_modes (Pmode, ptr_mode, size, 1);
2805 if (CONSTANT_P (size))
2806 anti_adjust_stack (plus_constant (size, extra));
2807 else if (GET_CODE (size) == REG && extra == 0)
2808 anti_adjust_stack (size);
2809 else
2810 {
2811 rtx temp = copy_to_mode_reg (Pmode, size);
2812 if (extra != 0)
2813 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2814 temp, 0, OPTAB_LIB_WIDEN);
2815 anti_adjust_stack (temp);
2816 }
2817
2818 #if defined (STACK_GROWS_DOWNWARD) \
2819 || (defined (ARGS_GROW_DOWNWARD) \
2820 && !defined (ACCUMULATE_OUTGOING_ARGS))
2821
2822 /* Return the lowest stack address when STACK or ARGS grow downward and
2823 we are not accumulating outgoing arguments (the c4x port uses such
2824 conventions). */
2825 temp = virtual_outgoing_args_rtx;
2826 if (extra != 0 && below)
2827 temp = plus_constant (temp, extra);
2828 #else
2829 if (GET_CODE (size) == CONST_INT)
2830 temp = plus_constant (virtual_outgoing_args_rtx,
2831 - INTVAL (size) - (below ? 0 : extra));
2832 else if (extra != 0 && !below)
2833 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2834 negate_rtx (Pmode, plus_constant (size, extra)));
2835 else
2836 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2837 negate_rtx (Pmode, size));
2838 #endif
2839
2840 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2841 }
2842
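/* Return an rtx suitable as the address of a push onto the stack:
STACK_PUSH_CODE applied to the stack pointer, e.g. (pre_dec (reg sp))
on a target where STACK_PUSH_CODE is PRE_DEC. */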
2843 rtx
2844 gen_push_operand ()
2845 {
2846 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2847 }
2848
2849 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2850 block of SIZE bytes. */
2851
2852 static rtx
2853 get_push_address (size)
2854 int size;
2855 {
2856 register rtx temp;
2857
2858 if (STACK_PUSH_CODE == POST_DEC)
2859 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2860 else if (STACK_PUSH_CODE == POST_INC)
2861 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else
2863 temp = stack_pointer_rtx;
2864
2865 return copy_to_reg (temp);
2866 }
2867
2868 /* Generate code to push X onto the stack, assuming it has mode MODE and
2869 type TYPE.
2870 MODE is redundant except when X is a CONST_INT (since they don't
2871 carry mode info).
2872 SIZE is an rtx for the size of data to be copied (in bytes),
2873 needed only if X is BLKmode.
2874
2875 ALIGN (in bytes) is the maximum alignment we can assume.
2876
2877 If PARTIAL and REG are both nonzero, then copy that many of the first
2878 words of X into registers starting with REG, and push the rest of X.
2879 The amount of space pushed is decreased by PARTIAL words,
2880 rounded *down* to a multiple of PARM_BOUNDARY.
2881 REG must be a hard register in this case.
2882 If REG is zero but PARTIAL is not, take all other actions for an
2883 argument partially in registers, but do not actually load any
2884 registers.
2885
2886 EXTRA is the amount in bytes of extra space to leave next to this arg.
2887 This is ignored if an argument block has already been allocated.
2888
2889 On a machine that lacks real push insns, ARGS_ADDR is the address of
2890 the bottom of the argument block for this call. We use indexing off there
2891 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2892 argument block has not been preallocated.
2893
2894 ARGS_SO_FAR is the size of args previously pushed for this call.
2895
2896 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2897 for arguments passed in registers. If nonzero, it will be the number
2898 of bytes required. */
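/* For example, with PARTIAL == 2 the first two words of X end up in
REG and the next consecutive hard register, and only the remainder
of X is actually copied to the stack. */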
2899
2900 void
2901 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2902 args_addr, args_so_far, reg_parm_stack_space,
2903 alignment_pad)
2904 register rtx x;
2905 enum machine_mode mode;
2906 tree type;
2907 rtx size;
2908 int align;
2909 int partial;
2910 rtx reg;
2911 int extra;
2912 rtx args_addr;
2913 rtx args_so_far;
2914 int reg_parm_stack_space;
2915 rtx alignment_pad;
2916 {
2917 rtx xinner;
2918 enum direction stack_direction
2919 #ifdef STACK_GROWS_DOWNWARD
2920 = downward;
2921 #else
2922 = upward;
2923 #endif
2924
2925 /* Decide where to pad the argument: `downward' for below,
2926 `upward' for above, or `none' for don't pad it.
2927 Default is below for small data on big-endian machines; else above. */
2928 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2929
2930 /* Invert direction if stack is post-update. */
2931 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2932 if (where_pad != none)
2933 where_pad = (where_pad == downward ? upward : downward);
2934
2935 xinner = x = protect_from_queue (x, 0);
2936
2937 if (mode == BLKmode)
2938 {
2939 /* Copy a block into the stack, entirely or partially. */
2940
2941 register rtx temp;
2942 int used = partial * UNITS_PER_WORD;
2943 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2944 int skip;
2945
2946 if (size == 0)
2947 abort ();
2948
2949 used -= offset;
2950
2951 /* USED is now the # of bytes we need not copy to the stack
2952 because registers will take care of them. */
2953
2954 if (partial != 0)
2955 xinner = change_address (xinner, BLKmode,
2956 plus_constant (XEXP (xinner, 0), used));
2957
2958 /* If the partial register-part of the arg counts in its stack size,
2959 skip the part of stack space corresponding to the registers.
2960 Otherwise, start copying to the beginning of the stack space,
2961 by setting SKIP to 0. */
2962 skip = (reg_parm_stack_space == 0) ? 0 : used;
2963
2964 #ifdef PUSH_ROUNDING
2965 /* Do it with several push insns if that doesn't take lots of insns
2966 and if there is no difficulty with push insns that skip bytes
2967 on the stack for alignment purposes. */
2968 if (args_addr == 0
2969 && GET_CODE (size) == CONST_INT
2970 && skip == 0
2971 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2972 /* Here we avoid the case of a structure whose weak alignment
2973 forces many pushes of a small amount of data,
2974 and such small pushes do rounding that causes trouble. */
2975 && ((! SLOW_UNALIGNED_ACCESS)
2976 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2977 || PUSH_ROUNDING (align) == align)
2978 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2979 {
2980 /* Push padding now if padding above and stack grows down,
2981 or if padding below and stack grows up.
2982 But if space already allocated, this has already been done. */
2983 if (extra && args_addr == 0
2984 && where_pad != none && where_pad != stack_direction)
2985 anti_adjust_stack (GEN_INT (extra));
2986
2987 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2988 INTVAL (size) - used, align);
2989
2990 if (current_function_check_memory_usage && ! in_check_memory_usage)
2991 {
2992 rtx temp;
2993
2994 in_check_memory_usage = 1;
2995 temp = get_push_address (INTVAL(size) - used);
2996 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2997 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2998 temp, Pmode,
2999 XEXP (xinner, 0), Pmode,
3000 GEN_INT (INTVAL(size) - used),
3001 TYPE_MODE (sizetype));
3002 else
3003 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3004 temp, Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype),
3007 GEN_INT (MEMORY_USE_RW),
3008 TYPE_MODE (integer_type_node));
3009 in_check_memory_usage = 0;
3010 }
3011 }
3012 else
3013 #endif /* PUSH_ROUNDING */
3014 {
3015 /* Otherwise make space on the stack and copy the data
3016 to the address of that space. */
3017
3018 /* Deduct words put into registers from the size we must copy. */
3019 if (partial != 0)
3020 {
3021 if (GET_CODE (size) == CONST_INT)
3022 size = GEN_INT (INTVAL (size) - used);
3023 else
3024 size = expand_binop (GET_MODE (size), sub_optab, size,
3025 GEN_INT (used), NULL_RTX, 0,
3026 OPTAB_LIB_WIDEN);
3027 }
3028
3029 /* Get the address of the stack space.
3030 In this case, we do not deal with EXTRA separately.
3031 A single stack adjust will do. */
3032 if (! args_addr)
3033 {
3034 temp = push_block (size, extra, where_pad == downward);
3035 extra = 0;
3036 }
3037 else if (GET_CODE (args_so_far) == CONST_INT)
3038 temp = memory_address (BLKmode,
3039 plus_constant (args_addr,
3040 skip + INTVAL (args_so_far)));
3041 else
3042 temp = memory_address (BLKmode,
3043 plus_constant (gen_rtx_PLUS (Pmode,
3044 args_addr,
3045 args_so_far),
3046 skip));
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 {
3049 rtx target;
3050
3051 in_check_memory_usage = 1;
3052 target = copy_to_reg (temp);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3055 target, Pmode,
3056 XEXP (xinner, 0), Pmode,
3057 size, TYPE_MODE (sizetype));
3058 else
3059 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3060 target, Pmode,
3061 size, TYPE_MODE (sizetype),
3062 GEN_INT (MEMORY_USE_RW),
3063 TYPE_MODE (integer_type_node));
3064 in_check_memory_usage = 0;
3065 }
3066
3067 /* TEMP is the address of the block. Copy the data there. */
3068 if (GET_CODE (size) == CONST_INT
3069 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3070 {
3071 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3072 INTVAL (size), align);
3073 goto ret;
3074 }
3075 else
3076 {
3077 rtx opalign = GEN_INT (align);
3078 enum machine_mode mode;
3079 rtx target = gen_rtx_MEM (BLKmode, temp);
3080
3081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3082 mode != VOIDmode;
3083 mode = GET_MODE_WIDER_MODE (mode))
3084 {
3085 enum insn_code code = movstr_optab[(int) mode];
3086 insn_operand_predicate_fn pred;
3087
3088 if (code != CODE_FOR_nothing
3089 && ((GET_CODE (size) == CONST_INT
3090 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3091 <= (GET_MODE_MASK (mode) >> 1)))
3092 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3093 && (!(pred = insn_data[(int) code].operand[0].predicate)
3094 || ((*pred) (target, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[1].predicate)
3096 || ((*pred) (xinner, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[3].predicate)
3098 || ((*pred) (opalign, VOIDmode))))
3099 {
3100 rtx op2 = convert_to_mode (mode, size, 1);
3101 rtx last = get_last_insn ();
3102 rtx pat;
3103
3104 pred = insn_data[(int) code].operand[2].predicate;
3105 if (pred != 0 && ! (*pred) (op2, mode))
3106 op2 = copy_to_mode_reg (mode, op2);
3107
3108 pat = GEN_FCN ((int) code) (target, xinner,
3109 op2, opalign);
3110 if (pat)
3111 {
3112 emit_insn (pat);
3113 goto ret;
3114 }
3115 else
3116 delete_insns_since (last);
3117 }
3118 }
3119 }
3120
3121 #ifndef ACCUMULATE_OUTGOING_ARGS
3122 /* If the source is referenced relative to the stack pointer,
3123 copy it to another register to stabilize it. We do not need
3124 to do this if we know that we won't be changing sp. */
3125
3126 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3127 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3128 temp = copy_to_reg (temp);
3129 #endif
3130
3131 /* Make inhibit_defer_pop nonzero around the library call
3132 to force it to pop the bcopy-arguments right away. */
3133 NO_DEFER_POP;
3134 #ifdef TARGET_MEM_FUNCTIONS
3135 emit_library_call (memcpy_libfunc, 0,
3136 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3137 convert_to_mode (TYPE_MODE (sizetype),
3138 size, TREE_UNSIGNED (sizetype)),
3139 TYPE_MODE (sizetype));
3140 #else
3141 emit_library_call (bcopy_libfunc, 0,
3142 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3143 convert_to_mode (TYPE_MODE (integer_type_node),
3144 size,
3145 TREE_UNSIGNED (integer_type_node)),
3146 TYPE_MODE (integer_type_node));
3147 #endif
3148 OK_DEFER_POP;
3149 }
3150 }
3151 else if (partial > 0)
3152 {
3153 /* Scalar partly in registers. */
3154
3155 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3156 int i;
3157 int not_stack;
3158 /* # words of start of argument
3159 that we must make space for but need not store. */
3160 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3161 int args_offset = INTVAL (args_so_far);
3162 int skip;
3163
3164 /* Push padding now if padding above and stack grows down,
3165 or if padding below and stack grows up.
3166 But if space already allocated, this has already been done. */
3167 if (extra && args_addr == 0
3168 && where_pad != none && where_pad != stack_direction)
3169 anti_adjust_stack (GEN_INT (extra));
3170
3171 /* If we make space by pushing it, we might as well push
3172 the real data. Otherwise, we can leave OFFSET nonzero
3173 and leave the space uninitialized. */
3174 if (args_addr == 0)
3175 offset = 0;
3176
3177 /* Now NOT_STACK gets the number of words that we don't need to
3178 allocate on the stack. */
3179 not_stack = partial - offset;
3180
3181 /* If the partial register-part of the arg counts in its stack size,
3182 skip the part of stack space corresponding to the registers.
3183 Otherwise, start copying to the beginning of the stack space,
3184 by setting SKIP to 0. */
3185 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3186
3187 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3188 x = validize_mem (force_const_mem (mode, x));
3189
3190 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3191 SUBREGs of such registers are not allowed. */
3192 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3193 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3194 x = copy_to_reg (x);
3195
3196 /* Loop over all the words allocated on the stack for this arg. */
3197 /* We can do it by words, because any scalar bigger than a word
3198 has a size a multiple of a word. */
3199 #ifndef PUSH_ARGS_REVERSED
3200 for (i = not_stack; i < size; i++)
3201 #else
3202 for (i = size - 1; i >= not_stack; i--)
3203 #endif
3204 if (i >= not_stack + offset)
3205 emit_push_insn (operand_subword_force (x, i, mode),
3206 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3207 0, args_addr,
3208 GEN_INT (args_offset + ((i - not_stack + skip)
3209 * UNITS_PER_WORD)),
3210 reg_parm_stack_space, alignment_pad);
3211 }
3212 else
3213 {
3214 rtx addr;
3215 rtx target = NULL_RTX;
3216
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
3222 anti_adjust_stack (GEN_INT (extra));
3223
3224 #ifdef PUSH_ROUNDING
3225 if (args_addr == 0)
3226 addr = gen_push_operand ();
3227 else
3228 #endif
3229 {
3230 if (GET_CODE (args_so_far) == CONST_INT)
3231 addr
3232 = memory_address (mode,
3233 plus_constant (args_addr,
3234 INTVAL (args_so_far)));
3235 else
3236 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3237 args_so_far));
3238 target = addr;
3239 }
3240
3241 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3242
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3244 {
3245 in_check_memory_usage = 1;
3246 if (target == 0)
3247 target = get_push_address (GET_MODE_SIZE (mode));
3248
3249 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3250 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3251 target, Pmode,
3252 XEXP (x, 0), Pmode,
3253 GEN_INT (GET_MODE_SIZE (mode)),
3254 TYPE_MODE (sizetype));
3255 else
3256 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3257 target, Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype),
3260 GEN_INT (MEMORY_USE_RW),
3261 TYPE_MODE (integer_type_node));
3262 in_check_memory_usage = 0;
3263 }
3264 }
3265
3266 ret:
3267 /* If part should go in registers, copy that part
3268 into the appropriate registers. Do this now, at the end,
3269 since mem-to-mem copies above may do function calls. */
3270 if (partial > 0 && reg != 0)
3271 {
3272 /* Handle calls that pass values in multiple non-contiguous locations.
3273 The Irix 6 ABI has examples of this. */
3274 if (GET_CODE (reg) == PARALLEL)
3275 emit_group_load (reg, x, -1, align); /* ??? size? */
3276 else
3277 move_block_to_reg (REGNO (reg), x, partial, mode);
3278 }
3279
3280 if (extra && args_addr == 0 && where_pad == stack_direction)
3281 anti_adjust_stack (GEN_INT (extra));
3282
3283 if (alignment_pad)
3284 anti_adjust_stack (alignment_pad);
3285 }
3286 \f
3287 /* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
3292
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
3296 We now use WANT_VALUE to decide whether to do this. */
3297
3298 rtx
3299 expand_assignment (to, from, want_value, suggest_reg)
3300 tree to, from;
3301 int want_value;
3302 int suggest_reg ATTRIBUTE_UNUSED;
3303 {
3304 register rtx to_rtx = 0;
3305 rtx result;
3306
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3308
3309 if (TREE_CODE (to) == ERROR_MARK)
3310 {
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3313 }
3314
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, has the same
3319 problem. */
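/* For instance, for an assignment such as s.f = x where F is a
bit-field, the field is located with get_inner_reference below and
stored with store_field rather than through a simple MEM. */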
3320
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
3323 {
3324 enum machine_mode mode1;
3325 int bitsize;
3326 int bitpos;
3327 tree offset;
3328 int unsignedp;
3329 int volatilep = 0;
3330 tree tem;
3331 int alignment;
3332
3333 push_temp_slots ();
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
3336
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3339
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3342
3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3344 if (offset != 0)
3345 {
3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3347
3348 if (GET_CODE (to_rtx) != MEM)
3349 abort ();
3350
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3352 {
3353 #ifdef POINTERS_EXTEND_UNSIGNED
3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3355 #else
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3357 #endif
3358 }
3359
3360 /* A constant address in TO_RTX can have VOIDmode; we must not try
3361 to call force_reg in that case. Avoid that case. */
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3365 && bitsize
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 {
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3372 (bitpos /
3373 BITS_PER_UNIT)));
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3375 to_rtx = temp;
3376 else
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3379 XEXP (temp, 0)));
3380 bitpos = 0;
3381 }
3382
3383 to_rtx = change_address (to_rtx, VOIDmode,
3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3385 force_reg (ptr_mode,
3386 offset_rtx)));
3387 }
3388
3389 if (volatilep)
3390 {
3391 if (GET_CODE (to_rtx) == MEM)
3392 {
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3396 if (offset == 0)
3397 to_rtx = copy_rtx (to_rtx);
3398
3399 MEM_VOLATILE_P (to_rtx) = 1;
3400 }
3401 #if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3404 else
3405 abort ();
3406 #endif
3407 }
3408
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 {
3412 if (offset == 0)
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3416 }
3417
3418 /* Check the access. */
3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3420 {
3421 rtx to_addr;
3422 int size;
3423 int best_mode_size;
3424 enum machine_mode best_mode;
3425
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3428 mode1, volatilep);
3429 if (best_mode == VOIDmode)
3430 best_mode = QImode;
3431
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
3436
3437 /* Check the access right of the pointer. */
3438 if (size)
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3440 to_addr, Pmode,
3441 GEN_INT (size), TYPE_MODE (sizetype),
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
3444 }
3445
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3447 (want_value
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3450 : VOIDmode),
3451 unsignedp,
3452 /* Required alignment of containing datum. */
3453 alignment,
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
3456 preserve_temp_slots (result);
3457 free_temp_slots ();
3458 pop_temp_slots ();
3459
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3464 result,
3465 TREE_UNSIGNED (TREE_TYPE (to)))
3466 : NULL_RTX);
3467 }
3468
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
3473 requires loading up part of an address in a separate insn.
3474
3475 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3476 a promoted variable where the zero- or sign- extension needs to be done.
3477 Handling this in the normal way is safe because no computation is done
3478 before the call. */
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3481 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3482 {
3483 rtx value;
3484
3485 push_temp_slots ();
3486 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3487 if (to_rtx == 0)
3488 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3489
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 if (GET_CODE (to_rtx) == PARALLEL)
3493 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3494 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3495 else if (GET_MODE (to_rtx) == BLKmode)
3496 emit_block_move (to_rtx, value, expr_size (from),
3497 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3498 else
3499 {
3500 #ifdef POINTERS_EXTEND_UNSIGNED
3501 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3502 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3503 value = convert_memory_address (GET_MODE (to_rtx), value);
3504 #endif
3505 emit_move_insn (to_rtx, value);
3506 }
3507 preserve_temp_slots (to_rtx);
3508 free_temp_slots ();
3509 pop_temp_slots ();
3510 return want_value ? to_rtx : NULL_RTX;
3511 }
3512
3513 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3514 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515
3516 if (to_rtx == 0)
3517 {
3518 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3519 if (GET_CODE (to_rtx) == MEM)
3520 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3521 }
3522
3523 /* Don't move directly into a return register. */
3524 if (TREE_CODE (to) == RESULT_DECL
3525 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3526 {
3527 rtx temp;
3528
3529 push_temp_slots ();
3530 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3531
3532 if (GET_CODE (to_rtx) == PARALLEL)
3533 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3534 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3535 else
3536 emit_move_insn (to_rtx, temp);
3537
3538 preserve_temp_slots (to_rtx);
3539 free_temp_slots ();
3540 pop_temp_slots ();
3541 return want_value ? to_rtx : NULL_RTX;
3542 }
3543
3544 /* In case we are returning the contents of an object which overlaps
3545 the place the value is being stored, use a safe function when copying
3546 a value through a pointer into a structure value return block. */
3547 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3548 && current_function_returns_struct
3549 && !current_function_returns_pcc_struct)
3550 {
3551 rtx from_rtx, size;
3552
3553 push_temp_slots ();
3554 size = expr_size (from);
3555 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3556 EXPAND_MEMORY_USE_DONT);
3557
3558 /* Copy the rights of the bitmap. */
3559 if (current_function_check_memory_usage)
3560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3561 XEXP (to_rtx, 0), Pmode,
3562 XEXP (from_rtx, 0), Pmode,
3563 convert_to_mode (TYPE_MODE (sizetype),
3564 size, TREE_UNSIGNED (sizetype)),
3565 TYPE_MODE (sizetype));
3566
3567 #ifdef TARGET_MEM_FUNCTIONS
3568 emit_library_call (memcpy_libfunc, 0,
3569 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3570 XEXP (from_rtx, 0), Pmode,
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
3573 TYPE_MODE (sizetype));
3574 #else
3575 emit_library_call (bcopy_libfunc, 0,
3576 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3577 XEXP (to_rtx, 0), Pmode,
3578 convert_to_mode (TYPE_MODE (integer_type_node),
3579 size, TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
3581 #endif
3582
3583 preserve_temp_slots (to_rtx);
3584 free_temp_slots ();
3585 pop_temp_slots ();
3586 return want_value ? to_rtx : NULL_RTX;
3587 }
3588
3589 /* Compute FROM and store the value in the rtx we got. */
3590
3591 push_temp_slots ();
3592 result = store_expr (from, to_rtx, want_value);
3593 preserve_temp_slots (result);
3594 free_temp_slots ();
3595 pop_temp_slots ();
3596 return want_value ? result : NULL_RTX;
3597 }
3598
3599 /* Generate code for computing expression EXP,
3600 and storing the value into TARGET.
3601 TARGET may contain a QUEUED rtx.
3602
3603 If WANT_VALUE is nonzero, return a copy of the value
3604 not in TARGET, so that we can be sure to use the proper
3605 value in a containing expression even if TARGET has something
3606 else stored in it. If possible, we copy the value through a pseudo
3607 and return that pseudo. Or, if the value is constant, we try to
3608 return the constant. In some cases, we return a pseudo
3609 copied *from* TARGET.
3610
3611 If the mode is BLKmode then we may return TARGET itself.
3612 It turns out that in BLKmode it doesn't cause a problem,
3613 because C has no operators that could combine two different
3614 assignments into the same BLKmode object with different values
3615 with no sequence point. Will other languages need this to
3616 be more thorough?
3617
3618 If WANT_VALUE is 0, we return NULL, to make sure
3619 to catch quickly any cases where the caller uses the value
3620 and fails to set WANT_VALUE. */
3621
3622 rtx
3623 store_expr (exp, target, want_value)
3624 register tree exp;
3625 register rtx target;
3626 int want_value;
3627 {
3628 register rtx temp;
3629 int dont_return_target = 0;
3630
3631 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 {
3633 /* Perform first part of compound expression, then assign from second
3634 part. */
3635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3636 emit_queue ();
3637 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3638 }
3639 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 {
3641 /* For conditional expression, get safe form of the target. Then
3642 test the condition, doing the appropriate assignment on either
3643 side. This avoids the creation of unnecessary temporaries.
3644 For non-BLKmode, it is more efficient not to do this. */
3645
3646 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647
3648 emit_queue ();
3649 target = protect_from_queue (target, 1);
3650
3651 do_pending_stack_adjust ();
3652 NO_DEFER_POP;
3653 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3654 start_cleanup_deferral ();
3655 store_expr (TREE_OPERAND (exp, 1), target, 0);
3656 end_cleanup_deferral ();
3657 emit_queue ();
3658 emit_jump_insn (gen_jump (lab2));
3659 emit_barrier ();
3660 emit_label (lab1);
3661 start_cleanup_deferral ();
3662 store_expr (TREE_OPERAND (exp, 2), target, 0);
3663 end_cleanup_deferral ();
3664 emit_queue ();
3665 emit_label (lab2);
3666 OK_DEFER_POP;
3667
3668 return want_value ? target : NULL_RTX;
3669 }
3670 else if (queued_subexp_p (target))
3671 /* If target contains a postincrement, let's not risk
3672 using it as the place to generate the rhs. */
3673 {
3674 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 {
3676 /* Expand EXP into a new pseudo. */
3677 temp = gen_reg_rtx (GET_MODE (target));
3678 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 }
3680 else
3681 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3682
3683 /* If target is volatile, ANSI requires accessing the value
3684 *from* the target, if it is accessed. So make that happen.
3685 In no case return the target itself. */
3686 if (! MEM_VOLATILE_P (target) && want_value)
3687 dont_return_target = 1;
3688 }
3689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3690 && GET_MODE (target) != BLKmode)
3691 /* If target is in memory and caller wants value in a register instead,
3692 arrange that. Pass TARGET as target for expand_expr so that,
3693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3694 We know expand_expr will not use the target in that case.
3695 Don't do this if TARGET is volatile because we are supposed
3696 to write it and then read it. */
3697 {
3698 temp = expand_expr (exp, target, GET_MODE (target), 0);
3699 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3700 temp = copy_to_reg (temp);
3701 dont_return_target = 1;
3702 }
3703 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3704     /* If this is a scalar in a register that is stored in a wider mode
3705 than the declared mode, compute the result into its declared mode
3706 and then convert to the wider mode. Our value is the computed
3707 expression. */
3708 {
3709 /* If we don't want a value, we can do the conversion inside EXP,
3710 which will often result in some optimizations. Do the conversion
3711 in two steps: first change the signedness, if needed, then
3712 the extend. But don't do this if the type of EXP is a subtype
3713 of something else since then the conversion might involve
3714 more than just converting modes. */
3715 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3716 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3717 {
3718 if (TREE_UNSIGNED (TREE_TYPE (exp))
3719 != SUBREG_PROMOTED_UNSIGNED_P (target))
3720 exp
3721 = convert
3722 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3723 TREE_TYPE (exp)),
3724 exp);
3725
3726 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3727 SUBREG_PROMOTED_UNSIGNED_P (target)),
3728 exp);
3729 }
3730
3731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3732
3733 /* If TEMP is a volatile MEM and we want a result value, make
3734 the access now so it gets done only once. Likewise if
3735 it contains TARGET. */
3736 if (GET_CODE (temp) == MEM && want_value
3737 && (MEM_VOLATILE_P (temp)
3738 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3739 temp = copy_to_reg (temp);
3740
3741 /* If TEMP is a VOIDmode constant, use convert_modes to make
3742 sure that we properly convert it. */
3743 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 TYPE_MODE (TREE_TYPE (exp)), temp,
3746 SUBREG_PROMOTED_UNSIGNED_P (target));
3747
3748 convert_move (SUBREG_REG (target), temp,
3749 SUBREG_PROMOTED_UNSIGNED_P (target));
3750
3751 /* If we promoted a constant, change the mode back down to match
3752 target. Otherwise, the caller might get confused by a result whose
3753 mode is larger than expected. */
3754
3755 if (want_value && GET_MODE (temp) != GET_MODE (target)
3756 && GET_MODE (temp) != VOIDmode)
3757 {
3758 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3759 SUBREG_PROMOTED_VAR_P (temp) = 1;
3760 SUBREG_PROMOTED_UNSIGNED_P (temp)
3761 = SUBREG_PROMOTED_UNSIGNED_P (target);
3762 }
3763
3764 return want_value ? temp : NULL_RTX;
3765 }
3766 else
3767 {
3768 temp = expand_expr (exp, target, GET_MODE (target), 0);
3769 /* Return TARGET if it's a specified hardware register.
3770 If TARGET is a volatile mem ref, either return TARGET
3771 or return a reg copied *from* TARGET; ANSI requires this.
3772
3773 Otherwise, if TEMP is not TARGET, return TEMP
3774 if it is constant (for efficiency),
3775 or if we really want the correct value. */
3776 if (!(target && GET_CODE (target) == REG
3777 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3778 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3779 && ! rtx_equal_p (temp, target)
3780 && (CONSTANT_P (temp) || want_value))
3781 dont_return_target = 1;
3782 }
3783
3784 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3785 the same as that of TARGET, adjust the constant. This is needed, for
3786 example, in case it is a CONST_DOUBLE and we want only a word-sized
3787 value. */
3788 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3789 && TREE_CODE (exp) != ERROR_MARK
3790 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3791 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3792 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793
3794 if (current_function_check_memory_usage
3795 && GET_CODE (target) == MEM
3796 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 {
3798 if (GET_CODE (temp) == MEM)
3799 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3800 XEXP (target, 0), Pmode,
3801 XEXP (temp, 0), Pmode,
3802 expr_size (exp), TYPE_MODE (sizetype));
3803 else
3804 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3805 XEXP (target, 0), Pmode,
3806 expr_size (exp), TYPE_MODE (sizetype),
3807 GEN_INT (MEMORY_USE_WO),
3808 TYPE_MODE (integer_type_node));
3809 }
3810
3811 /* If value was not generated in the target, store it there.
3812      Convert the value to TARGET's type first if necessary.  */
3813 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3814 one or both of them are volatile memory refs, we have to distinguish
3815 two cases:
3816 - expand_expr has used TARGET. In this case, we must not generate
3817      another copy.  This can be detected by TEMP and TARGET being
3818      equal according to ==.
3819 - expand_expr has not used TARGET - that means that the source just
3820 happens to have the same RTX form. Since temp will have been created
3821 by expand_expr, it will compare unequal according to == .
3822 We must generate a copy in this case, to reach the correct number
3823 of volatile memory references. */
3824
3825 if ((! rtx_equal_p (temp, target)
3826 || (temp != target && (side_effects_p (temp)
3827 || side_effects_p (target))))
3828 && TREE_CODE (exp) != ERROR_MARK)
3829 {
3830 target = protect_from_queue (target, 1);
3831 if (GET_MODE (temp) != GET_MODE (target)
3832 && GET_MODE (temp) != VOIDmode)
3833 {
3834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3835 if (dont_return_target)
3836 {
3837 /* In this case, we will return TEMP,
3838 so make sure it has the proper mode.
3839 But don't forget to store the value into TARGET. */
3840 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3841 emit_move_insn (target, temp);
3842 }
3843 else
3844 convert_move (target, temp, unsignedp);
3845 }
3846
3847 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 {
3849 /* Handle copying a string constant into an array.
3850 The string constant may be shorter than the array.
3851 So copy just the string's actual length, and clear the rest. */
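	  /* For example (a sketch), for

	       char buf[8] = "abc";

	     the STRING_CST supplies 4 bytes ("abc" plus the trailing
	     null), so we block-copy those 4 bytes into BUF and then
	     clear the remaining 4.  */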
3852 rtx size;
3853 rtx addr;
3854
3855 /* Get the size of the data type of the string,
3856 which is actually the size of the target. */
3857 size = expr_size (exp);
3858 if (GET_CODE (size) == CONST_INT
3859 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3860 emit_block_move (target, temp, size,
3861 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3862 else
3863 {
3864 /* Compute the size of the data to copy from the string. */
3865 tree copy_size
3866 = size_binop (MIN_EXPR,
3867 make_tree (sizetype, size),
3868 convert (sizetype,
3869 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3870 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3871 VOIDmode, 0);
3872 rtx label = 0;
3873
3874 /* Copy that much. */
3875 emit_block_move (target, temp, copy_size_rtx,
3876 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3877
3878 /* Figure out how much is left in TARGET that we have to clear.
3879 Do all calculations in ptr_mode. */
3880
3881 addr = XEXP (target, 0);
3882 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3883
3884 if (GET_CODE (copy_size_rtx) == CONST_INT)
3885 {
3886 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3887 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3888 }
3889 else
3890 {
3891 addr = force_reg (ptr_mode, addr);
3892 addr = expand_binop (ptr_mode, add_optab, addr,
3893 copy_size_rtx, NULL_RTX, 0,
3894 OPTAB_LIB_WIDEN);
3895
3896 size = expand_binop (ptr_mode, sub_optab, size,
3897 copy_size_rtx, NULL_RTX, 0,
3898 OPTAB_LIB_WIDEN);
3899
3900 label = gen_label_rtx ();
3901 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3902 GET_MODE (size), 0, 0, label);
3903 }
3904
3905 if (size != const0_rtx)
3906 {
3907 /* Be sure we can write on ADDR. */
3908 if (current_function_check_memory_usage)
3909 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3910 addr, Pmode,
3911 size, TYPE_MODE (sizetype),
3912 GEN_INT (MEMORY_USE_WO),
3913 TYPE_MODE (integer_type_node));
3914 #ifdef TARGET_MEM_FUNCTIONS
3915 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3916 addr, ptr_mode,
3917 const0_rtx, TYPE_MODE (integer_type_node),
3918 convert_to_mode (TYPE_MODE (sizetype),
3919 size,
3920 TREE_UNSIGNED (sizetype)),
3921 TYPE_MODE (sizetype));
3922 #else
3923 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3924 addr, ptr_mode,
3925 convert_to_mode (TYPE_MODE (integer_type_node),
3926 size,
3927 TREE_UNSIGNED (integer_type_node)),
3928 TYPE_MODE (integer_type_node));
3929 #endif
3930 }
3931
3932 if (label)
3933 emit_label (label);
3934 }
3935 }
3936 /* Handle calls that return values in multiple non-contiguous locations.
3937 The Irix 6 ABI has examples of this. */
3938 else if (GET_CODE (target) == PARALLEL)
3939 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3940 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3941 else if (GET_MODE (temp) == BLKmode)
3942 emit_block_move (target, temp, expr_size (exp),
3943 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3944 else
3945 emit_move_insn (target, temp);
3946 }
3947
3948 /* If we don't want a value, return NULL_RTX. */
3949 if (! want_value)
3950 return NULL_RTX;
3951
3952 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3953 ??? The latter test doesn't seem to make sense. */
3954 else if (dont_return_target && GET_CODE (temp) != MEM)
3955 return temp;
3956
3957 /* Return TARGET itself if it is a hard register. */
3958 else if (want_value && GET_MODE (target) != BLKmode
3959 && ! (GET_CODE (target) == REG
3960 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3961 return copy_to_reg (target);
3962
3963 else
3964 return target;
3965 }
3966 \f
3967 /* Return 1 if EXP just contains zeros. */
3968
3969 static int
3970 is_zeros_p (exp)
3971 tree exp;
3972 {
3973 tree elt;
3974
3975 switch (TREE_CODE (exp))
3976 {
3977 case CONVERT_EXPR:
3978 case NOP_EXPR:
3979 case NON_LVALUE_EXPR:
3980 return is_zeros_p (TREE_OPERAND (exp, 0));
3981
3982 case INTEGER_CST:
3983 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3984
3985 case COMPLEX_CST:
3986 return
3987 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3988
3989 case REAL_CST:
3990 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3991
3992 case CONSTRUCTOR:
3993 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3994 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3995 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3996 if (! is_zeros_p (TREE_VALUE (elt)))
3997 return 0;
3998
3999 return 1;
4000
4001 default:
4002 return 0;
4003 }
4004 }
4005
4006 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
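/* For example, the C initializer { 0, 0, 0, 5 } yields a CONSTRUCTOR
   with three zero elements out of four, so 4 * 3 >= 3 * 4 holds and we
   return 1, whereas { 0, 5, 5, 5 } returns 0.  */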
4007
4008 static int
4009 mostly_zeros_p (exp)
4010 tree exp;
4011 {
4012 if (TREE_CODE (exp) == CONSTRUCTOR)
4013 {
4014 int elts = 0, zeros = 0;
4015 tree elt = CONSTRUCTOR_ELTS (exp);
4016 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4017 {
4018 /* If there are no ranges of true bits, it is all zero. */
4019 return elt == NULL_TREE;
4020 }
4021 for (; elt; elt = TREE_CHAIN (elt))
4022 {
4023 /* We do not handle the case where the index is a RANGE_EXPR,
4024 so the statistic will be somewhat inaccurate.
4025 We do make a more accurate count in store_constructor itself,
4026 	     and since this function is used only for nested array elements,
4027 this should be close enough. */
4028 if (mostly_zeros_p (TREE_VALUE (elt)))
4029 zeros++;
4030 elts++;
4031 }
4032
4033 return 4 * zeros >= 3 * elts;
4034 }
4035
4036 return is_zeros_p (exp);
4037 }
4038 \f
4039 /* Helper function for store_constructor.
4040 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4041 TYPE is the type of the CONSTRUCTOR, not the element type.
4042 ALIGN and CLEARED are as for store_constructor.
4043
4044 This provides a recursive shortcut back to store_constructor when it isn't
4045 necessary to go through store_field. This is so that we can pass through
4046 the cleared field to let store_constructor know that we may not have to
4047 clear a substructure if the outer structure has already been cleared. */
4048
4049 static void
4050 store_constructor_field (target, bitsize, bitpos,
4051 mode, exp, type, align, cleared)
4052 rtx target;
4053 int bitsize, bitpos;
4054 enum machine_mode mode;
4055 tree exp, type;
4056 int align;
4057 int cleared;
4058 {
4059 if (TREE_CODE (exp) == CONSTRUCTOR
4060 && bitpos % BITS_PER_UNIT == 0
4061 /* If we have a non-zero bitpos for a register target, then we just
4062 let store_field do the bitfield handling. This is unlikely to
4063 	 generate unnecessary clear instructions anyway.  */
4064 && (bitpos == 0 || GET_CODE (target) == MEM))
4065 {
4066 if (bitpos != 0)
4067 target
4068 = change_address (target,
4069 GET_MODE (target) == BLKmode
4070 || 0 != (bitpos
4071 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4072 ? BLKmode : VOIDmode,
4073 plus_constant (XEXP (target, 0),
4074 bitpos / BITS_PER_UNIT));
4075 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4076 }
4077 else
4078 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4079 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4080 int_size_in_bytes (type), 0);
4081 }
4082
4083 /* Store the value of constructor EXP into the rtx TARGET.
4084 TARGET is either a REG or a MEM.
4085 ALIGN is the maximum known alignment for TARGET, in bits.
4086    CLEARED is true if TARGET is known to have been zeroed.
4087 SIZE is the number of bytes of TARGET we are allowed to modify: this
4088 may not be the same as the size of EXP if we are assigning to a field
4089 which has been packed to exclude padding bits. */
4090
4091 static void
4092 store_constructor (exp, target, align, cleared, size)
4093 tree exp;
4094 rtx target;
4095 int align;
4096 int cleared;
4097 int size;
4098 {
4099 tree type = TREE_TYPE (exp);
4100 #ifdef WORD_REGISTER_OPERATIONS
4101 rtx exp_size = expr_size (exp);
4102 #endif
4103
4104 /* We know our target cannot conflict, since safe_from_p has been called. */
4105 #if 0
4106 /* Don't try copying piece by piece into a hard register
4107 since that is vulnerable to being clobbered by EXP.
4108 Instead, construct in a pseudo register and then copy it all. */
4109 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4110 {
4111 rtx temp = gen_reg_rtx (GET_MODE (target));
4112 store_constructor (exp, temp, align, cleared, size);
4113 emit_move_insn (target, temp);
4114 return;
4115 }
4116 #endif
4117
4118 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4119 || TREE_CODE (type) == QUAL_UNION_TYPE)
4120 {
4121 register tree elt;
4122
4123 /* Inform later passes that the whole union value is dead. */
4124 if ((TREE_CODE (type) == UNION_TYPE
4125 || TREE_CODE (type) == QUAL_UNION_TYPE)
4126 && ! cleared)
4127 {
4128 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4129
4130 /* If the constructor is empty, clear the union. */
4131 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4132 clear_storage (target, expr_size (exp),
4133 TYPE_ALIGN (type) / BITS_PER_UNIT);
4134 }
4135
4136 /* If we are building a static constructor into a register,
4137 set the initial value as zero so we can fold the value into
4138 a constant. But if more than one register is involved,
4139 this probably loses. */
4140 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4142 {
4143 if (! cleared)
4144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4145
4146 cleared = 1;
4147 }
4148
4149 /* If the constructor has fewer fields than the structure
4150 or if we are initializing the structure to mostly zeros,
4151 clear the whole structure first. */
4152 else if (size > 0
4153 && ((list_length (CONSTRUCTOR_ELTS (exp))
4154 != list_length (TYPE_FIELDS (type)))
4155 || mostly_zeros_p (exp)))
4156 {
4157 if (! cleared)
4158 clear_storage (target, GEN_INT (size),
4159 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4160
4161 cleared = 1;
4162 }
4163 else if (! cleared)
4164 /* Inform later passes that the old value is dead. */
4165 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4166
4167 /* Store each element of the constructor into
4168 the corresponding field of TARGET. */
4169
4170 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4171 {
4172 register tree field = TREE_PURPOSE (elt);
4173 #ifdef WORD_REGISTER_OPERATIONS
4174 tree value = TREE_VALUE (elt);
4175 #endif
4176 register enum machine_mode mode;
4177 int bitsize;
4178 int bitpos = 0;
4179 int unsignedp;
4180 tree pos, constant = 0, offset = 0;
4181 rtx to_rtx = target;
4182
4183 /* Just ignore missing fields.
4184 We cleared the whole structure, above,
4185 if any fields are missing. */
4186 if (field == 0)
4187 continue;
4188
4189 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4190 continue;
4191
4192 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4193 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4194 else
4195 bitsize = -1;
4196
4197 unsignedp = TREE_UNSIGNED (field);
4198 mode = DECL_MODE (field);
4199 if (DECL_BIT_FIELD (field))
4200 mode = VOIDmode;
4201
4202 pos = DECL_FIELD_BITPOS (field);
4203 if (TREE_CODE (pos) == INTEGER_CST)
4204 constant = pos;
4205 else if (TREE_CODE (pos) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4207 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4208 else
4209 offset = pos;
4210
4211 if (constant)
4212 bitpos = TREE_INT_CST_LOW (constant);
4213
4214 if (offset)
4215 {
4216 rtx offset_rtx;
4217
4218 if (contains_placeholder_p (offset))
4219 offset = build (WITH_RECORD_EXPR, sizetype,
4220 offset, make_tree (TREE_TYPE (exp), target));
4221
4222 offset = size_binop (EXACT_DIV_EXPR, offset,
4223 size_int (BITS_PER_UNIT));
4224
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4227 abort ();
4228
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4230 {
4231 #ifdef POINTERS_EXTEND_UNSIGNED
4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4233 #else
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4235 #endif
4236 }
4237
4238 to_rtx
4239 = change_address (to_rtx, VOIDmode,
4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4241 force_reg (ptr_mode,
4242 offset_rtx)));
4243 }
4244
4245 if (TREE_READONLY (field))
4246 {
4247 if (GET_CODE (to_rtx) == MEM)
4248 to_rtx = copy_rtx (to_rtx);
4249
4250 RTX_UNCHANGING_P (to_rtx) = 1;
4251 }
4252
4253 #ifdef WORD_REGISTER_OPERATIONS
4254 /* If this initializes a field that is smaller than a word, at the
4255 start of a word, try to widen it to a full word.
4256 This special case allows us to output C++ member function
4257 initializations in a form that the optimizers can understand. */
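	  /* E.g. a 16-bit integer field being initialized at bit 0 of
	     a word-sized REG target is rewritten below as a full-word
	     store of the (suitably converted, and on big-endian
	     targets left-shifted) constant, so later passes see a
	     plain word move.  (A sketch; the exact shift depends on
	     BYTES_BIG_ENDIAN.)  */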
4258 if (constant
4259 && GET_CODE (target) == REG
4260 && bitsize < BITS_PER_WORD
4261 && bitpos % BITS_PER_WORD == 0
4262 && GET_MODE_CLASS (mode) == MODE_INT
4263 && TREE_CODE (value) == INTEGER_CST
4264 && GET_CODE (exp_size) == CONST_INT
4265 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 {
4267 tree type = TREE_TYPE (value);
4268 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 {
4270 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4271 value = convert (type, value);
4272 }
4273 if (BYTES_BIG_ENDIAN)
4274 value
4275 = fold (build (LSHIFT_EXPR, type, value,
4276 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4277 bitsize = BITS_PER_WORD;
4278 mode = word_mode;
4279 }
4280 #endif
4281 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4282 TREE_VALUE (elt), type,
4283 MIN (align,
4284 DECL_ALIGN (TREE_PURPOSE (elt))),
4285 cleared);
4286 }
4287 }
4288 else if (TREE_CODE (type) == ARRAY_TYPE)
4289 {
4290 register tree elt;
4291 register int i;
4292 int need_to_clear;
4293 tree domain = TYPE_DOMAIN (type);
4294 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4295 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4296 tree elttype = TREE_TYPE (type);
4297
4298 /* If the constructor has fewer elements than the array,
4299 clear the whole array first. Similarly if this is
4300 	 a static constructor of a non-BLKmode object.  */
4301 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4302 need_to_clear = 1;
4303 else
4304 {
4305 HOST_WIDE_INT count = 0, zero_count = 0;
4306 need_to_clear = 0;
4307 /* This loop is a more accurate version of the loop in
4308 mostly_zeros_p (it handles RANGE_EXPR in an index).
4309 It is also needed to check for missing elements. */
4310 for (elt = CONSTRUCTOR_ELTS (exp);
4311 elt != NULL_TREE;
4312 elt = TREE_CHAIN (elt))
4313 {
4314 tree index = TREE_PURPOSE (elt);
4315 HOST_WIDE_INT this_node_count;
4316 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 {
4318 tree lo_index = TREE_OPERAND (index, 0);
4319 tree hi_index = TREE_OPERAND (index, 1);
4320 if (TREE_CODE (lo_index) != INTEGER_CST
4321 || TREE_CODE (hi_index) != INTEGER_CST)
4322 {
4323 need_to_clear = 1;
4324 break;
4325 }
4326 this_node_count = TREE_INT_CST_LOW (hi_index)
4327 - TREE_INT_CST_LOW (lo_index) + 1;
4328 }
4329 else
4330 this_node_count = 1;
4331 count += this_node_count;
4332 if (mostly_zeros_p (TREE_VALUE (elt)))
4333 zero_count += this_node_count;
4334 }
4335 /* Clear the entire array first if there are any missing elements,
4336 or if the incidence of zero elements is >= 75%. */
4337 if (count < maxelt - minelt + 1
4338 || 4 * zero_count >= 3 * count)
4339 need_to_clear = 1;
4340 }
4341 if (need_to_clear && size > 0)
4342 {
4343 if (! cleared)
4344 clear_storage (target, GEN_INT (size),
4345 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4346 cleared = 1;
4347 }
4348 else
4349 /* Inform later passes that the old value is dead. */
4350 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4351
4352 /* Store each element of the constructor into
4353 the corresponding element of TARGET, determined
4354 by counting the elements. */
4355 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4356 elt;
4357 elt = TREE_CHAIN (elt), i++)
4358 {
4359 register enum machine_mode mode;
4360 int bitsize;
4361 int bitpos;
4362 int unsignedp;
4363 tree value = TREE_VALUE (elt);
4364 int align = TYPE_ALIGN (TREE_TYPE (value));
4365 tree index = TREE_PURPOSE (elt);
4366 rtx xtarget = target;
4367
4368 if (cleared && is_zeros_p (value))
4369 continue;
4370
4371 unsignedp = TREE_UNSIGNED (elttype);
4372 mode = TYPE_MODE (elttype);
4373 if (mode == BLKmode)
4374 {
4375 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4376 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4377 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4378 else
4379 bitsize = -1;
4380 }
4381 else
4382 bitsize = GET_MODE_BITSIZE (mode);
4383
4384 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4385 {
4386 tree lo_index = TREE_OPERAND (index, 0);
4387 tree hi_index = TREE_OPERAND (index, 1);
4388 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4389 struct nesting *loop;
4390 HOST_WIDE_INT lo, hi, count;
4391 tree position;
4392
4393 /* If the range is constant and "small", unroll the loop. */
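	      /* E.g. for the GNU C range initializer

		   int a[100] = { [0 ... 1] = 7 };

		 COUNT is 2, so even though the target is a MEM we emit
		 two ordinary stores instead of an indexed loop.  (A
		 sketch; the size test below also unrolls larger
		 constant ranges, up to 40 bytes of stores.)  */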
4394 if (TREE_CODE (lo_index) == INTEGER_CST
4395 && TREE_CODE (hi_index) == INTEGER_CST
4396 && (lo = TREE_INT_CST_LOW (lo_index),
4397 hi = TREE_INT_CST_LOW (hi_index),
4398 count = hi - lo + 1,
4399 (GET_CODE (target) != MEM
4400 || count <= 2
4401 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4402 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4403 <= 40 * 8))))
4404 {
4405 lo -= minelt; hi -= minelt;
4406 for (; lo <= hi; lo++)
4407 {
4408 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4409 store_constructor_field (target, bitsize, bitpos, mode,
4410 value, type, align, cleared);
4411 }
4412 }
4413 else
4414 {
4415 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4416 loop_top = gen_label_rtx ();
4417 loop_end = gen_label_rtx ();
4418
4419 unsignedp = TREE_UNSIGNED (domain);
4420
4421 index = build_decl (VAR_DECL, NULL_TREE, domain);
4422
4423 DECL_RTL (index) = index_r
4424 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4425 &unsignedp, 0));
4426
4427 if (TREE_CODE (value) == SAVE_EXPR
4428 && SAVE_EXPR_RTL (value) == 0)
4429 {
4430 /* Make sure value gets expanded once before the
4431 loop. */
4432 expand_expr (value, const0_rtx, VOIDmode, 0);
4433 emit_queue ();
4434 }
4435 store_expr (lo_index, index_r, 0);
4436 loop = expand_start_loop (0);
4437
4438 /* Assign value to element index. */
4439 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4440 size_int (BITS_PER_UNIT));
4441 position = size_binop (MULT_EXPR,
4442 size_binop (MINUS_EXPR, index,
4443 TYPE_MIN_VALUE (domain)),
4444 position);
4445 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4446 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4447 xtarget = change_address (target, mode, addr);
4448 if (TREE_CODE (value) == CONSTRUCTOR)
4449 store_constructor (value, xtarget, align, cleared,
4450 bitsize / BITS_PER_UNIT);
4451 else
4452 store_expr (value, xtarget, 0);
4453
4454 expand_exit_loop_if_false (loop,
4455 build (LT_EXPR, integer_type_node,
4456 index, hi_index));
4457
4458 expand_increment (build (PREINCREMENT_EXPR,
4459 TREE_TYPE (index),
4460 index, integer_one_node), 0, 0);
4461 expand_end_loop ();
4462 emit_label (loop_end);
4463
4464 	      /* Needed by stupid register allocation, to extend the
4465 lifetime of pseudo-regs used by target past the end
4466 of the loop. */
4467 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4468 }
4469 }
4470 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4471 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4472 {
4473 rtx pos_rtx, addr;
4474 tree position;
4475
4476 if (index == 0)
4477 index = size_int (i);
4478
4479 if (minelt)
4480 index = size_binop (MINUS_EXPR, index,
4481 TYPE_MIN_VALUE (domain));
4482 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4483 size_int (BITS_PER_UNIT));
4484 position = size_binop (MULT_EXPR, index, position);
4485 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4486 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4487 xtarget = change_address (target, mode, addr);
4488 store_expr (value, xtarget, 0);
4489 }
4490 else
4491 {
4492 if (index != 0)
4493 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4494 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4495 else
4496 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4497 store_constructor_field (target, bitsize, bitpos, mode, value,
4498 type, align, cleared);
4499 }
4500 }
4501 }
4502   /* Set constructor assignments.  */
4503 else if (TREE_CODE (type) == SET_TYPE)
4504 {
4505 tree elt = CONSTRUCTOR_ELTS (exp);
4506 int nbytes = int_size_in_bytes (type), nbits;
4507 tree domain = TYPE_DOMAIN (type);
4508 tree domain_min, domain_max, bitlength;
4509
4510 /* The default implementation strategy is to extract the constant
4511 parts of the constructor, use that to initialize the target,
4512 and then "or" in whatever non-constant ranges we need in addition.
4513
4514 If a large set is all zero or all ones, it is
4515 probably better to set it using memset (if available) or bzero.
4516 Also, if a large set has just a single range, it may also be
4517 	 better to first clear the whole set (using bzero/memset) and
4518 	 then set the bits we want.  */
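      /* For instance (a sketch; SET_TYPE constructors come from
	 languages such as Pascal or CHILL), for a set over [0..31]
	 NBITS is 32, and a constant constructor such as [1, 3..5]
	 is assembled below into one word with bits 1, 3, 4 and 5
	 set (the bit numbering depending on BYTES_BIG_ENDIAN) and
	 stored with a single move.  */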
4519
4520 /* Check for all zeros. */
4521 if (elt == NULL_TREE && size > 0)
4522 {
4523 if (!cleared)
4524 clear_storage (target, GEN_INT (size),
4525 TYPE_ALIGN (type) / BITS_PER_UNIT);
4526 return;
4527 }
4528
4529 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4530 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4531 bitlength = size_binop (PLUS_EXPR,
4532 size_binop (MINUS_EXPR, domain_max, domain_min),
4533 size_one_node);
4534
4535 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4536 abort ();
4537 nbits = TREE_INT_CST_LOW (bitlength);
4538
4539 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4540 are "complicated" (more than one range), initialize (the
4541 constant parts) by copying from a constant. */
4542 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4543 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4544 {
4545 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4546 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4547 char *bit_buffer = (char *) alloca (nbits);
4548 HOST_WIDE_INT word = 0;
4549 int bit_pos = 0;
4550 int ibit = 0;
4551 int offset = 0; /* In bytes from beginning of set. */
4552 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4553 for (;;)
4554 {
4555 if (bit_buffer[ibit])
4556 {
4557 if (BYTES_BIG_ENDIAN)
4558 word |= (1 << (set_word_size - 1 - bit_pos));
4559 else
4560 word |= 1 << bit_pos;
4561 }
4562 bit_pos++; ibit++;
4563 if (bit_pos >= set_word_size || ibit == nbits)
4564 {
4565 if (word != 0 || ! cleared)
4566 {
4567 rtx datum = GEN_INT (word);
4568 rtx to_rtx;
4569 /* The assumption here is that it is safe to use
4570 XEXP if the set is multi-word, but not if
4571 it's single-word. */
4572 if (GET_CODE (target) == MEM)
4573 {
4574 to_rtx = plus_constant (XEXP (target, 0), offset);
4575 to_rtx = change_address (target, mode, to_rtx);
4576 }
4577 else if (offset == 0)
4578 to_rtx = target;
4579 else
4580 abort ();
4581 emit_move_insn (to_rtx, datum);
4582 }
4583 if (ibit == nbits)
4584 break;
4585 word = 0;
4586 bit_pos = 0;
4587 offset += set_word_size / BITS_PER_UNIT;
4588 }
4589 }
4590 }
4591 else if (!cleared)
4592 {
4593 /* Don't bother clearing storage if the set is all ones. */
4594 if (TREE_CHAIN (elt) != NULL_TREE
4595 || (TREE_PURPOSE (elt) == NULL_TREE
4596 ? nbits != 1
4597 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4598 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4599 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4600 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4601 != nbits))))
4602 clear_storage (target, expr_size (exp),
4603 TYPE_ALIGN (type) / BITS_PER_UNIT);
4604 }
4605
4606 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4607 {
4608 	  /* Start of range of element, or NULL.  */
4609 tree startbit = TREE_PURPOSE (elt);
4610 	  /* End of range of element, or element value.  */
4611 tree endbit = TREE_VALUE (elt);
4612 #ifdef TARGET_MEM_FUNCTIONS
4613 HOST_WIDE_INT startb, endb;
4614 #endif
4615 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4616
4617 bitlength_rtx = expand_expr (bitlength,
4618 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4619
4620 	  /* Handle a non-range tuple element like [ expr ].  */
4621 if (startbit == NULL_TREE)
4622 {
4623 startbit = save_expr (endbit);
4624 endbit = startbit;
4625 }
4626 startbit = convert (sizetype, startbit);
4627 endbit = convert (sizetype, endbit);
4628 if (! integer_zerop (domain_min))
4629 {
4630 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4631 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4632 }
4633 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4636 EXPAND_CONST_ADDRESS);
4637
4638 if (REG_P (target))
4639 {
4640 targetx = assign_stack_temp (GET_MODE (target),
4641 GET_MODE_SIZE (GET_MODE (target)),
4642 0);
4643 emit_move_insn (targetx, target);
4644 }
4645 else if (GET_CODE (target) == MEM)
4646 targetx = target;
4647 else
4648 abort ();
4649
4650 #ifdef TARGET_MEM_FUNCTIONS
4651 /* Optimization: If startbit and endbit are
4652 constants divisible by BITS_PER_UNIT,
4653 call memset instead. */
4654 if (TREE_CODE (startbit) == INTEGER_CST
4655 && TREE_CODE (endbit) == INTEGER_CST
4656 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4657 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4658 {
4659 emit_library_call (memset_libfunc, 0,
4660 VOIDmode, 3,
4661 plus_constant (XEXP (targetx, 0),
4662 startb / BITS_PER_UNIT),
4663 Pmode,
4664 constm1_rtx, TYPE_MODE (integer_type_node),
4665 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4666 TYPE_MODE (sizetype));
4667 }
4668 else
4669 #endif
4670 {
4671 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4672 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4673 bitlength_rtx, TYPE_MODE (sizetype),
4674 startbit_rtx, TYPE_MODE (sizetype),
4675 endbit_rtx, TYPE_MODE (sizetype));
4676 }
4677 if (REG_P (target))
4678 emit_move_insn (target, targetx);
4679 }
4680 }
4681
4682 else
4683 abort ();
4684 }
4685
4686 /* Store the value of EXP (an expression tree)
4687 into a subfield of TARGET which has mode MODE and occupies
4688 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4689 If MODE is VOIDmode, it means that we are storing into a bit-field.
4690
4691 If VALUE_MODE is VOIDmode, return nothing in particular.
4692 UNSIGNEDP is not used in this case.
4693
4694 Otherwise, return an rtx for the value stored. This rtx
4695 has mode VALUE_MODE if that is convenient to do.
4696 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4697
4698 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4699 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4700
4701 ALIAS_SET is the alias set for the destination. This value will
4702 (in general) be different from that for TARGET, since TARGET is a
4703 reference to the containing structure. */
4704
4705 static rtx
4706 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4707 unsignedp, align, total_size, alias_set)
4708 rtx target;
4709 int bitsize, bitpos;
4710 enum machine_mode mode;
4711 tree exp;
4712 enum machine_mode value_mode;
4713 int unsignedp;
4714 int align;
4715 int total_size;
4716 int alias_set;
4717 {
4718 HOST_WIDE_INT width_mask = 0;
4719
4720 if (TREE_CODE (exp) == ERROR_MARK)
4721 return const0_rtx;
4722
4723 if (bitsize < HOST_BITS_PER_WIDE_INT)
4724 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4725
4726 /* If we are storing into an unaligned field of an aligned union that is
4727 in a register, we may have the mode of TARGET being an integer mode but
4728 MODE == BLKmode. In that case, get an aligned object whose size and
4729 alignment are the same as TARGET and store TARGET into it (we can avoid
4730 the store if the field being stored is the entire width of TARGET). Then
4731 call ourselves recursively to store the field into a BLKmode version of
4732 that object. Finally, load from the object into TARGET. This is not
4733 very efficient in general, but should only be slightly more expensive
4734 than the otherwise-required unaligned accesses. Perhaps this can be
4735 cleaned up later. */
4736
4737 if (mode == BLKmode
4738 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4739 {
4740 rtx object = assign_stack_temp (GET_MODE (target),
4741 GET_MODE_SIZE (GET_MODE (target)), 0);
4742 rtx blk_object = copy_rtx (object);
4743
4744 MEM_SET_IN_STRUCT_P (object, 1);
4745 MEM_SET_IN_STRUCT_P (blk_object, 1);
4746 PUT_MODE (blk_object, BLKmode);
4747
4748 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4749 emit_move_insn (object, target);
4750
4751 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4752 align, total_size, alias_set);
4753
4754 /* Even though we aren't returning target, we need to
4755 give it the updated value. */
4756 emit_move_insn (target, object);
4757
4758 return blk_object;
4759 }
4760
4761 /* If the structure is in a register or if the component
4762 is a bit field, we cannot use addressing to access it.
4763 Use bit-field techniques or SUBREG to store in it. */
4764
4765 if (mode == VOIDmode
4766 || (mode != BLKmode && ! direct_store[(int) mode]
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4768 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4769 || GET_CODE (target) == REG
4770 || GET_CODE (target) == SUBREG
4771 /* If the field isn't aligned enough to store as an ordinary memref,
4772 store it as a bit field. */
4773 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
4774 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4775 || bitpos % GET_MODE_ALIGNMENT (mode)))
4776 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
4777 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4778 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4779 /* If the RHS and field are a constant size and the size of the
4780 RHS isn't the same size as the bitfield, we must use bitfield
4781 operations. */
4782 || ((bitsize >= 0
4783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4784 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4785 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4786 {
4787 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4788
4789 /* If BITSIZE is narrower than the size of the type of EXP
4790 we will be narrowing TEMP. Normally, what's wanted are the
4791 low-order bits. However, if EXP's type is a record and this is
4792 	 a big-endian machine, we want the upper BITSIZE bits.  */
4793 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4794 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4795 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4796 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4797 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4798 - bitsize),
4799 temp, 1);
4800
4801 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4802 MODE. */
4803 if (mode != VOIDmode && mode != BLKmode
4804 && mode != TYPE_MODE (TREE_TYPE (exp)))
4805 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4806
4807 /* If the modes of TARGET and TEMP are both BLKmode, both
4808 must be in memory and BITPOS must be aligned on a byte
4809 boundary. If so, we simply do a block copy. */
4810 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4811 {
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
4818 bitpos / BITS_PER_UNIT));
4819
4820 /* Find an alignment that is consistent with the bit position. */
4821 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4822 align >>= 1;
4823
4824 emit_block_move (target, temp,
4825 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4826 / BITS_PER_UNIT),
4827 align);
4828
4829 return value_mode == VOIDmode ? const0_rtx : target;
4830 }
4831
4832 /* Store the value in the bitfield. */
4833 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4834 if (value_mode != VOIDmode)
4835 {
4836 /* The caller wants an rtx for the value. */
4837 /* If possible, avoid refetching from the bitfield itself. */
4838 if (width_mask != 0
4839 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4840 {
4841 tree count;
4842 enum machine_mode tmode;
4843
4844 if (unsignedp)
4845 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4846 tmode = GET_MODE (temp);
4847 if (tmode == VOIDmode)
4848 tmode = value_mode;
4849 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4850 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4851 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4852 }
4853 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4854 NULL_RTX, value_mode, 0, align,
4855 total_size);
4856 }
4857 return const0_rtx;
4858 }
4859 else
4860 {
4861 rtx addr = XEXP (target, 0);
4862 rtx to_rtx;
4863
4864 /* If a value is wanted, it must be the lhs;
4865 so make the address stable for multiple use. */
4866
4867 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4868 && ! CONSTANT_ADDRESS_P (addr)
4869 /* A frame-pointer reference is already stable. */
4870 && ! (GET_CODE (addr) == PLUS
4871 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4872 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4873 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4874 addr = copy_to_reg (addr);
4875
4876 /* Now build a reference to just the desired component. */
4877
4878 to_rtx = copy_rtx (change_address (target, mode,
4879 plus_constant (addr,
4880 (bitpos
4881 / BITS_PER_UNIT))));
4882 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4883 MEM_ALIAS_SET (to_rtx) = alias_set;
4884
4885 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4886 }
4887 }
4888 \f
4889 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4890 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4891 ARRAY_REFs and find the ultimate containing object, which we return.
4892
4893 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4894 bit position, and *PUNSIGNEDP to the signedness of the field.
4895 If the position of the field is variable, we store a tree
4896 giving the variable offset (in units) in *POFFSET.
4897 This offset is in addition to the bit position.
4898 If the position is not variable, we store 0 in *POFFSET.
4899 We set *PALIGNMENT to the alignment in bytes of the address that will be
4900 computed. This is the alignment of the thing we return if *POFFSET
4901    is zero, but can be less strictly aligned if *POFFSET is nonzero.
4902
4903 If any of the extraction expressions is volatile,
4904 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4905
4906 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4907 is a mode that can be used to access the field. In that case, *PBITSIZE
4908 is redundant.
4909
4910 If the field describes a variable-sized object, *PMODE is set to
4911 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4912 this case, but the address of the object can be found. */
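/* For example (a sketch), for the C reference s.a[i].b we return the
   tree for S; the constant bit offsets of A and of B accumulate in
   *PBITPOS, while the contribution of the variable index I ends up as
   a byte-offset tree in *POFFSET.  */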
4913
4914 tree
4915 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4916 punsignedp, pvolatilep, palignment)
4917 tree exp;
4918 int *pbitsize;
4919 int *pbitpos;
4920 tree *poffset;
4921 enum machine_mode *pmode;
4922 int *punsignedp;
4923 int *pvolatilep;
4924 int *palignment;
4925 {
4926 tree orig_exp = exp;
4927 tree size_tree = 0;
4928 enum machine_mode mode = VOIDmode;
4929 tree offset = integer_zero_node;
4930 unsigned int alignment = BIGGEST_ALIGNMENT;
4931
4932 if (TREE_CODE (exp) == COMPONENT_REF)
4933 {
4934 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4935 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4936 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4937 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4938 }
4939 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4940 {
4941 size_tree = TREE_OPERAND (exp, 1);
4942 *punsignedp = TREE_UNSIGNED (exp);
4943 }
4944 else
4945 {
4946 mode = TYPE_MODE (TREE_TYPE (exp));
4947 if (mode == BLKmode)
4948 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4949
4950 *pbitsize = GET_MODE_BITSIZE (mode);
4951 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4952 }
4953
4954 if (size_tree)
4955 {
4956 if (TREE_CODE (size_tree) != INTEGER_CST)
4957 mode = BLKmode, *pbitsize = -1;
4958 else
4959 *pbitsize = TREE_INT_CST_LOW (size_tree);
4960 }
4961
4962 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4963 and find the ultimate containing object. */
4964
4965 *pbitpos = 0;
4966
4967 while (1)
4968 {
4969 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4970 {
4971 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4972 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4973 : TREE_OPERAND (exp, 2));
4974 tree constant = integer_zero_node, var = pos;
4975
4976 /* If this field hasn't been filled in yet, don't go
4977 past it. This should only happen when folding expressions
4978 made during type construction. */
4979 if (pos == 0)
4980 break;
4981
4982 /* Assume here that the offset is a multiple of a unit.
4983 If not, there should be an explicitly added constant. */
4984 if (TREE_CODE (pos) == PLUS_EXPR
4985 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4986 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4987 else if (TREE_CODE (pos) == INTEGER_CST)
4988 constant = pos, var = integer_zero_node;
4989
4990 *pbitpos += TREE_INT_CST_LOW (constant);
4991 offset = size_binop (PLUS_EXPR, offset,
4992 size_binop (EXACT_DIV_EXPR, var,
4993 size_int (BITS_PER_UNIT)));
4994 }
4995
4996 else if (TREE_CODE (exp) == ARRAY_REF)
4997 {
4998 /* This code is based on the code in case ARRAY_REF in expand_expr
4999 below. We assume here that the size of an array element is
5000 always an integral multiple of BITS_PER_UNIT. */
5001
5002 tree index = TREE_OPERAND (exp, 1);
5003 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5004 tree low_bound
5005 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5006 tree index_type = TREE_TYPE (index);
5007 tree xindex;
5008
5009 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5010 {
5011 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5012 index);
5013 index_type = TREE_TYPE (index);
5014 }
5015
5016 	  /* Optimize the special case of a zero lower bound.
5017
5018 We convert the low_bound to sizetype to avoid some problems
5019 with constant folding. (E.g. suppose the lower bound is 1,
5020 and its mode is QI. Without the conversion, (ARRAY
5021 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5022 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5023
5024 But sizetype isn't quite right either (especially if
5025 	     the low bound is negative).  FIXME  */
5026
5027 if (! integer_zerop (low_bound))
5028 index = fold (build (MINUS_EXPR, index_type, index,
5029 convert (sizetype, low_bound)));
5030
5031 if (TREE_CODE (index) == INTEGER_CST)
5032 {
5033 index = convert (sbitsizetype, index);
5034 index_type = TREE_TYPE (index);
5035 }
5036
5037 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5038 convert (sbitsizetype,
5039 TYPE_SIZE (TREE_TYPE (exp)))));
5040
5041 if (TREE_CODE (xindex) == INTEGER_CST
5042 && TREE_INT_CST_HIGH (xindex) == 0)
5043 *pbitpos += TREE_INT_CST_LOW (xindex);
5044 else
5045 {
5046 /* Either the bit offset calculated above is not constant, or
5047 it overflowed. In either case, redo the multiplication
5048 against the size in units. This is especially important
5049 in the non-constant case to avoid a division at runtime. */
5050 xindex = fold (build (MULT_EXPR, ssizetype, index,
5051 convert (ssizetype,
5052 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5053
5054 if (contains_placeholder_p (xindex))
5055 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5056
5057 offset = size_binop (PLUS_EXPR, offset, xindex);
5058 }
5059 }
5060 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5061 && ! ((TREE_CODE (exp) == NOP_EXPR
5062 || TREE_CODE (exp) == CONVERT_EXPR)
5063 && (TYPE_MODE (TREE_TYPE (exp))
5064 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5065 break;
5066
5067 /* If any reference in the chain is volatile, the effect is volatile. */
5068 if (TREE_THIS_VOLATILE (exp))
5069 *pvolatilep = 1;
5070
5071       /* If the offset is already non-constant, then we can't assume any
5072 	 alignment greater than the alignment of this reference's type.  */
5073 if (! integer_zerop (offset))
5074 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5075
5076 exp = TREE_OPERAND (exp, 0);
5077 }
5078
5079 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5080 alignment = MIN (alignment, DECL_ALIGN (exp));
5081 else if (TREE_TYPE (exp) != 0)
5082 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5083
5084 if (integer_zerop (offset))
5085 offset = 0;
5086
5087 if (offset != 0 && contains_placeholder_p (offset))
5088 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5089
5090 *pmode = mode;
5091 *poffset = offset;
5092 *palignment = alignment / BITS_PER_UNIT;
5093 return exp;
5094 }
5095
5096 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5097 static enum memory_use_mode
5098 get_memory_usage_from_modifier (modifier)
5099 enum expand_modifier modifier;
5100 {
5101 switch (modifier)
5102 {
5103 case EXPAND_NORMAL:
5104 case EXPAND_SUM:
5105 return MEMORY_USE_RO;
5106 break;
5107 case EXPAND_MEMORY_USE_WO:
5108 return MEMORY_USE_WO;
5109 break;
5110 case EXPAND_MEMORY_USE_RW:
5111 return MEMORY_USE_RW;
5112 break;
5113 case EXPAND_MEMORY_USE_DONT:
5114 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5115 MEMORY_USE_DONT, because they are modifiers to a call of
5116 expand_expr in the ADDR_EXPR case of expand_expr. */
5117 case EXPAND_CONST_ADDRESS:
5118 case EXPAND_INITIALIZER:
5119 return MEMORY_USE_DONT;
5120 case EXPAND_MEMORY_USE_BAD:
5121 default:
5122 abort ();
5123 }
5124 }
5125 \f
5126 /* Given an rtx VALUE that may contain additions and multiplications,
5127 return an equivalent value that just refers to a register or memory.
5128 This is done by generating instructions to perform the arithmetic
5129 and returning a pseudo-register containing the value.
5130
5131 The returned value may be a REG, SUBREG, MEM or constant. */
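/* For example (a sketch), given VALUE = (plus:SI (reg:SI 100)
   (const_int 4)) we emit an add via expand_binop and return a pseudo
   register holding the sum, whereas a lone (reg:SI 100) is returned
   unchanged.  */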
5132
5133 rtx
5134 force_operand (value, target)
5135 rtx value, target;
5136 {
5137 register optab binoptab = 0;
5138 /* Use a temporary to force order of execution of calls to
5139 `force_operand'. */
5140 rtx tmp;
5141 register rtx op2;
5142 /* Use subtarget as the target for operand 0 of a binary operation. */
5143 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5144
5145 /* Check for a PIC address load. */
5146 if (flag_pic
5147 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5148 && XEXP (value, 0) == pic_offset_table_rtx
5149 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5150 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5151 || GET_CODE (XEXP (value, 1)) == CONST))
5152 {
5153 if (!subtarget)
5154 subtarget = gen_reg_rtx (GET_MODE (value));
5155 emit_move_insn (subtarget, value);
5156 return subtarget;
5157 }
5158
5159 if (GET_CODE (value) == PLUS)
5160 binoptab = add_optab;
5161 else if (GET_CODE (value) == MINUS)
5162 binoptab = sub_optab;
5163 else if (GET_CODE (value) == MULT)
5164 {
5165 op2 = XEXP (value, 1);
5166 if (!CONSTANT_P (op2)
5167 && !(GET_CODE (op2) == REG && op2 != subtarget))
5168 subtarget = 0;
5169 tmp = force_operand (XEXP (value, 0), subtarget);
5170 return expand_mult (GET_MODE (value), tmp,
5171 force_operand (op2, NULL_RTX),
5172 target, 0);
5173 }
5174
5175 if (binoptab)
5176 {
5177 op2 = XEXP (value, 1);
5178 if (!CONSTANT_P (op2)
5179 && !(GET_CODE (op2) == REG && op2 != subtarget))
5180 subtarget = 0;
5181 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5182 {
5183 binoptab = add_optab;
5184 op2 = negate_rtx (GET_MODE (value), op2);
5185 }
5186
5187 /* Check for an addition with OP2 a constant integer and our first
5188 operand a PLUS of a virtual register and something else. In that
5189 case, we want to emit the sum of the virtual register and the
5190 constant first and then add the other value. This allows virtual
5191 register instantiation to simply modify the constant rather than
5192 creating another one around this addition. */
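      /* E.g. for (plus (plus virtual-stack-vars X) (const_int 4)) we
	 emit virtual-stack-vars + 4 first, which instantiation can
	 fold into a single frame offset, and then add X to the
	 result.  */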
5193 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5194 && GET_CODE (XEXP (value, 0)) == PLUS
5195 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5196 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5197 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5198 {
5199 rtx temp = expand_binop (GET_MODE (value), binoptab,
5200 XEXP (XEXP (value, 0), 0), op2,
5201 subtarget, 0, OPTAB_LIB_WIDEN);
5202 return expand_binop (GET_MODE (value), binoptab, temp,
5203 force_operand (XEXP (XEXP (value, 0), 1), 0),
5204 target, 0, OPTAB_LIB_WIDEN);
5205 }
5206
5207 tmp = force_operand (XEXP (value, 0), subtarget);
5208 return expand_binop (GET_MODE (value), binoptab, tmp,
5209 force_operand (op2, NULL_RTX),
5210 target, 0, OPTAB_LIB_WIDEN);
5211 /* We give UNSIGNEDP = 0 to expand_binop
5212 because the only operations we are expanding here are signed ones. */
5213 }
5214 return value;
5215 }
5216 \f
5217 /* Subroutine of expand_expr:
5218 save the non-copied parts (LIST) of an expr (LHS), and return a list
5219 which can restore these values to their previous values,
5220 should something modify their storage. */
5221
5222 static tree
5223 save_noncopied_parts (lhs, list)
5224 tree lhs;
5225 tree list;
5226 {
5227 tree tail;
5228 tree parts = 0;
5229
5230 for (tail = list; tail; tail = TREE_CHAIN (tail))
5231 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5232 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5233 else
5234 {
5235 tree part = TREE_VALUE (tail);
5236 tree part_type = TREE_TYPE (part);
5237 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5238 rtx target = assign_temp (part_type, 0, 1, 1);
5239 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5240 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5241 parts = tree_cons (to_be_saved,
5242 build (RTL_EXPR, part_type, NULL_TREE,
5243 (tree) target),
5244 parts);
5245 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5246 }
5247 return parts;
5248 }
5249
5250 /* Subroutine of expand_expr:
5251 record the non-copied parts (LIST) of an expr (LHS), and return a list
5252 which specifies the initial values of these parts. */
5253
5254 static tree
5255 init_noncopied_parts (lhs, list)
5256 tree lhs;
5257 tree list;
5258 {
5259 tree tail;
5260 tree parts = 0;
5261
5262 for (tail = list; tail; tail = TREE_CHAIN (tail))
5263 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5264 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5265 else if (TREE_PURPOSE (tail))
5266 {
5267 tree part = TREE_VALUE (tail);
5268 tree part_type = TREE_TYPE (part);
5269 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5270 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5271 }
5272 return parts;
5273 }
5274
5275 /* Subroutine of expand_expr: return nonzero iff there is no way that
5276 EXP can reference X, which is being modified. TOP_P is nonzero if this
5277 call is going to be used to determine whether we need a temporary
5278 for EXP, as opposed to a recursive call to this function.
5279
5280 It is always safe for this routine to return zero since it merely
5281 searches for optimization opportunities. */
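/* For instance (a sketch of the typical use), when expanding the
   assignment X = Y the caller may ask safe_from_p (DECL_RTL (x), y, 1);
   a nonzero answer means Y cannot reference X, so Y can be computed
   directly into X without an intermediate temporary.  */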
5282
5283 static int
5284 safe_from_p (x, exp, top_p)
5285 rtx x;
5286 tree exp;
5287 int top_p;
5288 {
5289 rtx exp_rtl = 0;
5290 int i, nops;
5291 static int save_expr_count;
5292 static int save_expr_size = 0;
5293 static tree *save_expr_rewritten;
5294 static tree save_expr_trees[256];
5295
5296 if (x == 0
5297 /* If EXP has varying size, we MUST use a target since we currently
5298 have no way of allocating temporaries of variable size
5299 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5300 So we assume here that something at a higher level has prevented a
5301 clash. This is somewhat bogus, but the best we can do. Only
5302 do this when X is BLKmode and when we are at the top level. */
5303 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5304 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5305 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5306 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5307 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5308 != INTEGER_CST)
5309 && GET_MODE (x) == BLKmode))
5310 return 1;
5311
5312 if (top_p && save_expr_size == 0)
5313 {
5314 int rtn;
5315
5316 save_expr_count = 0;
5317 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5318 save_expr_rewritten = &save_expr_trees[0];
5319
5320 rtn = safe_from_p (x, exp, 1);
5321
5322 for (i = 0; i < save_expr_count; ++i)
5323 {
5324 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5325 abort ();
5326 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5327 }
5328
5329 save_expr_size = 0;
5330
5331 return rtn;
5332 }
5333
5334 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5335 find the underlying pseudo. */
5336 if (GET_CODE (x) == SUBREG)
5337 {
5338 x = SUBREG_REG (x);
5339 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5340 return 0;
5341 }
5342
5343 /* If X is a location in the outgoing argument area, it is always safe. */
5344 if (GET_CODE (x) == MEM
5345 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5346 || (GET_CODE (XEXP (x, 0)) == PLUS
5347 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5348 return 1;
5349
5350 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5351 {
5352 case 'd':
5353 exp_rtl = DECL_RTL (exp);
5354 break;
5355
5356 case 'c':
5357 return 1;
5358
5359 case 'x':
5360 if (TREE_CODE (exp) == TREE_LIST)
5361 return ((TREE_VALUE (exp) == 0
5362 || safe_from_p (x, TREE_VALUE (exp), 0))
5363 && (TREE_CHAIN (exp) == 0
5364 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5365 else if (TREE_CODE (exp) == ERROR_MARK)
5366 return 1; /* An already-visited SAVE_EXPR? */
5367 else
5368 return 0;
5369
5370 case '1':
5371 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5372
5373 case '2':
5374 case '<':
5375 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5376 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5377
5378 case 'e':
5379 case 'r':
5380 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5381 the expression. If it is set, we conflict iff we are that rtx or
5382 both are in memory. Otherwise, we check all operands of the
5383 expression recursively. */
5384
5385 switch (TREE_CODE (exp))
5386 {
5387 case ADDR_EXPR:
5388 return (staticp (TREE_OPERAND (exp, 0))
5389 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5390 || TREE_STATIC (exp));
5391
5392 case INDIRECT_REF:
5393 if (GET_CODE (x) == MEM)
5394 return 0;
5395 break;
5396
5397 case CALL_EXPR:
5398 exp_rtl = CALL_EXPR_RTL (exp);
5399 if (exp_rtl == 0)
5400 {
5401 /* Assume that the call will clobber all hard registers and
5402 all of memory. */
5403 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5404 || GET_CODE (x) == MEM)
5405 return 0;
5406 }
5407
5408 break;
5409
5410 case RTL_EXPR:
5411 /* If a sequence exists, we would have to scan every instruction
5412 in the sequence to see if it was safe. This is probably not
5413 worthwhile. */
5414 if (RTL_EXPR_SEQUENCE (exp))
5415 return 0;
5416
5417 exp_rtl = RTL_EXPR_RTL (exp);
5418 break;
5419
5420 case WITH_CLEANUP_EXPR:
5421 exp_rtl = RTL_EXPR_RTL (exp);
5422 break;
5423
5424 case CLEANUP_POINT_EXPR:
5425 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5426
5427 case SAVE_EXPR:
5428 exp_rtl = SAVE_EXPR_RTL (exp);
5429 if (exp_rtl)
5430 break;
5431
5432 /* This SAVE_EXPR might appear many times in the top-level
5433 safe_from_p() expression, and if it has a complex
5434 subexpression, examining it multiple times could result
5435 in a combinatorial explosion. E.g. on an Alpha
5436 running at least 200MHz, a Fortran test case compiled with
5437 optimization took about 28 minutes to compile -- even though
5438 it was only a few lines long, and the complicated line causing
5439 so much time to be spent in the earlier version of safe_from_p()
5440 had only 293 or so unique nodes.
5441
5442 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5443 where it is so we can turn it back in the top-level safe_from_p()
5444 when we're done. */
5445
5446 /* For now, don't bother re-sizing the array. */
5447 if (save_expr_count >= save_expr_size)
5448 return 0;
5449 save_expr_rewritten[save_expr_count++] = exp;
5450
5451 nops = tree_code_length[(int) SAVE_EXPR];
5452 for (i = 0; i < nops; i++)
5453 {
5454 tree operand = TREE_OPERAND (exp, i);
5455 if (operand == NULL_TREE)
5456 continue;
5457 TREE_SET_CODE (exp, ERROR_MARK);
5458 if (!safe_from_p (x, operand, 0))
5459 return 0;
5460 TREE_SET_CODE (exp, SAVE_EXPR);
5461 }
5462 TREE_SET_CODE (exp, ERROR_MARK);
5463 return 1;
5464
5465 case BIND_EXPR:
5466 /* The only operand we look at is operand 1. The rest aren't
5467 part of the expression. */
5468 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5469
5470 case METHOD_CALL_EXPR:
5471 /* This takes a rtx argument, but shouldn't appear here. */
5472 abort ();
5473
5474 default:
5475 break;
5476 }
5477
5478 /* If we have an rtx, we do not need to scan our operands. */
5479 if (exp_rtl)
5480 break;
5481
5482 nops = tree_code_length[(int) TREE_CODE (exp)];
5483 for (i = 0; i < nops; i++)
5484 if (TREE_OPERAND (exp, i) != 0
5485 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5486 return 0;
5487 }
5488
5489 /* If we have an rtl, find any enclosed object. Then see if we conflict
5490 with it. */
5491 if (exp_rtl)
5492 {
5493 if (GET_CODE (exp_rtl) == SUBREG)
5494 {
5495 exp_rtl = SUBREG_REG (exp_rtl);
5496 if (GET_CODE (exp_rtl) == REG
5497 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5498 return 0;
5499 }
5500
5501 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5502 are memory and EXP is not readonly. */
5503 return ! (rtx_equal_p (x, exp_rtl)
5504 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5505 && ! TREE_READONLY (exp)));
5506 }
5507
5508 /* If we reach here, it is safe. */
5509 return 1;
5510 }
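
/* A typical use of safe_from_p, sketched for illustration (TARGET and
   EXP stand for a caller's variables, not a quotation of any one
   caller):

        if (target == 0 || ! safe_from_p (target, exp, 1))
          target = gen_reg_rtx (mode);
        store_expr (exp, target, 0);

   i.e. reuse the suggested TARGET only when expanding EXP cannot read
   the location TARGET refers to.  Since zero only means "could not
   prove it safe", a false negative merely costs an extra temporary.  */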
5511
5512 /* Subroutine of expand_expr: return nonzero iff EXP is an
5513 expression whose type is statically determinable. */
5514
5515 static int
5516 fixed_type_p (exp)
5517 tree exp;
5518 {
5519 if (TREE_CODE (exp) == PARM_DECL
5520 || TREE_CODE (exp) == VAR_DECL
5521 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5522 || TREE_CODE (exp) == COMPONENT_REF
5523 || TREE_CODE (exp) == ARRAY_REF)
5524 return 1;
5525 return 0;
5526 }
5527
5528 /* Subroutine of expand_expr: return rtx if EXP is a
5529 variable or parameter; else return 0. */
5530
5531 static rtx
5532 var_rtx (exp)
5533 tree exp;
5534 {
5535 STRIP_NOPS (exp);
5536 switch (TREE_CODE (exp))
5537 {
5538 case PARM_DECL:
5539 case VAR_DECL:
5540 return DECL_RTL (exp);
5541 default:
5542 return 0;
5543 }
5544 }
5545
5546 #ifdef MAX_INTEGER_COMPUTATION_MODE
5547 void
5548 check_max_integer_computation_mode (exp)
5549 tree exp;
5550 {
5551 enum tree_code code;
5552 enum machine_mode mode;
5553
5554 /* Strip any NOPs that don't change the mode. */
5555 STRIP_NOPS (exp);
5556 code = TREE_CODE (exp);
5557
5558 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5559 if (code == NOP_EXPR
5560 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5561 return;
5562
5563 /* First check the type of the overall operation. We need only look at
5564 unary, binary and relational operations. */
5565 if (TREE_CODE_CLASS (code) == '1'
5566 || TREE_CODE_CLASS (code) == '2'
5567 || TREE_CODE_CLASS (code) == '<')
5568 {
5569 mode = TYPE_MODE (TREE_TYPE (exp));
5570 if (GET_MODE_CLASS (mode) == MODE_INT
5571 && mode > MAX_INTEGER_COMPUTATION_MODE)
5572 fatal ("unsupported wide integer operation");
5573 }
5574
5575 /* Check operand of a unary op. */
5576 if (TREE_CODE_CLASS (code) == '1')
5577 {
5578 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5579 if (GET_MODE_CLASS (mode) == MODE_INT
5580 && mode > MAX_INTEGER_COMPUTATION_MODE)
5581 fatal ("unsupported wide integer operation");
5582 }
5583
5584 /* Check operands of a binary/comparison op. */
5585 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5586 {
5587 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5588 if (GET_MODE_CLASS (mode) == MODE_INT
5589 && mode > MAX_INTEGER_COMPUTATION_MODE)
5590 fatal ("unsupported wide integer operation");
5591
5592 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5593 if (GET_MODE_CLASS (mode) == MODE_INT
5594 && mode > MAX_INTEGER_COMPUTATION_MODE)
5595 fatal ("unsupported wide integer operation");
5596 }
5597 }
5598 #endif
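
/* Illustration (hypothetical target, sketch only): if a target defines
   MAX_INTEGER_COMPUTATION_MODE as SImode, then a DImode addition

        (PLUS_EXPR <long long a, long long b>)

   hits the '2' class check above and draws the fatal error, while a
   NOP_EXPR converting an INTEGER_CST to DImode is allowed through by
   the early return.  */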
5599
5600 \f
5601 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5602 has any readonly fields. If any of the fields have types that
5603 contain readonly fields, return true as well. */
5604
5605 static int
5606 readonly_fields_p (type)
5607 tree type;
5608 {
5609 tree field;
5610
5611 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5612 if (TREE_CODE (field) == FIELD_DECL
5613 && (TREE_READONLY (field)
5614 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5615 && readonly_fields_p (TREE_TYPE (field)))))
5616 return 1;
5617
5618 return 0;
5619 }
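
/* Illustrative input (a sketch): given

        struct inner { const int x; };
        struct outer { struct inner i; };

   this returns 1 for both record types: directly for `inner' because
   of its const field, and for `outer' through the recursive check on
   the RECORD_TYPE field `i'.  */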
5620 \f
5621 /* expand_expr: generate code for computing expression EXP.
5622 An rtx for the computed value is returned. The value is never null.
5623 In the case of a void EXP, const0_rtx is returned.
5624
5625 The value may be stored in TARGET if TARGET is nonzero.
5626 TARGET is just a suggestion; callers must assume that
5627 the rtx returned may not be the same as TARGET.
5628
5629 If TARGET is CONST0_RTX, it means that the value will be ignored.
5630
5631 If TMODE is not VOIDmode, it suggests generating the
5632 result in mode TMODE. But this is done only when convenient.
5633 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5634 TMODE is just a suggestion; callers must assume that
5635 the rtx returned may not have mode TMODE.
5636
5637 Note that TARGET may have neither TMODE nor MODE. In that case, it
5638 probably will not be used.
5639
5640 If MODIFIER is EXPAND_SUM then when EXP is an addition
5641 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5642 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5643 products as above, or REG or MEM, or constant.
5644 Ordinarily in such cases we would output mul or add instructions
5645 and then return a pseudo reg containing the sum.
5646
5647 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5648 it also marks a label as absolutely required (it can't be dead).
5649 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5650 This is used for outputting expressions used in initializers.
5651
5652 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5653 with a constant address even if that address is not normally legitimate.
5654 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
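
/* For instance (a sketch of the EXPAND_SUM convention, not the output
   of any particular target): expanding the C expression `p + i*4'
   with EXPAND_SUM may return the unreduced form

        (plus:SI (mult:SI (reg:SI <i>) (const_int 4))
                 (reg:SI <p>))

   rather than forcing the sum into one pseudo, so the caller can fold
   it into an address.  (<i> and <p> stand for the pseudos holding
   i and p.)  */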
5655
5656 rtx
5657 expand_expr (exp, target, tmode, modifier)
5658 register tree exp;
5659 rtx target;
5660 enum machine_mode tmode;
5661 enum expand_modifier modifier;
5662 {
5663 register rtx op0, op1, temp;
5664 tree type = TREE_TYPE (exp);
5665 int unsignedp = TREE_UNSIGNED (type);
5666 register enum machine_mode mode;
5667 register enum tree_code code = TREE_CODE (exp);
5668 optab this_optab;
5669 rtx subtarget, original_target;
5670 int ignore;
5671 tree context;
5672 /* Used by check-memory-usage to make modifier read only. */
5673 enum expand_modifier ro_modifier;
5674
5675 /* Handle ERROR_MARK before anybody tries to access its type. */
5676 if (TREE_CODE (exp) == ERROR_MARK)
5677 {
5678 op0 = CONST0_RTX (tmode);
5679 if (op0 != 0)
5680 return op0;
5681 return const0_rtx;
5682 }
5683
5684 mode = TYPE_MODE (type);
5685 /* Use subtarget as the target for operand 0 of a binary operation. */
5686 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5687 original_target = target;
5688 ignore = (target == const0_rtx
5689 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5690 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5691 || code == COND_EXPR)
5692 && TREE_CODE (type) == VOID_TYPE));
5693
5694 /* Make a read-only version of the modifier. */
5695 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5696 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5697 ro_modifier = modifier;
5698 else
5699 ro_modifier = EXPAND_NORMAL;
5700
5701 /* Don't use hard regs as subtargets, because the combiner
5702 can only handle pseudo regs. */
5703 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5704 subtarget = 0;
5705 /* Avoid subtargets inside loops,
5706 since they hide some invariant expressions. */
5707 if (preserve_subexpressions_p ())
5708 subtarget = 0;
5709
5710 /* If we are going to ignore this result, we need only do something
5711 if there is a side-effect somewhere in the expression. If there
5712 is, short-circuit the most common cases here. Note that we must
5713 not call expand_expr with anything but const0_rtx in case this
5714 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5715
5716 if (ignore)
5717 {
5718 if (! TREE_SIDE_EFFECTS (exp))
5719 return const0_rtx;
5720
5721 /* Ensure we reference a volatile object even if value is ignored, but
5722 don't do this if all we are doing is taking its address. */
5723 if (TREE_THIS_VOLATILE (exp)
5724 && TREE_CODE (exp) != FUNCTION_DECL
5725 && mode != VOIDmode && mode != BLKmode
5726 && modifier != EXPAND_CONST_ADDRESS)
5727 {
5728 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5729 if (GET_CODE (temp) == MEM)
5730 temp = copy_to_reg (temp);
5731 return const0_rtx;
5732 }
5733
5734 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5735 || code == INDIRECT_REF || code == BUFFER_REF)
5736 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5737 VOIDmode, ro_modifier);
5738 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5739 || code == ARRAY_REF)
5740 {
5741 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5742 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5743 return const0_rtx;
5744 }
5745 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5746 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5747 /* If the second operand has no side effects, just evaluate
5748 the first. */
5749 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5750 VOIDmode, ro_modifier);
5751 else if (code == BIT_FIELD_REF)
5752 {
5753 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5754 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5755 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5756 return const0_rtx;
5757 }
5759 target = 0;
5760 }
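
/* Example of the short-circuiting above (a sketch): for a statement
   like `(void) (x++ < y);' the comparison itself is never computed;
   both operands are expanded with const0_rtx as the target purely for
   their side effects (here the increment of x), and const0_rtx is
   returned.  */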
5761
5762 #ifdef MAX_INTEGER_COMPUTATION_MODE
5763 /* Only check stuff here if the mode we want is different from the mode
5764 of the expression; if it's the same, check_max_integer_computation_mode
5765 will handle it. Do we really need to check this stuff at all? */
5766
5767 if (target
5768 && GET_MODE (target) != mode
5769 && TREE_CODE (exp) != INTEGER_CST
5770 && TREE_CODE (exp) != PARM_DECL
5771 && TREE_CODE (exp) != ARRAY_REF
5772 && TREE_CODE (exp) != COMPONENT_REF
5773 && TREE_CODE (exp) != BIT_FIELD_REF
5774 && TREE_CODE (exp) != INDIRECT_REF
5775 && TREE_CODE (exp) != CALL_EXPR
5776 && TREE_CODE (exp) != VAR_DECL
5777 && TREE_CODE (exp) != RTL_EXPR)
5778 {
5779 enum machine_mode mode = GET_MODE (target);
5780
5781 if (GET_MODE_CLASS (mode) == MODE_INT
5782 && mode > MAX_INTEGER_COMPUTATION_MODE)
5783 fatal ("unsupported wide integer operation");
5784 }
5785
5786 if (tmode != mode
5787 && TREE_CODE (exp) != INTEGER_CST
5788 && TREE_CODE (exp) != PARM_DECL
5789 && TREE_CODE (exp) != ARRAY_REF
5790 && TREE_CODE (exp) != COMPONENT_REF
5791 && TREE_CODE (exp) != BIT_FIELD_REF
5792 && TREE_CODE (exp) != INDIRECT_REF
5793 && TREE_CODE (exp) != VAR_DECL
5794 && TREE_CODE (exp) != CALL_EXPR
5795 && TREE_CODE (exp) != RTL_EXPR
5796 && GET_MODE_CLASS (tmode) == MODE_INT
5797 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5798 fatal ("unsupported wide integer operation");
5799
5800 check_max_integer_computation_mode (exp);
5801 #endif
5802
5803 /* If we will do cse, generate all results into pseudo registers
5804 since 1) that allows cse to find more things
5805 and 2) otherwise cse could produce an insn the machine
5806 cannot support. */
5807
5808 if (! cse_not_expected && mode != BLKmode && target
5809 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5810 target = subtarget;
5811
5812 switch (code)
5813 {
5814 case LABEL_DECL:
5815 {
5816 tree function = decl_function_context (exp);
5817 /* Handle using a label in a containing function. */
5818 if (function != current_function_decl
5819 && function != inline_function_decl && function != 0)
5820 {
5821 struct function *p = find_function_data (function);
5822 /* Allocate in the memory associated with the function
5823 that the label is in. */
5824 push_obstacks (p->function_obstack,
5825 p->function_maybepermanent_obstack);
5826
5827 p->expr->x_forced_labels
5828 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5829 p->expr->x_forced_labels);
5830 pop_obstacks ();
5831 }
5832 else
5833 {
5834 if (modifier == EXPAND_INITIALIZER)
5835 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5836 label_rtx (exp),
5837 forced_labels);
5838 }
5839
5840 temp = gen_rtx_MEM (FUNCTION_MODE,
5841 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5842 if (function != current_function_decl
5843 && function != inline_function_decl && function != 0)
5844 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5845 return temp;
5846 }
5847
5848 case PARM_DECL:
5849 if (DECL_RTL (exp) == 0)
5850 {
5851 error_with_decl (exp, "prior parameter's size depends on `%s'");
5852 return CONST0_RTX (mode);
5853 }
5854
5855 /* ... fall through ... */
5856
5857 case VAR_DECL:
5858 /* If a static var's type was incomplete when the decl was written,
5859 but the type is complete now, lay out the decl now. */
5860 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5861 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5862 {
5863 push_obstacks_nochange ();
5864 end_temporary_allocation ();
5865 layout_decl (exp, 0);
5866 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5867 pop_obstacks ();
5868 }
5869
5870 /* Although static-storage variables start off initialized, according to
5871 ANSI C, a memcpy could overwrite them with uninitialized values. So
5872 we check them too. This also lets us check for read-only variables
5873 accessed via a non-const declaration, in case it won't be detected
5874 any other way (e.g., in an embedded system or OS kernel without
5875 memory protection).
5876
5877 Aggregates are not checked here; they're handled elsewhere. */
5878 if (cfun && current_function_check_memory_usage
5879 && code == VAR_DECL
5880 && GET_CODE (DECL_RTL (exp)) == MEM
5881 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5882 {
5883 enum memory_use_mode memory_usage;
5884 memory_usage = get_memory_usage_from_modifier (modifier);
5885
5886 if (memory_usage != MEMORY_USE_DONT)
5887 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5888 XEXP (DECL_RTL (exp), 0), Pmode,
5889 GEN_INT (int_size_in_bytes (type)),
5890 TYPE_MODE (sizetype),
5891 GEN_INT (memory_usage),
5892 TYPE_MODE (integer_type_node));
5893 }
5894
5895 /* ... fall through ... */
5896
5897 case FUNCTION_DECL:
5898 case RESULT_DECL:
5899 if (DECL_RTL (exp) == 0)
5900 abort ();
5901
5902 /* Ensure the variable is marked as used even if it doesn't go through
5903 a parser. If it hasn't been used yet, write out an external
5904 definition. */
5905 if (! TREE_USED (exp))
5906 {
5907 assemble_external (exp);
5908 TREE_USED (exp) = 1;
5909 }
5910
5911 /* Show we haven't gotten RTL for this yet. */
5912 temp = 0;
5913
5914 /* Handle variables inherited from containing functions. */
5915 context = decl_function_context (exp);
5916
5917 /* We treat inline_function_decl as an alias for the current function
5918 because that is the inline function whose vars, types, etc.
5919 are being merged into the current function.
5920 See expand_inline_function. */
5921
5922 if (context != 0 && context != current_function_decl
5923 && context != inline_function_decl
5924 /* If var is static, we don't need a static chain to access it. */
5925 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5926 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5927 {
5928 rtx addr;
5929
5930 /* Mark as non-local and addressable. */
5931 DECL_NONLOCAL (exp) = 1;
5932 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5933 abort ();
5934 mark_addressable (exp);
5935 if (GET_CODE (DECL_RTL (exp)) != MEM)
5936 abort ();
5937 addr = XEXP (DECL_RTL (exp), 0);
5938 if (GET_CODE (addr) == MEM)
5939 addr = gen_rtx_MEM (Pmode,
5940 fix_lexical_addr (XEXP (addr, 0), exp));
5941 else
5942 addr = fix_lexical_addr (addr, exp);
5943 temp = change_address (DECL_RTL (exp), mode, addr);
5944 }
5945
5946 /* This is the case of an array whose size is to be determined
5947 from its initializer, while the initializer is still being parsed.
5948 See expand_decl. */
5949
5950 else if (GET_CODE (DECL_RTL (exp)) == MEM
5951 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5952 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5953 XEXP (DECL_RTL (exp), 0));
5954
5955 /* If DECL_RTL is memory, we are in the normal case and either
5956 the address is not valid or it is not a register and -fforce-addr
5957 is specified, get the address into a register. */
5958
5959 else if (GET_CODE (DECL_RTL (exp)) == MEM
5960 && modifier != EXPAND_CONST_ADDRESS
5961 && modifier != EXPAND_SUM
5962 && modifier != EXPAND_INITIALIZER
5963 && (! memory_address_p (DECL_MODE (exp),
5964 XEXP (DECL_RTL (exp), 0))
5965 || (flag_force_addr
5966 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5967 temp = change_address (DECL_RTL (exp), VOIDmode,
5968 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5969
5970 /* If we got something, return it. But first, set the alignment
5971 if the address is a register. */
5972 if (temp != 0)
5973 {
5974 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5975 mark_reg_pointer (XEXP (temp, 0),
5976 DECL_ALIGN (exp) / BITS_PER_UNIT);
5977
5978 return temp;
5979 }
5980
5981 /* If the mode of DECL_RTL does not match that of the decl, it
5982 must be a promoted value. We return a SUBREG of the wanted mode,
5983 but mark it so that we know that it was already extended. */
5984
5985 if (GET_CODE (DECL_RTL (exp)) == REG
5986 && GET_MODE (DECL_RTL (exp)) != mode)
5987 {
5988 /* Get the signedness used for this variable. Ensure we get the
5989 same mode we got when the variable was declared. */
5990 if (GET_MODE (DECL_RTL (exp))
5991 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5992 abort ();
5993
5994 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5995 SUBREG_PROMOTED_VAR_P (temp) = 1;
5996 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5997 return temp;
5998 }
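
/* Sketch of the promoted-variable case just handled: on a target
   whose PROMOTE_MODE widens QImode locals to SImode registers, a
   `char' variable lives in (reg:SI n) and is returned as

        (subreg:QI (reg:SI n) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the wider
   register already holds a correctly extended value.  */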
5999
6000 return DECL_RTL (exp);
6001
6002 case INTEGER_CST:
6003 return immed_double_const (TREE_INT_CST_LOW (exp),
6004 TREE_INT_CST_HIGH (exp),
6005 mode);
6006
6007 case CONST_DECL:
6008 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6009 EXPAND_MEMORY_USE_BAD);
6010
6011 case REAL_CST:
6012 /* If optimized, generate immediate CONST_DOUBLE
6013 which will be turned into memory by reload if necessary.
6014
6015 We used to force a register so that loop.c could see it. But
6016 this does not allow gen_* patterns to perform optimizations with
6017 the constants. It also produces two insns in cases like "x = 1.0;".
6018 On most machines, floating-point constants are not permitted in
6019 many insns, so we'd end up copying it to a register in any case.
6020
6021 Now, we do the copying in expand_binop, if appropriate. */
6022 return immed_real_const (exp);
6023
6024 case COMPLEX_CST:
6025 case STRING_CST:
6026 if (! TREE_CST_RTL (exp))
6027 output_constant_def (exp);
6028
6029 /* TREE_CST_RTL probably contains a constant address.
6030 On RISC machines where a constant address isn't valid,
6031 make some insns to get that address into a register. */
6032 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6033 && modifier != EXPAND_CONST_ADDRESS
6034 && modifier != EXPAND_INITIALIZER
6035 && modifier != EXPAND_SUM
6036 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6037 || (flag_force_addr
6038 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6039 return change_address (TREE_CST_RTL (exp), VOIDmode,
6040 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6041 return TREE_CST_RTL (exp);
6042
6043 case EXPR_WITH_FILE_LOCATION:
6044 {
6045 rtx to_return;
6046 char *saved_input_filename = input_filename;
6047 int saved_lineno = lineno;
6048 input_filename = EXPR_WFL_FILENAME (exp);
6049 lineno = EXPR_WFL_LINENO (exp);
6050 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6051 emit_line_note (input_filename, lineno);
6052 /* Possibly avoid switching back and forth here. */
6053 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6054 input_filename = saved_input_filename;
6055 lineno = saved_lineno;
6056 return to_return;
6057 }
6058
6059 case SAVE_EXPR:
6060 context = decl_function_context (exp);
6061
6062 /* If this SAVE_EXPR was at global context, assume we are an
6063 initialization function and move it into our context. */
6064 if (context == 0)
6065 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6066
6067 /* We treat inline_function_decl as an alias for the current function
6068 because that is the inline function whose vars, types, etc.
6069 are being merged into the current function.
6070 See expand_inline_function. */
6071 if (context == current_function_decl || context == inline_function_decl)
6072 context = 0;
6073
6074 /* If this is non-local, handle it. */
6075 if (context)
6076 {
6077 /* The following call just exists to abort if the context is
6078 not of a containing function. */
6079 find_function_data (context);
6080
6081 temp = SAVE_EXPR_RTL (exp);
6082 if (temp && GET_CODE (temp) == REG)
6083 {
6084 put_var_into_stack (exp);
6085 temp = SAVE_EXPR_RTL (exp);
6086 }
6087 if (temp == 0 || GET_CODE (temp) != MEM)
6088 abort ();
6089 return change_address (temp, mode,
6090 fix_lexical_addr (XEXP (temp, 0), exp));
6091 }
6092 if (SAVE_EXPR_RTL (exp) == 0)
6093 {
6094 if (mode == VOIDmode)
6095 temp = const0_rtx;
6096 else
6097 temp = assign_temp (type, 3, 0, 0);
6098
6099 SAVE_EXPR_RTL (exp) = temp;
6100 if (!optimize && GET_CODE (temp) == REG)
6101 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6102 save_expr_regs);
6103
6104 /* If the mode of TEMP does not match that of the expression, it
6105 must be a promoted value. We pass store_expr a SUBREG of the
6106 wanted mode but mark it so that we know that it was already
6107 extended. Note that `unsignedp' was modified above in
6108 this case. */
6109
6110 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6111 {
6112 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6113 SUBREG_PROMOTED_VAR_P (temp) = 1;
6114 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6115 }
6116
6117 if (temp == const0_rtx)
6118 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6119 EXPAND_MEMORY_USE_BAD);
6120 else
6121 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6122
6123 TREE_USED (exp) = 1;
6124 }
6125
6126 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6127 must be a promoted value. We return a SUBREG of the wanted mode,
6128 but mark it so that we know that it was already extended. */
6129
6130 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6131 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6132 {
6133 /* Compute the signedness and make the proper SUBREG. */
6134 promote_mode (type, mode, &unsignedp, 0);
6135 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6136 SUBREG_PROMOTED_VAR_P (temp) = 1;
6137 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6138 return temp;
6139 }
6140
6141 return SAVE_EXPR_RTL (exp);
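
/* Why this caching matters (illustration): when one SAVE_EXPR node is
   shared between several operands of a larger tree, only the first
   expansion allocates the temporary and stores into it; every later
   expansion of that same node just returns the cached SAVE_EXPR_RTL,
   so the subexpression is evaluated exactly once.  */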
6142
6143 case UNSAVE_EXPR:
6144 {
6145 rtx temp;
6146 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6147 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6148 return temp;
6149 }
6150
6151 case PLACEHOLDER_EXPR:
6152 {
6153 tree placeholder_expr;
6154
6155 /* If there is an object on the head of the placeholder list,
6156 see if any object in it is of type TYPE or a pointer to it. For
6157 further information, see tree.def. */
6158 for (placeholder_expr = placeholder_list;
6159 placeholder_expr != 0;
6160 placeholder_expr = TREE_CHAIN (placeholder_expr))
6161 {
6162 tree need_type = TYPE_MAIN_VARIANT (type);
6163 tree object = 0;
6164 tree old_list = placeholder_list;
6165 tree elt;
6166
6167 /* Find the outermost reference that is of the type we want.
6168 If none, see if any object has a type that is a pointer to
6169 the type we want. */
6170 for (elt = TREE_PURPOSE (placeholder_expr);
6171 elt != 0 && object == 0;
6172 elt
6173 = ((TREE_CODE (elt) == COMPOUND_EXPR
6174 || TREE_CODE (elt) == COND_EXPR)
6175 ? TREE_OPERAND (elt, 1)
6176 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6177 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6178 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6179 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6180 ? TREE_OPERAND (elt, 0) : 0))
6181 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6182 object = elt;
6183
6184 for (elt = TREE_PURPOSE (placeholder_expr);
6185 elt != 0 && object == 0;
6186 elt
6187 = ((TREE_CODE (elt) == COMPOUND_EXPR
6188 || TREE_CODE (elt) == COND_EXPR)
6189 ? TREE_OPERAND (elt, 1)
6190 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6191 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6192 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6193 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6194 ? TREE_OPERAND (elt, 0) : 0))
6195 if (POINTER_TYPE_P (TREE_TYPE (elt))
6196 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6197 == need_type))
6198 object = build1 (INDIRECT_REF, need_type, elt);
6199
6200 if (object != 0)
6201 {
6202 /* Expand this object skipping the list entries before
6203 it was found in case it is also a PLACEHOLDER_EXPR.
6204 In that case, we want to translate it using subsequent
6205 entries. */
6206 placeholder_list = TREE_CHAIN (placeholder_expr);
6207 temp = expand_expr (object, original_target, tmode,
6208 ro_modifier);
6209 placeholder_list = old_list;
6210 return temp;
6211 }
6212 }
6213 }
6214
6215 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6216 abort ();
6217
6218 case WITH_RECORD_EXPR:
6219 /* Put the object on the placeholder list, expand our first operand,
6220 and pop the list. */
6221 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6222 placeholder_list);
6223 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6224 tmode, ro_modifier);
6225 placeholder_list = TREE_CHAIN (placeholder_list);
6226 return target;
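
/* A sketch of how these two codes cooperate (in the style of the
   variable-sized records of Ada, for which they exist): a size
   expression that refers back to the record is written as

        WITH_RECORD_EXPR <size-expr-containing-PLACEHOLDER_EXPR, record>

   The WITH_RECORD_EXPR case pushes the record on placeholder_list,
   the PLACEHOLDER_EXPR case above then finds it as an object of the
   wanted type and expands it instead, and the list entry is popped
   again afterwards.  */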
6227
6228 case GOTO_EXPR:
6229 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6230 expand_goto (TREE_OPERAND (exp, 0));
6231 else
6232 expand_computed_goto (TREE_OPERAND (exp, 0));
6233 return const0_rtx;
6234
6235 case EXIT_EXPR:
6236 expand_exit_loop_if_false (NULL_PTR,
6237 invert_truthvalue (TREE_OPERAND (exp, 0)));
6238 return const0_rtx;
6239
6240 case LABELED_BLOCK_EXPR:
6241 if (LABELED_BLOCK_BODY (exp))
6242 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6243 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6244 return const0_rtx;
6245
6246 case EXIT_BLOCK_EXPR:
6247 if (EXIT_BLOCK_RETURN (exp))
6248 sorry ("returned value in block_exit_expr");
6249 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6250 return const0_rtx;
6251
6252 case LOOP_EXPR:
6253 push_temp_slots ();
6254 expand_start_loop (1);
6255 expand_expr_stmt (TREE_OPERAND (exp, 0));
6256 expand_end_loop ();
6257 pop_temp_slots ();
6258
6259 return const0_rtx;
6260
6261 case BIND_EXPR:
6262 {
6263 tree vars = TREE_OPERAND (exp, 0);
6264 int vars_need_expansion = 0;
6265
6266 /* Need to open a binding contour here because
6267 if there are any cleanups they must be contained here. */
6268 expand_start_bindings (2);
6269
6270 /* Mark the corresponding BLOCK for output in its proper place. */
6271 if (TREE_OPERAND (exp, 2) != 0
6272 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6273 insert_block (TREE_OPERAND (exp, 2));
6274
6275 /* If VARS have not yet been expanded, expand them now. */
6276 while (vars)
6277 {
6278 if (DECL_RTL (vars) == 0)
6279 {
6280 vars_need_expansion = 1;
6281 expand_decl (vars);
6282 }
6283 expand_decl_init (vars);
6284 vars = TREE_CHAIN (vars);
6285 }
6286
6287 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6288
6289 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6290
6291 return temp;
6292 }
6293
6294 case RTL_EXPR:
6295 if (RTL_EXPR_SEQUENCE (exp))
6296 {
6297 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6298 abort ();
6299 emit_insns (RTL_EXPR_SEQUENCE (exp));
6300 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6301 }
6302 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6303 free_temps_for_rtl_expr (exp);
6304 return RTL_EXPR_RTL (exp);
6305
6306 case CONSTRUCTOR:
6307 /* If we don't need the result, just ensure we evaluate any
6308 subexpressions. */
6309 if (ignore)
6310 {
6311 tree elt;
6312 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6313 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6314 EXPAND_MEMORY_USE_BAD);
6315 return const0_rtx;
6316 }
6317
6318 /* All elts simple constants => refer to a constant in memory. But
6319 if this is a non-BLKmode mode, let it store a field at a time
6320 since that should make a CONST_INT or CONST_DOUBLE when we
6321 fold. Likewise, if we have a target we can use, it is best to
6322 store directly into the target unless the type is large enough
6323 that memcpy will be used. If we are making an initializer and
6324 all operands are constant, put it in memory as well. */
6325 else if ((TREE_STATIC (exp)
6326 && ((mode == BLKmode
6327 && ! (target != 0 && safe_from_p (target, exp, 1)))
6328 || TREE_ADDRESSABLE (exp)
6329 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6330 && (!MOVE_BY_PIECES_P
6331 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6332 TYPE_ALIGN (type) / BITS_PER_UNIT))
6333 && ! mostly_zeros_p (exp))))
6334 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6335 {
6336 rtx constructor = output_constant_def (exp);
6337 if (modifier != EXPAND_CONST_ADDRESS
6338 && modifier != EXPAND_INITIALIZER
6339 && modifier != EXPAND_SUM
6340 && (! memory_address_p (GET_MODE (constructor),
6341 XEXP (constructor, 0))
6342 || (flag_force_addr
6343 && GET_CODE (XEXP (constructor, 0)) != REG)))
6344 constructor = change_address (constructor, VOIDmode,
6345 XEXP (constructor, 0));
6346 return constructor;
6347 }
6348
6349 else
6350 {
6351 /* Handle calls that pass values in multiple non-contiguous
6352 locations. The Irix 6 ABI has examples of this. */
6353 if (target == 0 || ! safe_from_p (target, exp, 1)
6354 || GET_CODE (target) == PARALLEL)
6355 {
6356 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6357 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6358 else
6359 target = assign_temp (type, 0, 1, 1);
6360 }
6361
6362 if (TREE_READONLY (exp))
6363 {
6364 if (GET_CODE (target) == MEM)
6365 target = copy_rtx (target);
6366
6367 RTX_UNCHANGING_P (target) = 1;
6368 }
6369
6370 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6371 int_size_in_bytes (TREE_TYPE (exp)));
6372 return target;
6373 }
6374
6375 case INDIRECT_REF:
6376 {
6377 tree exp1 = TREE_OPERAND (exp, 0);
6378 tree exp2;
6379 tree index;
6380 tree string = string_constant (exp1, &index);
6381 int i;
6382
6383 /* Try to optimize reads from const strings. */
6384 if (string
6385 && TREE_CODE (string) == STRING_CST
6386 && TREE_CODE (index) == INTEGER_CST
6387 && !TREE_INT_CST_HIGH (index)
6388 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6389 && GET_MODE_CLASS (mode) == MODE_INT
6390 && GET_MODE_SIZE (mode) == 1
6391 && modifier != EXPAND_MEMORY_USE_WO)
6392 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6393
6394 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6395 op0 = memory_address (mode, op0);
6396
6397 if (cfun && current_function_check_memory_usage
6398 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6399 {
6400 enum memory_use_mode memory_usage;
6401 memory_usage = get_memory_usage_from_modifier (modifier);
6402
6403 if (memory_usage != MEMORY_USE_DONT)
6404 {
6405 in_check_memory_usage = 1;
6406 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6407 op0, Pmode,
6408 GEN_INT (int_size_in_bytes (type)),
6409 TYPE_MODE (sizetype),
6410 GEN_INT (memory_usage),
6411 TYPE_MODE (integer_type_node));
6412 in_check_memory_usage = 0;
6413 }
6414 }
6415
6416 temp = gen_rtx_MEM (mode, op0);
6417 /* If address was computed by addition,
6418 mark this as an element of an aggregate. */
6419 if (TREE_CODE (exp1) == PLUS_EXPR
6420 || (TREE_CODE (exp1) == SAVE_EXPR
6421 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6422 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6423 || (TREE_CODE (exp1) == ADDR_EXPR
6424 && (exp2 = TREE_OPERAND (exp1, 0))
6425 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6426 MEM_SET_IN_STRUCT_P (temp, 1);
6427
6428 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6429 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6430
6431 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6432 here, because, in C and C++, the fact that a location is accessed
6433 through a pointer to const does not mean that the value there can
6434 never change. Languages where it can never change should
6435 also set TREE_STATIC. */
6436 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6437
6438 /* If we are writing to this object and its type is a record with
6439 readonly fields, we must mark it as readonly so it will
6440 conflict with readonly references to those fields. */
6441 if (modifier == EXPAND_MEMORY_USE_WO
6442 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6443 RTX_UNCHANGING_P (temp) = 1;
6444
6445 return temp;
6446 }
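
/* Example of the const-string optimization above (a sketch): for the
   C expression `*("abc" + 1)', string_constant recovers the
   STRING_CST and the index 1, so the whole dereference folds to
   GEN_INT ('b') and no memory reference is emitted.  */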
6447
6448 case ARRAY_REF:
6449 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6450 abort ();
6451
6452 {
6453 tree array = TREE_OPERAND (exp, 0);
6454 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6455 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6456 tree index = TREE_OPERAND (exp, 1);
6457 tree index_type = TREE_TYPE (index);
6458 HOST_WIDE_INT i;
6459
6460 /* Optimize the special-case of a zero lower bound.
6461
6462 We convert the low_bound to sizetype to avoid some problems
6463 with constant folding. (E.g. suppose the lower bound is 1,
6464 and its mode is QI. Without the conversion, (ARRAY
6465 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6466 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6467
6468 But sizetype isn't quite right either (especially if
6469 the low bound is negative). FIXME */
6470
6471 if (! integer_zerop (low_bound))
6472 index = fold (build (MINUS_EXPR, index_type, index,
6473 convert (sizetype, low_bound)));
6474
6475 /* Fold an expression like: "foo"[2].
6476 This is not done in fold so it won't happen inside &.
6477 Don't fold if this is for wide characters since it's too
6478 difficult to do correctly and this is a very rare case. */
6479
6480 if (TREE_CODE (array) == STRING_CST
6481 && TREE_CODE (index) == INTEGER_CST
6482 && !TREE_INT_CST_HIGH (index)
6483 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6484 && GET_MODE_CLASS (mode) == MODE_INT
6485 && GET_MODE_SIZE (mode) == 1)
6486 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6487
6488 /* If this is a constant index into a constant array,
6489 just get the value from the array. Handle both the cases when
6490 we have an explicit constructor and when our operand is a variable
6491 that was declared const. */
6492
6493 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6494 {
6495 if (TREE_CODE (index) == INTEGER_CST
6496 && TREE_INT_CST_HIGH (index) == 0)
6497 {
6498 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6499
6500 i = TREE_INT_CST_LOW (index);
6501 while (elem && i--)
6502 elem = TREE_CHAIN (elem);
6503 if (elem)
6504 return expand_expr (fold (TREE_VALUE (elem)), target,
6505 tmode, ro_modifier);
6506 }
6507 }
6508
6509 else if (optimize >= 1
6510 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6511 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6512 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6513 {
6514 if (TREE_CODE (index) == INTEGER_CST)
6515 {
6516 tree init = DECL_INITIAL (array);
6517
6518 i = TREE_INT_CST_LOW (index);
6519 if (TREE_CODE (init) == CONSTRUCTOR)
6520 {
6521 tree elem = CONSTRUCTOR_ELTS (init);
6522
6523 while (elem
6524 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6525 elem = TREE_CHAIN (elem);
6526 if (elem)
6527 return expand_expr (fold (TREE_VALUE (elem)), target,
6528 tmode, ro_modifier);
6529 }
6530 else if (TREE_CODE (init) == STRING_CST
6531 && TREE_INT_CST_HIGH (index) == 0
6532 && (TREE_INT_CST_LOW (index)
6533 < TREE_STRING_LENGTH (init)))
6534 return (GEN_INT
6535 (TREE_STRING_POINTER
6536 (init)[TREE_INT_CST_LOW (index)]));
6537 }
6538 }
6539 }
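
/* Example of the folding above (a sketch): given

        static const int tbl[3] = { 10, 20, 30 };

   a use of `tbl[1]' compiled with optimization finds the CONSTRUCTOR
   in DECL_INITIAL (tbl), locates the element for index 1, and expands
   the constant 20 directly, so no load from memory is emitted.  */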
6540
6541 /* ... fall through ... */
6542
6543 case COMPONENT_REF:
6544 case BIT_FIELD_REF:
6545 /* If the operand is a CONSTRUCTOR, we can just extract the
6546 appropriate field if it is present. Don't do this if we have
6547 already written the data since we want to refer to that copy
6548 and varasm.c assumes that's what we'll do. */
6549 if (code != ARRAY_REF
6550 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6551 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6552 {
6553 tree elt;
6554
6555 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6556 elt = TREE_CHAIN (elt))
6557 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6558 /* We can normally use the value of the field in the
6559 CONSTRUCTOR. However, if this is a bitfield in
6560 an integral mode that we can fit in a HOST_WIDE_INT,
6561 we must mask only the number of bits in the bitfield,
6562 since this is done implicitly by the constructor. If
6563 the bitfield does not meet either of those conditions,
6564 we can't do this optimization. */
6565 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6566 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6567 == MODE_INT)
6568 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6569 <= HOST_BITS_PER_WIDE_INT))))
6570 {
6571 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6572 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6573 {
6574 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6575
6576 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6577 {
6578 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6579 op0 = expand_and (op0, op1, target);
6580 }
6581 else
6582 {
6583 enum machine_mode imode
6584 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6585 tree count
6586 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6587 0);
6588
6589 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6590 target, 0);
6591 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6592 target, 0);
6593 }
6594 }
6595
6596 return op0;
6597 }
6598 }
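
/* The bitfield adjustment above, worked through (sketch, assuming a
   32-bit SImode): a signed bitfield of width 5 taken from the
   CONSTRUCTOR is shifted left by 32 - 5 = 27 and then arithmetically
   right by 27, sign-extending bit 4 through the word; an unsigned
   field is instead masked with (1 << 5) - 1.  */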
6599
6600 {
6601 enum machine_mode mode1;
6602 int bitsize;
6603 int bitpos;
6604 tree offset;
6605 int volatilep = 0;
6606 int alignment;
6607 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6608 &mode1, &unsignedp, &volatilep,
6609 &alignment);
6610
6611 /* If we got back the original object, something is wrong. Perhaps
6612 we are evaluating an expression too early. In any event, don't
6613 infinitely recurse. */
6614 if (tem == exp)
6615 abort ();
6616
6617 /* If TEM's type is a union of variable size, pass TARGET to the inner
6618 computation, since it will need a temporary and TARGET is known
6619 to be suitable. This occurs in unchecked conversion in Ada. */
6620
6621 op0 = expand_expr (tem,
6622 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6623 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6624 != INTEGER_CST)
6625 ? target : NULL_RTX),
6626 VOIDmode,
6627 (modifier == EXPAND_INITIALIZER
6628 || modifier == EXPAND_CONST_ADDRESS)
6629 ? modifier : EXPAND_NORMAL);
6630
6631 /* If this is a constant, put it into a register if it is a
6632 legitimate constant and OFFSET is 0 and memory if it isn't. */
6633 if (CONSTANT_P (op0))
6634 {
6635 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6636 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6637 && offset == 0)
6638 op0 = force_reg (mode, op0);
6639 else
6640 op0 = validize_mem (force_const_mem (mode, op0));
6641 }
6642
6643 if (offset != 0)
6644 {
6645 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6646
6647 /* If this object is in memory, put it into a register.
6648 This case can't occur in C, but can in Ada if we have
6649 unchecked conversion of an expression from a scalar type to
6650 an array or record type. */
6651 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6652 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6653 {
6654 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6655
6656 mark_temp_addr_taken (memloc);
6657 emit_move_insn (memloc, op0);
6658 op0 = memloc;
6659 }
6660
6661 if (GET_CODE (op0) != MEM)
6662 abort ();
6663
6664 if (GET_MODE (offset_rtx) != ptr_mode)
6665 {
6666 #ifdef POINTERS_EXTEND_UNSIGNED
6667 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6668 #else
6669 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6670 #endif
6671 }
6672
6673 /* A constant address in OP0 can have VOIDmode; we must not try
6674 to call force_reg in that case. Avoid that case. */
6675 if (GET_CODE (op0) == MEM
6676 && GET_MODE (op0) == BLKmode
6677 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6678 && bitsize != 0
6679 && (bitpos % bitsize) == 0
6680 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6681 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6682 {
6683 rtx temp = change_address (op0, mode1,
6684 plus_constant (XEXP (op0, 0),
6685 (bitpos /
6686 BITS_PER_UNIT)));
6687 if (GET_CODE (XEXP (temp, 0)) == REG)
6688 op0 = temp;
6689 else
6690 op0 = change_address (op0, mode1,
6691 force_reg (GET_MODE (XEXP (temp, 0)),
6692 XEXP (temp, 0)));
6693 bitpos = 0;
6694 }
6695
6696
6697 op0 = change_address (op0, VOIDmode,
6698 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6699 force_reg (ptr_mode,
6700 offset_rtx)));
6701 }
6702
6703 /* Don't forget about volatility even if this is a bitfield. */
6704 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6705 {
6706 op0 = copy_rtx (op0);
6707 MEM_VOLATILE_P (op0) = 1;
6708 }
6709
6710 /* Check the access. */
6711 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6712 {
6713 enum memory_use_mode memory_usage;
6714 memory_usage = get_memory_usage_from_modifier (modifier);
6715
6716 if (memory_usage != MEMORY_USE_DONT)
6717 {
6718 rtx to;
6719 int size;
6720
6721 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6722 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6723
6724 /* Check the access right of the pointer. */
6725 if (size > BITS_PER_UNIT)
6726 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6727 to, Pmode,
6728 GEN_INT (size / BITS_PER_UNIT),
6729 TYPE_MODE (sizetype),
6730 GEN_INT (memory_usage),
6731 TYPE_MODE (integer_type_node));
6732 }
6733 }
6734
6735 /* In cases where an aligned union has an unaligned object
6736 as a field, we might be extracting a BLKmode value from
6737 an integer-mode (e.g., SImode) object. Handle this case
6738 by doing the extract into an object as wide as the field
6739 (which we know to be the width of a basic mode), then
6740 storing into memory, and changing the mode to BLKmode.
6741 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6742 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6743 if (mode1 == VOIDmode
6744 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6745 || (modifier != EXPAND_CONST_ADDRESS
6746 && modifier != EXPAND_INITIALIZER
6747 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6748 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6749 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6750 /* If the field isn't aligned enough to fetch as a memref,
6751 fetch it as a bit field. */
6752 || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
6753 && ((TYPE_ALIGN (TREE_TYPE (tem))
6754 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6755 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6756 || (modifier != EXPAND_CONST_ADDRESS
6757 && modifier != EXPAND_INITIALIZER
6758 && mode == BLKmode
6759 && SLOW_UNALIGNED_ACCESS
6760 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6761 || bitpos % TYPE_ALIGN (type) != 0)))
6762 {
6763 enum machine_mode ext_mode = mode;
6764
6765 if (ext_mode == BLKmode
6766 && ! (target != 0 && GET_CODE (op0) == MEM
6767 && GET_CODE (target) == MEM
6768 && bitpos % BITS_PER_UNIT == 0))
6769 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6770
6771 if (ext_mode == BLKmode)
6772 {
6773 /* In this case, BITPOS must start at a byte boundary and
6774 TARGET, if specified, must be a MEM. */
6775 if (GET_CODE (op0) != MEM
6776 || (target != 0 && GET_CODE (target) != MEM)
6777 || bitpos % BITS_PER_UNIT != 0)
6778 abort ();
6779
6780 op0 = change_address (op0, VOIDmode,
6781 plus_constant (XEXP (op0, 0),
6782 bitpos / BITS_PER_UNIT));
6783 if (target == 0)
6784 target = assign_temp (type, 0, 1, 1);
6785
6786 emit_block_move (target, op0,
6787 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6788 / BITS_PER_UNIT),
6789 1);
6790
6791 return target;
6792 }
6793
6794 op0 = validize_mem (op0);
6795
6796 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6797 mark_reg_pointer (XEXP (op0, 0), alignment);
6798
6799 op0 = extract_bit_field (op0, bitsize, bitpos,
6800 unsignedp, target, ext_mode, ext_mode,
6801 alignment,
6802 int_size_in_bytes (TREE_TYPE (tem)));
6803
6804 /* If the result is a record type and BITSIZE is narrower than
6805 the mode of OP0, an integral mode, and this is a big endian
6806 machine, we must put the field into the high-order bits. */
6807 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6808 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6809 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6810 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6811 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6812 - bitsize),
6813 op0, 1);
6814
6815 if (mode == BLKmode)
6816 {
6817 rtx new = assign_stack_temp (ext_mode,
6818 bitsize / BITS_PER_UNIT, 0);
6819
6820 emit_move_insn (new, op0);
6821 op0 = copy_rtx (new);
6822 PUT_MODE (op0, BLKmode);
6823 MEM_SET_IN_STRUCT_P (op0, 1);
6824 }
6825
6826 return op0;
6827 }
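
/* Concretely (a sketch, assuming a 32-bit SImode and a target where
   SLOW_UNALIGNED_ACCESS holds): an SImode field of a packed struct
   sitting at bit offset 8 is not aligned enough to fetch as an
   ordinary memref, so it is pulled out above with

        extract_bit_field (op0, 32, 8, ...)

   into a register of EXT_MODE.  */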
6828
6829 /* If the result is BLKmode, use that to access the object
6830 now as well. */
6831 if (mode == BLKmode)
6832 mode1 = BLKmode;
6833
6834 /* Get a reference to just this component. */
6835 if (modifier == EXPAND_CONST_ADDRESS
6836 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6837 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6838 (bitpos / BITS_PER_UNIT)));
6839 else
6840 op0 = change_address (op0, mode1,
6841 plus_constant (XEXP (op0, 0),
6842 (bitpos / BITS_PER_UNIT)));
6843
6844 if (GET_CODE (op0) == MEM)
6845 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6846
6847 if (GET_CODE (XEXP (op0, 0)) == REG)
6848 mark_reg_pointer (XEXP (op0, 0), alignment);
6849
6850 MEM_SET_IN_STRUCT_P (op0, 1);
6851 MEM_VOLATILE_P (op0) |= volatilep;
6852 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6853 || modifier == EXPAND_CONST_ADDRESS
6854 || modifier == EXPAND_INITIALIZER)
6855 return op0;
6856 else if (target == 0)
6857 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6858
6859 convert_move (target, op0, unsignedp);
6860 return target;
6861 }
6862
6863 /* Intended for a reference to a buffer of a file-object in Pascal.
6864 But it's not certain that a special tree code will really be
6865 necessary for these. INDIRECT_REF might work for them. */
6866 case BUFFER_REF:
6867 abort ();
6868
6869 case IN_EXPR:
6870 {
6871 /* Pascal set IN expression.
6872
6873 Algorithm:
6874 rlo = set_low - (set_low%bits_per_word);
6875 the_word = set [ (index - rlo)/bits_per_word ];
6876 bit_index = index % bits_per_word;
6877 bitmask = 1 << bit_index;
6878 return !!(the_word & bitmask); */
6879
6880 tree set = TREE_OPERAND (exp, 0);
6881 tree index = TREE_OPERAND (exp, 1);
6882 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6883 tree set_type = TREE_TYPE (set);
6884 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6885 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6886 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6887 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6888 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6889 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6890 rtx setaddr = XEXP (setval, 0);
6891 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6892 rtx rlow;
6893 rtx diff, quo, rem, addr, bit, result;
6894
6895 preexpand_calls (exp);
6896
6897 /* If domain is empty, answer is no. Likewise if index is constant
6898 and out of bounds. */
6899 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6900 && TREE_CODE (set_low_bound) == INTEGER_CST
6901 && tree_int_cst_lt (set_high_bound, set_low_bound))
6902 || (TREE_CODE (index) == INTEGER_CST
6903 && TREE_CODE (set_low_bound) == INTEGER_CST
6904 && tree_int_cst_lt (index, set_low_bound))
6905 || (TREE_CODE (set_high_bound) == INTEGER_CST
6906 && TREE_CODE (index) == INTEGER_CST
6907 && tree_int_cst_lt (set_high_bound, index))))
6908 return const0_rtx;
6909
6910 if (target == 0)
6911 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6912
6913 /* If we get here, we have to generate the code for both cases
6914 (in range and out of range). */
6915
6916 op0 = gen_label_rtx ();
6917 op1 = gen_label_rtx ();
6918
6919 if (! (GET_CODE (index_val) == CONST_INT
6920 && GET_CODE (lo_r) == CONST_INT))
6921 {
6922 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6923 GET_MODE (index_val), iunsignedp, 0, op1);
6924 }
6925
6926 if (! (GET_CODE (index_val) == CONST_INT
6927 && GET_CODE (hi_r) == CONST_INT))
6928 {
6929 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6930 GET_MODE (index_val), iunsignedp, 0, op1);
6931 }
6932
6933 /* Calculate the element number of bit zero in the first word
6934 of the set. */
6935 if (GET_CODE (lo_r) == CONST_INT)
6936 rlow = GEN_INT (INTVAL (lo_r)
6937 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
6938 else
6939 rlow = expand_binop (index_mode, and_optab, lo_r,
6940 GEN_INT (~((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
6941 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6942
6943 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6944 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6945
6946 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6947 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6948 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6949 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6950
6951 addr = memory_address (byte_mode,
6952 expand_binop (index_mode, add_optab, quo,
6953 setaddr, NULL_RTX, iunsignedp,
6954 OPTAB_LIB_WIDEN));
6955
6956 /* Extract the bit we want to examine. */
6957 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6958 gen_rtx_MEM (byte_mode, addr),
6959 make_tree (TREE_TYPE (index), rem),
6960 NULL_RTX, 1);
6961 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6962 GET_MODE (target) == byte_mode ? target : 0,
6963 1, OPTAB_LIB_WIDEN);
6964
6965 if (result != target)
6966 convert_move (target, result, 1);
6967
6968 /* Output the code to handle the out-of-range case. */
6969 emit_jump (op0);
6970 emit_label (op1);
6971 emit_move_insn (target, const0_rtx);
6972 emit_label (op0);
6973 return target;
6974 }
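
/* Worked instance of the algorithm in the comment above (a sketch,
   with 8-bit units): for set_low = 3 and a query index of 13,

        rlo      = 3 - (3 % 8)       = 0
        the_word = set[(13 - 0) / 8] = set[1]
        bit      = 13 % 8            = 5

   so the result is bit 5 of the second byte of the set, masked down
   to 0 or 1 with and_optab.  */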
6975
6976 case WITH_CLEANUP_EXPR:
6977 if (RTL_EXPR_RTL (exp) == 0)
6978 {
6979 RTL_EXPR_RTL (exp)
6980 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6981 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6982
6983 /* That's it for this cleanup. */
6984 TREE_OPERAND (exp, 2) = 0;
6985 }
6986 return RTL_EXPR_RTL (exp);
6987
6988 case CLEANUP_POINT_EXPR:
6989 {
6990 /* Start a new binding layer that will keep track of all cleanup
6991 actions to be performed. */
6992 expand_start_bindings (2);
6993
6994 target_temp_slot_level = temp_slot_level;
6995
6996 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6997 /* If we're going to use this value, load it up now. */
6998 if (! ignore)
6999 op0 = force_not_mem (op0);
7000 preserve_temp_slots (op0);
7001 expand_end_bindings (NULL_TREE, 0, 0);
7002 }
7003 return op0;
7004
7005 case CALL_EXPR:
7006 /* Check for a built-in function. */
7007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7008 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7009 == FUNCTION_DECL)
7010 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7011 return expand_builtin (exp, target, subtarget, tmode, ignore);
7012
7013 /* If this call was expanded already by preexpand_calls,
7014 just return the result we got. */
7015 if (CALL_EXPR_RTL (exp) != 0)
7016 return CALL_EXPR_RTL (exp);
7017
7018 return expand_call (exp, target, ignore);
7019
7020 case NON_LVALUE_EXPR:
7021 case NOP_EXPR:
7022 case CONVERT_EXPR:
7023 case REFERENCE_EXPR:
7024 if (TREE_CODE (type) == UNION_TYPE)
7025 {
7026 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7027
7028 /* If both input and output are BLKmode, this conversion
7029 isn't actually doing anything unless we need to make the
7030 alignment stricter. */
7031 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7032 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7033 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7034 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7035 modifier);
7036
7037 if (target == 0)
7038 {
7039 if (mode != BLKmode)
7040 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7041 else
7042 target = assign_temp (type, 0, 1, 1);
7043 }
7044
7045 if (GET_CODE (target) == MEM)
7046 /* Store data into beginning of memory target. */
7047 store_expr (TREE_OPERAND (exp, 0),
7048 change_address (target, TYPE_MODE (valtype), 0), 0);
7049
7050 else if (GET_CODE (target) == REG)
7051 /* Store this field into a union of the proper type. */
7052 store_field (target,
7053 MIN ((int_size_in_bytes (TREE_TYPE
7054 (TREE_OPERAND (exp, 0)))
7055 * BITS_PER_UNIT),
7056 GET_MODE_BITSIZE (mode)),
7057 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7058 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7059 else
7060 abort ();
7061
7062 /* Return the entire union. */
7063 return target;
7064 }
7065
7066 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7067 {
7068 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7069 ro_modifier);
7070
7071 /* If the signedness of the conversion differs and OP0 is
7072 a promoted SUBREG, clear that indication since we now
7073 have to do the proper extension. */
7074 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7075 && GET_CODE (op0) == SUBREG)
7076 SUBREG_PROMOTED_VAR_P (op0) = 0;
7077
7078 return op0;
7079 }
7080
7081 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7082 if (GET_MODE (op0) == mode)
7083 return op0;
7084
7085 /* If OP0 is a constant, just convert it into the proper mode. */
7086 if (CONSTANT_P (op0))
7087 return
7088 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7089 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7090
7091 if (modifier == EXPAND_INITIALIZER)
7092 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7093
7094 if (target == 0)
7095 return
7096 convert_to_mode (mode, op0,
7097 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7098 else
7099 convert_move (target, op0,
7100 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7101 return target;
7102
7103 case PLUS_EXPR:
7104 /* We come here from MINUS_EXPR when the second operand is a
7105 constant. */
7106 plus_expr:
7107 this_optab = add_optab;
7108
7109 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7110 something else, make sure we add the register to the constant and
7111 then to the other thing. This case can occur during strength
7112 reduction and doing it this way will produce better code if the
7113 frame pointer or argument pointer is eliminated.
7114
7115 fold-const.c will ensure that the constant is always in the inner
7116 PLUS_EXPR, so the only case we need to do anything about is if
7117 sp, ap, or fp is our second argument, in which case we must swap
7118 the innermost first argument and our second argument. */
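      /* E.g. the tree `(y + 4) + FP' is rearranged here into
	 `(FP + 4) + y', so that after frame pointer elimination the
	 constant 4 can fold into the offset of FP's replacement.
	 (Sketch; FP stands for an RTL_EXPR whose rtl is
	 frame_pointer_rtx.)  */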
7119
7120 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7121 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7122 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7123 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7124 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7125 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7126 {
7127 tree t = TREE_OPERAND (exp, 1);
7128
7129 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7130 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7131 }
7132
7133 /* If the result is to be ptr_mode and we are adding an integer to
7134 something, we might be forming a constant. So try to use
7135 plus_constant. If it produces a sum and we can't accept it,
7136 use force_operand. This allows P = &ARR[const] to generate
7137 efficient code on machines where a SYMBOL_REF is not a valid
7138 address.
7139
7140 If this is an EXPAND_SUM call, always return the sum. */
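      /* Illustration: with `static int arr[10];', the initializer
	 `int *p = &arr[3];' reaches here as (symbol_ref "arr") plus a
	 constant, and plus_constant folds the pair into

	     (const (plus (symbol_ref "arr") (const_int 12)))

	 (assuming 4-byte ints), so no addition is ever emitted.  */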
7141 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7142 || mode == ptr_mode)
7143 {
7144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7145 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7146 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7147 {
7148 rtx constant_part;
7149
7150 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7151 EXPAND_SUM);
7152 /* Use immed_double_const to ensure that the constant is
7153 truncated according to the mode of OP1, then sign extended
7154 to a HOST_WIDE_INT. Using the constant directly can result
7155 in non-canonical RTL in a 64x32 cross compile. */
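	    /* E.g. when cross compiling from a 64-bit host to a 32-bit
	       target, the SImode constant 0xffffffff taken directly
	       would give (const_int 0xffffffff); the canonical form is
	       (const_int -1), which immed_double_const produces.  */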
7156 constant_part
7157 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7158 (HOST_WIDE_INT) 0,
7159 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7160 op1 = plus_constant (op1, INTVAL (constant_part));
7161 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7162 op1 = force_operand (op1, target);
7163 return op1;
7164 }
7165
7166 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7167 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7168 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7169 {
7170 rtx constant_part;
7171
7172 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7173 EXPAND_SUM);
7174 if (! CONSTANT_P (op0))
7175 {
7176 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7177 VOIDmode, modifier);
7178 /* Don't go to both_summands if modifier
7179 says it's not right to return a PLUS. */
7180 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7181 goto binop2;
7182 goto both_summands;
7183 }
 7184 	      /* Use immed_double_const to ensure that the constant is
 7185 		 truncated according to the mode of OP0, then sign extended
 7186 		 to a HOST_WIDE_INT.  Using the constant directly can result
 7187 		 in non-canonical RTL in a 64x32 cross compile.  */
7188 constant_part
7189 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7190 (HOST_WIDE_INT) 0,
7191 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7192 op0 = plus_constant (op0, INTVAL (constant_part));
7193 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7194 op0 = force_operand (op0, target);
7195 return op0;
7196 }
7197 }
7198
7199 /* No sense saving up arithmetic to be done
7200 if it's all in the wrong mode to form part of an address.
7201 And force_operand won't know whether to sign-extend or
7202 zero-extend. */
7203 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7204 || mode != ptr_mode)
7205 goto binop;
7206
7207 preexpand_calls (exp);
7208 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7209 subtarget = 0;
7210
7211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7212 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7213
7214 both_summands:
7215 /* Make sure any term that's a sum with a constant comes last. */
7216 if (GET_CODE (op0) == PLUS
7217 && CONSTANT_P (XEXP (op0, 1)))
7218 {
7219 temp = op0;
7220 op0 = op1;
7221 op1 = temp;
7222 }
7223 /* If adding to a sum including a constant,
7224 associate it to put the constant outside. */
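      /* For instance, if OP0 is (reg 60) and OP1 is
	 (plus (reg 61) (const_int 4)), the code below combines the two
	 registers first and re-attaches the constant, yielding
	 (plus (plus (reg 61) (reg 60)) (const_int 4)).  */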
7225 if (GET_CODE (op1) == PLUS
7226 && CONSTANT_P (XEXP (op1, 1)))
7227 {
7228 rtx constant_term = const0_rtx;
7229
7230 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7231 if (temp != 0)
7232 op0 = temp;
7233 /* Ensure that MULT comes first if there is one. */
7234 else if (GET_CODE (op0) == MULT)
7235 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7236 else
7237 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7238
7239 /* Let's also eliminate constants from op0 if possible. */
7240 op0 = eliminate_constant_term (op0, &constant_term);
7241
7242 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7243 their sum should be a constant. Form it into OP1, since the
7244 result we want will then be OP0 + OP1. */
7245
7246 temp = simplify_binary_operation (PLUS, mode, constant_term,
7247 XEXP (op1, 1));
7248 if (temp != 0)
7249 op1 = temp;
7250 else
7251 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7252 }
7253
7254 /* Put a constant term last and put a multiplication first. */
7255 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7256 temp = op1, op1 = op0, op0 = temp;
7257
7258 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7259 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7260
7261 case MINUS_EXPR:
 7262 	    /* For initializers, we are allowed to return a MINUS of two
 7263 	       symbolic constants.  Handle all cases here in which both
 7264 	       operands are constant.  */
7267 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7268 && really_constant_p (TREE_OPERAND (exp, 0))
7269 && really_constant_p (TREE_OPERAND (exp, 1)))
7270 {
7271 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7272 VOIDmode, ro_modifier);
7273 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7274 VOIDmode, ro_modifier);
7275
7276 /* If the last operand is a CONST_INT, use plus_constant of
7277 the negated constant. Else make the MINUS. */
7278 if (GET_CODE (op1) == CONST_INT)
7279 return plus_constant (op0, - INTVAL (op1));
7280 else
7281 return gen_rtx_MINUS (mode, op0, op1);
7282 }
7283 /* Convert A - const to A + (-const). */
7284 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7285 {
7286 tree negated = fold (build1 (NEGATE_EXPR, type,
7287 TREE_OPERAND (exp, 1)));
7288
7289 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7290 /* If we can't negate the constant in TYPE, leave it alone and
7291 expand_binop will negate it for us. We used to try to do it
7292 here in the signed version of TYPE, but that doesn't work
7293 on POINTER_TYPEs. */;
7294 else
7295 {
7296 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7297 goto plus_expr;
7298 }
7299 }
7300 this_optab = sub_optab;
7301 goto binop;
7302
7303 case MULT_EXPR:
7304 preexpand_calls (exp);
7305 /* If first operand is constant, swap them.
7306 Thus the following special case checks need only
7307 check the second operand. */
7308 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7309 {
7310 register tree t1 = TREE_OPERAND (exp, 0);
7311 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7312 TREE_OPERAND (exp, 1) = t1;
7313 }
7314
7315 /* Attempt to return something suitable for generating an
7316 indexed address, for machines that support that. */
7317
7318 if (modifier == EXPAND_SUM && mode == ptr_mode
7319 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7320 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7321 {
7322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7323 EXPAND_SUM);
7324
7325 /* Apply distributive law if OP0 is x+c. */
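	  /* E.g. if OP0 is (plus (reg 60) (const_int 4)) and the
	     multiplier is 8, this returns
	     (plus (mult (reg 60) (const_int 8)) (const_int 32)).  */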
7326 if (GET_CODE (op0) == PLUS
7327 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7328 return
7329 gen_rtx_PLUS
7330 (mode,
7331 gen_rtx_MULT
7332 (mode, XEXP (op0, 0),
7333 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7334 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7335 * INTVAL (XEXP (op0, 1))));
7336
7337 if (GET_CODE (op0) != REG)
7338 op0 = force_operand (op0, NULL_RTX);
7339 if (GET_CODE (op0) != REG)
7340 op0 = copy_to_mode_reg (mode, op0);
7341
7342 return
7343 gen_rtx_MULT (mode, op0,
7344 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7345 }
7346
7347 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7348 subtarget = 0;
7349
7350 /* Check for multiplying things that have been extended
7351 from a narrower type. If this machine supports multiplying
7352 in that narrower type with a result in the desired type,
7353 do it that way, and avoid the explicit type-conversion. */
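      /* For example, on a target providing a mulhisi3 pattern,

	     short a, b;
	     int p = (int) a * (int) b;

	 can be done as a single HImode x HImode -> SImode multiply,
	 never materializing the widened SImode operands.  */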
7354 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7355 && TREE_CODE (type) == INTEGER_TYPE
7356 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7357 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7358 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7359 && int_fits_type_p (TREE_OPERAND (exp, 1),
7360 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7361 /* Don't use a widening multiply if a shift will do. */
7362 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7363 > HOST_BITS_PER_WIDE_INT)
7364 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7365 ||
7366 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7367 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7368 ==
7369 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7370 /* If both operands are extended, they must either both
7371 be zero-extended or both be sign-extended. */
7372 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7373 ==
7374 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7375 {
7376 enum machine_mode innermode
7377 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7378 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7379 ? smul_widen_optab : umul_widen_optab);
7380 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7381 ? umul_widen_optab : smul_widen_optab);
7382 if (mode == GET_MODE_WIDER_MODE (innermode))
7383 {
7384 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7385 {
7386 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7387 NULL_RTX, VOIDmode, 0);
7388 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7389 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7390 VOIDmode, 0);
7391 else
7392 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7393 NULL_RTX, VOIDmode, 0);
7394 goto binop2;
7395 }
7396 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7397 && innermode == word_mode)
7398 {
7399 rtx htem;
7400 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7401 NULL_RTX, VOIDmode, 0);
7402 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7403 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7404 VOIDmode, 0);
7405 else
7406 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7407 NULL_RTX, VOIDmode, 0);
7408 temp = expand_binop (mode, other_optab, op0, op1, target,
7409 unsignedp, OPTAB_LIB_WIDEN);
7410 htem = expand_mult_highpart_adjust (innermode,
7411 gen_highpart (innermode, temp),
7412 op0, op1,
7413 gen_highpart (innermode, temp),
7414 unsignedp);
7415 emit_move_insn (gen_highpart (innermode, temp), htem);
7416 return temp;
7417 }
7418 }
7419 }
7420 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7421 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7422 return expand_mult (mode, op0, op1, target, unsignedp);
7423
7424 case TRUNC_DIV_EXPR:
7425 case FLOOR_DIV_EXPR:
7426 case CEIL_DIV_EXPR:
7427 case ROUND_DIV_EXPR:
7428 case EXACT_DIV_EXPR:
7429 preexpand_calls (exp);
7430 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7431 subtarget = 0;
 7432 	/* Possible optimization: compute the dividend with EXPAND_SUM;
 7433 	   then, if the divisor is constant, we can optimize the case where
 7434 	   some terms of the dividend have coefficients divisible by it.  */
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7436 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7437 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7438
7439 case RDIV_EXPR:
7440 this_optab = flodiv_optab;
7441 goto binop;
7442
7443 case TRUNC_MOD_EXPR:
7444 case FLOOR_MOD_EXPR:
7445 case CEIL_MOD_EXPR:
7446 case ROUND_MOD_EXPR:
7447 preexpand_calls (exp);
7448 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7449 subtarget = 0;
7450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7451 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7452 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7453
7454 case FIX_ROUND_EXPR:
7455 case FIX_FLOOR_EXPR:
7456 case FIX_CEIL_EXPR:
7457 abort (); /* Not used for C. */
7458
7459 case FIX_TRUNC_EXPR:
7460 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7461 if (target == 0)
7462 target = gen_reg_rtx (mode);
7463 expand_fix (target, op0, unsignedp);
7464 return target;
7465
7466 case FLOAT_EXPR:
7467 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7468 if (target == 0)
7469 target = gen_reg_rtx (mode);
7470 /* expand_float can't figure out what to do if FROM has VOIDmode.
7471 So give it the correct mode. With -O, cse will optimize this. */
7472 if (GET_MODE (op0) == VOIDmode)
7473 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7474 op0);
7475 expand_float (target, op0,
7476 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7477 return target;
7478
7479 case NEGATE_EXPR:
7480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7481 temp = expand_unop (mode, neg_optab, op0, target, 0);
7482 if (temp == 0)
7483 abort ();
7484 return temp;
7485
7486 case ABS_EXPR:
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7488
7489 /* Handle complex values specially. */
7490 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7491 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7492 return expand_complex_abs (mode, op0, target, unsignedp);
7493
7494 /* Unsigned abs is simply the operand. Testing here means we don't
7495 risk generating incorrect code below. */
7496 if (TREE_UNSIGNED (type))
7497 return op0;
7498
7499 return expand_abs (mode, op0, target,
7500 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7501
7502 case MAX_EXPR:
7503 case MIN_EXPR:
7504 target = original_target;
7505 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7506 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7507 || GET_MODE (target) != mode
7508 || (GET_CODE (target) == REG
7509 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7510 target = gen_reg_rtx (mode);
7511 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7512 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7513
7514 /* First try to do it with a special MIN or MAX instruction.
7515 If that does not win, use a conditional jump to select the proper
7516 value. */
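      /* The conditional-jump fallback for MAX_EXPR is, in outline,

	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:

	 and likewise with <= for MIN_EXPR.  */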
7517 this_optab = (TREE_UNSIGNED (type)
7518 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7519 : (code == MIN_EXPR ? smin_optab : smax_optab));
7520
7521 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7522 OPTAB_WIDEN);
7523 if (temp != 0)
7524 return temp;
7525
7526 /* At this point, a MEM target is no longer useful; we will get better
7527 code without it. */
7528
7529 if (GET_CODE (target) == MEM)
7530 target = gen_reg_rtx (mode);
7531
7532 if (target != op0)
7533 emit_move_insn (target, op0);
7534
7535 op0 = gen_label_rtx ();
7536
7537 /* If this mode is an integer too wide to compare properly,
7538 compare word by word. Rely on cse to optimize constant cases. */
7539 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7540 {
7541 if (code == MAX_EXPR)
7542 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7543 target, op1, NULL_RTX, op0);
7544 else
7545 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7546 op1, target, NULL_RTX, op0);
7547 }
7548 else
7549 {
7550 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7551 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7552 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7553 op0);
7554 }
7555 emit_move_insn (target, op1);
7556 emit_label (op0);
7557 return target;
7558
7559 case BIT_NOT_EXPR:
7560 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7561 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7562 if (temp == 0)
7563 abort ();
7564 return temp;
7565
7566 case FFS_EXPR:
7567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7568 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7569 if (temp == 0)
7570 abort ();
7571 return temp;
7572
7573 /* ??? Can optimize bitwise operations with one arg constant.
7574 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7575 and (a bitwise1 b) bitwise2 b (etc)
 7576 	 but that is probably not worthwhile.  */
7577
7578 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7579 boolean values when we want in all cases to compute both of them. In
7580 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7581 as actual zero-or-1 values and then bitwise anding. In cases where
7582 there cannot be any side effects, better code would be made by
7583 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7584 how to recognize those cases. */
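    /* Concretely, `x && y' as a TRUTH_AND_EXPR expands roughly as

	   t0 = (x != 0);
	   t1 = (y != 0);
	   result = t0 & t1;

       evaluating both operands unconditionally, where the
       TRUTH_ANDIF_EXPR form instead branches around Y.  */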
7585
7586 case TRUTH_AND_EXPR:
7587 case BIT_AND_EXPR:
7588 this_optab = and_optab;
7589 goto binop;
7590
7591 case TRUTH_OR_EXPR:
7592 case BIT_IOR_EXPR:
7593 this_optab = ior_optab;
7594 goto binop;
7595
7596 case TRUTH_XOR_EXPR:
7597 case BIT_XOR_EXPR:
7598 this_optab = xor_optab;
7599 goto binop;
7600
7601 case LSHIFT_EXPR:
7602 case RSHIFT_EXPR:
7603 case LROTATE_EXPR:
7604 case RROTATE_EXPR:
7605 preexpand_calls (exp);
7606 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7607 subtarget = 0;
7608 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7609 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7610 unsignedp);
7611
7612 /* Could determine the answer when only additive constants differ. Also,
7613 the addition of one can be handled by changing the condition. */
7614 case LT_EXPR:
7615 case LE_EXPR:
7616 case GT_EXPR:
7617 case GE_EXPR:
7618 case EQ_EXPR:
7619 case NE_EXPR:
7620 preexpand_calls (exp);
7621 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7622 if (temp != 0)
7623 return temp;
7624
7625 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7626 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7627 && original_target
7628 && GET_CODE (original_target) == REG
7629 && (GET_MODE (original_target)
7630 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7631 {
7632 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7633 VOIDmode, 0);
7634
7635 if (temp != original_target)
7636 temp = copy_to_reg (temp);
7637
7638 op1 = gen_label_rtx ();
7639 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7640 GET_MODE (temp), unsignedp, 0, op1);
7641 emit_move_insn (temp, const1_rtx);
7642 emit_label (op1);
7643 return temp;
7644 }
7645
7646 /* If no set-flag instruction, must generate a conditional
7647 store into a temporary variable. Drop through
7648 and handle this like && and ||. */
7649
7650 case TRUTH_ANDIF_EXPR:
7651 case TRUTH_ORIF_EXPR:
7652 if (! ignore
7653 && (target == 0 || ! safe_from_p (target, exp, 1)
7654 /* Make sure we don't have a hard reg (such as function's return
7655 value) live across basic blocks, if not optimizing. */
7656 || (!optimize && GET_CODE (target) == REG
7657 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7658 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7659
7660 if (target)
7661 emit_clr_insn (target);
7662
7663 op1 = gen_label_rtx ();
7664 jumpifnot (exp, op1);
7665
7666 if (target)
7667 emit_0_to_1_insn (target);
7668
7669 emit_label (op1);
7670 return ignore ? const0_rtx : target;
7671
7672 case TRUTH_NOT_EXPR:
7673 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7674 /* The parser is careful to generate TRUTH_NOT_EXPR
7675 only with operands that are always zero or one. */
7676 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7677 target, 1, OPTAB_LIB_WIDEN);
7678 if (temp == 0)
7679 abort ();
7680 return temp;
7681
7682 case COMPOUND_EXPR:
7683 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7684 emit_queue ();
7685 return expand_expr (TREE_OPERAND (exp, 1),
7686 (ignore ? const0_rtx : target),
7687 VOIDmode, 0);
7688
7689 case COND_EXPR:
7690 /* If we would have a "singleton" (see below) were it not for a
7691 conversion in each arm, bring that conversion back out. */
7692 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7693 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7694 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7695 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7696 {
7697 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7698 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7699
7700 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7701 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7702 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7703 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7704 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7705 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7706 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7707 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7708 return expand_expr (build1 (NOP_EXPR, type,
7709 build (COND_EXPR, TREE_TYPE (true),
7710 TREE_OPERAND (exp, 0),
7711 true, false)),
7712 target, tmode, modifier);
7713 }
7714
7715 {
7716 /* Note that COND_EXPRs whose type is a structure or union
7717 are required to be constructed to contain assignments of
7718 a temporary variable, so that we can evaluate them here
7719 for side effect only. If type is void, we must do likewise. */
7720
7721 /* If an arm of the branch requires a cleanup,
7722 only that cleanup is performed. */
7723
7724 tree singleton = 0;
7725 tree binary_op = 0, unary_op = 0;
7726
7727 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7728 convert it to our mode, if necessary. */
7729 if (integer_onep (TREE_OPERAND (exp, 1))
7730 && integer_zerop (TREE_OPERAND (exp, 2))
7731 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7732 {
7733 if (ignore)
7734 {
7735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7736 ro_modifier);
7737 return const0_rtx;
7738 }
7739
7740 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7741 if (GET_MODE (op0) == mode)
7742 return op0;
7743
7744 if (target == 0)
7745 target = gen_reg_rtx (mode);
7746 convert_move (target, op0, unsignedp);
7747 return target;
7748 }
7749
7750 /* Check for X ? A + B : A. If we have this, we can copy A to the
7751 output and conditionally add B. Similarly for unary operations.
7752 Don't do this if X has side-effects because those side effects
7753 might affect A or B and the "?" operation is a sequence point in
7754 ANSI. (operand_equal_p tests for side effects.) */
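	/* So for `x ? a + b : a' the expansion is, in outline,

	       target = a;
	       if (x) target = target + b;

	   rather than computing A separately on both arms.  */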
7755
7756 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7757 && operand_equal_p (TREE_OPERAND (exp, 2),
7758 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7759 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7760 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7761 && operand_equal_p (TREE_OPERAND (exp, 1),
7762 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7763 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7764 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7765 && operand_equal_p (TREE_OPERAND (exp, 2),
7766 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7767 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7768 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7769 && operand_equal_p (TREE_OPERAND (exp, 1),
7770 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7771 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7772
7773 /* If we are not to produce a result, we have no target. Otherwise,
7774 if a target was specified use it; it will not be used as an
7775 intermediate target unless it is safe. If no target, use a
7776 temporary. */
7777
7778 if (ignore)
7779 temp = 0;
7780 else if (original_target
7781 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7782 || (singleton && GET_CODE (original_target) == REG
7783 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7784 && original_target == var_rtx (singleton)))
7785 && GET_MODE (original_target) == mode
7786 #ifdef HAVE_conditional_move
7787 && (! can_conditionally_move_p (mode)
7788 || GET_CODE (original_target) == REG
7789 || TREE_ADDRESSABLE (type))
7790 #endif
7791 && ! (GET_CODE (original_target) == MEM
7792 && MEM_VOLATILE_P (original_target)))
7793 temp = original_target;
7794 else if (TREE_ADDRESSABLE (type))
7795 abort ();
7796 else
7797 temp = assign_temp (type, 0, 0, 1);
7798
7799 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7800 do the test of X as a store-flag operation, do this as
7801 A + ((X != 0) << log C). Similarly for other simple binary
7802 operators. Only do for C == 1 if BRANCH_COST is low. */
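	/* E.g. when BRANCH_COST allows it, `x ? a + 4 : a' becomes
	   a + ((x != 0) << 2),  and `x ? a + 1 : a' becomes simply
	   a + (x != 0),  with no branch at all.  */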
7803 if (temp && singleton && binary_op
7804 && (TREE_CODE (binary_op) == PLUS_EXPR
7805 || TREE_CODE (binary_op) == MINUS_EXPR
7806 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7807 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7808 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7809 : integer_onep (TREE_OPERAND (binary_op, 1)))
7810 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7811 {
7812 rtx result;
7813 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7814 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7815 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7816 : xor_optab);
7817
7818 /* If we had X ? A : A + 1, do this as A + (X == 0).
7819
7820 We have to invert the truth value here and then put it
7821 back later if do_store_flag fails. We cannot simply copy
7822 TREE_OPERAND (exp, 0) to another variable and modify that
7823 because invert_truthvalue can modify the tree pointed to
7824 by its argument. */
7825 if (singleton == TREE_OPERAND (exp, 1))
7826 TREE_OPERAND (exp, 0)
7827 = invert_truthvalue (TREE_OPERAND (exp, 0));
7828
7829 result = do_store_flag (TREE_OPERAND (exp, 0),
7830 (safe_from_p (temp, singleton, 1)
7831 ? temp : NULL_RTX),
7832 mode, BRANCH_COST <= 1);
7833
7834 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7835 result = expand_shift (LSHIFT_EXPR, mode, result,
7836 build_int_2 (tree_log2
7837 (TREE_OPERAND
7838 (binary_op, 1)),
7839 0),
7840 (safe_from_p (temp, singleton, 1)
7841 ? temp : NULL_RTX), 0);
7842
7843 if (result)
7844 {
7845 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7846 return expand_binop (mode, boptab, op1, result, temp,
7847 unsignedp, OPTAB_LIB_WIDEN);
7848 }
7849 else if (singleton == TREE_OPERAND (exp, 1))
7850 TREE_OPERAND (exp, 0)
7851 = invert_truthvalue (TREE_OPERAND (exp, 0));
7852 }
7853
7854 do_pending_stack_adjust ();
7855 NO_DEFER_POP;
7856 op0 = gen_label_rtx ();
7857
7858 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7859 {
7860 if (temp != 0)
7861 {
7862 /* If the target conflicts with the other operand of the
7863 binary op, we can't use it. Also, we can't use the target
7864 if it is a hard register, because evaluating the condition
7865 might clobber it. */
7866 if ((binary_op
7867 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7868 || (GET_CODE (temp) == REG
7869 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7870 temp = gen_reg_rtx (mode);
7871 store_expr (singleton, temp, 0);
7872 }
7873 else
7874 expand_expr (singleton,
7875 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7876 if (singleton == TREE_OPERAND (exp, 1))
7877 jumpif (TREE_OPERAND (exp, 0), op0);
7878 else
7879 jumpifnot (TREE_OPERAND (exp, 0), op0);
7880
7881 start_cleanup_deferral ();
7882 if (binary_op && temp == 0)
7883 /* Just touch the other operand. */
7884 expand_expr (TREE_OPERAND (binary_op, 1),
7885 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7886 else if (binary_op)
7887 store_expr (build (TREE_CODE (binary_op), type,
7888 make_tree (type, temp),
7889 TREE_OPERAND (binary_op, 1)),
7890 temp, 0);
7891 else
7892 store_expr (build1 (TREE_CODE (unary_op), type,
7893 make_tree (type, temp)),
7894 temp, 0);
7895 op1 = op0;
7896 }
7897 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7898 comparison operator. If we have one of these cases, set the
7899 output to A, branch on A (cse will merge these two references),
7900 then set the output to FOO. */
7901 else if (temp
7902 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7903 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7905 TREE_OPERAND (exp, 1), 0)
7906 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7907 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7908 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7909 {
7910 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7911 temp = gen_reg_rtx (mode);
7912 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7913 jumpif (TREE_OPERAND (exp, 0), op0);
7914
7915 start_cleanup_deferral ();
7916 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7917 op1 = op0;
7918 }
7919 else if (temp
7920 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7921 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7922 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7923 TREE_OPERAND (exp, 2), 0)
7924 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7925 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7926 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7927 {
7928 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7929 temp = gen_reg_rtx (mode);
7930 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7931 jumpifnot (TREE_OPERAND (exp, 0), op0);
7932
7933 start_cleanup_deferral ();
7934 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7935 op1 = op0;
7936 }
7937 else
7938 {
7939 op1 = gen_label_rtx ();
7940 jumpifnot (TREE_OPERAND (exp, 0), op0);
7941
7942 start_cleanup_deferral ();
7943
 7944 	      /* One branch of the cond can be void, if it never returns.
 7945 		 For example: A ? throw : E.  */
7946 if (temp != 0
7947 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7948 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7949 else
7950 expand_expr (TREE_OPERAND (exp, 1),
7951 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7952 end_cleanup_deferral ();
7953 emit_queue ();
7954 emit_jump_insn (gen_jump (op1));
7955 emit_barrier ();
7956 emit_label (op0);
7957 start_cleanup_deferral ();
7958 if (temp != 0
7959 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7960 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7961 else
7962 expand_expr (TREE_OPERAND (exp, 2),
7963 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7964 }
7965
7966 end_cleanup_deferral ();
7967
7968 emit_queue ();
7969 emit_label (op1);
7970 OK_DEFER_POP;
7971
7972 return temp;
7973 }
7974
7975 case TARGET_EXPR:
7976 {
7977 /* Something needs to be initialized, but we didn't know
7978 where that thing was when building the tree. For example,
7979 it could be the return value of a function, or a parameter
 7980 	   to a function which is laid out on the stack, or a temporary
7981 variable which must be passed by reference.
7982
7983 We guarantee that the expression will either be constructed
7984 or copied into our original target. */
7985
7986 tree slot = TREE_OPERAND (exp, 0);
7987 tree cleanups = NULL_TREE;
7988 tree exp1;
7989
7990 if (TREE_CODE (slot) != VAR_DECL)
7991 abort ();
7992
7993 if (! ignore)
7994 target = original_target;
7995
7996 /* Set this here so that if we get a target that refers to a
7997 register variable that's already been used, put_reg_into_stack
7998 knows that it should fix up those uses. */
7999 TREE_USED (slot) = 1;
8000
8001 if (target == 0)
8002 {
8003 if (DECL_RTL (slot) != 0)
8004 {
8005 target = DECL_RTL (slot);
 8006 		/* We have already expanded the slot, so don't do
 8007 		   it again.  (mrs)  */
8008 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8009 return target;
8010 }
8011 else
8012 {
8013 target = assign_temp (type, 2, 0, 1);
8014 /* All temp slots at this level must not conflict. */
8015 preserve_temp_slots (target);
8016 DECL_RTL (slot) = target;
8017 if (TREE_ADDRESSABLE (slot))
8018 {
8019 TREE_ADDRESSABLE (slot) = 0;
8020 mark_addressable (slot);
8021 }
8022
8023 /* Since SLOT is not known to the called function
8024 to belong to its stack frame, we must build an explicit
8025 cleanup. This case occurs when we must build up a reference
8026 to pass the reference as an argument. In this case,
8027 it is very likely that such a reference need not be
8028 built here. */
8029
8030 if (TREE_OPERAND (exp, 2) == 0)
8031 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8032 cleanups = TREE_OPERAND (exp, 2);
8033 }
8034 }
8035 else
8036 {
 8037 	    /* This case does occur when expanding a parameter which
8038 needs to be constructed on the stack. The target
8039 is the actual stack address that we want to initialize.
8040 The function we call will perform the cleanup in this case. */
8041
 8042 	    /* If we have already assigned it space, use that space,
 8043 	       not the target that we were passed in, as our target
 8044 	       parameter is only a hint.  */
8045 if (DECL_RTL (slot) != 0)
8046 {
8047 target = DECL_RTL (slot);
 8048 		/* We have already expanded the slot, so don't do
 8049 		   it again.  (mrs)  */
8050 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8051 return target;
8052 }
8053 else
8054 {
8055 DECL_RTL (slot) = target;
8056 /* If we must have an addressable slot, then make sure that
8057 the RTL that we just stored in slot is OK. */
8058 if (TREE_ADDRESSABLE (slot))
8059 {
8060 TREE_ADDRESSABLE (slot) = 0;
8061 mark_addressable (slot);
8062 }
8063 }
8064 }
8065
8066 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8067 /* Mark it as expanded. */
8068 TREE_OPERAND (exp, 1) = NULL_TREE;
8069
8070 store_expr (exp1, target, 0);
8071
8072 expand_decl_cleanup (NULL_TREE, cleanups);
8073
8074 return target;
8075 }
8076
8077 case INIT_EXPR:
8078 {
8079 tree lhs = TREE_OPERAND (exp, 0);
8080 tree rhs = TREE_OPERAND (exp, 1);
8081 tree noncopied_parts = 0;
8082 tree lhs_type = TREE_TYPE (lhs);
8083
8084 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8085 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8086 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8087 TYPE_NONCOPIED_PARTS (lhs_type));
8088 while (noncopied_parts != 0)
8089 {
8090 expand_assignment (TREE_VALUE (noncopied_parts),
8091 TREE_PURPOSE (noncopied_parts), 0, 0);
8092 noncopied_parts = TREE_CHAIN (noncopied_parts);
8093 }
8094 return temp;
8095 }
8096
8097 case MODIFY_EXPR:
8098 {
8099 /* If lhs is complex, expand calls in rhs before computing it.
8100 That's so we don't compute a pointer and save it over a call.
8101 If lhs is simple, compute it first so we can give it as a
 8102 	 target if the rhs is just a call.  This avoids an extra temp and copy,
 8103 	 and prevents a partial subsumption which makes bad code.
8104 Actually we could treat component_ref's of vars like vars. */
8105
8106 tree lhs = TREE_OPERAND (exp, 0);
8107 tree rhs = TREE_OPERAND (exp, 1);
8108 tree noncopied_parts = 0;
8109 tree lhs_type = TREE_TYPE (lhs);
8110
8111 temp = 0;
8112
8113 if (TREE_CODE (lhs) != VAR_DECL
8114 && TREE_CODE (lhs) != RESULT_DECL
8115 && TREE_CODE (lhs) != PARM_DECL
8116 && ! (TREE_CODE (lhs) == INDIRECT_REF
8117 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8118 preexpand_calls (exp);
8119
8120 /* Check for |= or &= of a bitfield of size one into another bitfield
8121 of size 1. In this case, (unless we need the result of the
8122 assignment) we can do this more efficiently with a
8123 test followed by an assignment, if necessary.
8124
8125 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8126 things change so we do, this code should be enhanced to
8127 support it. */
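	/* For example, with one-bit bitfields, `s.a |= t.b;' is
	   emitted as

	       if (t.b) s.a = 1;

	   and `s.a &= t.b;' as

	       if (! t.b) s.a = 0;

	   so the store is skipped whenever the value cannot change.  */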
8128 if (ignore
8129 && TREE_CODE (lhs) == COMPONENT_REF
8130 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8131 || TREE_CODE (rhs) == BIT_AND_EXPR)
8132 && TREE_OPERAND (rhs, 0) == lhs
8133 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8134 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8135 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8136 {
8137 rtx label = gen_label_rtx ();
8138
8139 do_jump (TREE_OPERAND (rhs, 1),
8140 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8141 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8142 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8143 (TREE_CODE (rhs) == BIT_IOR_EXPR
8144 ? integer_one_node
8145 : integer_zero_node)),
8146 0, 0);
8147 do_pending_stack_adjust ();
8148 emit_label (label);
8149 return const0_rtx;
8150 }
8151
8152 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8153 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8154 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8155 TYPE_NONCOPIED_PARTS (lhs_type));
8156
8157 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8158 while (noncopied_parts != 0)
8159 {
8160 expand_assignment (TREE_PURPOSE (noncopied_parts),
8161 TREE_VALUE (noncopied_parts), 0, 0);
8162 noncopied_parts = TREE_CHAIN (noncopied_parts);
8163 }
8164 return temp;
8165 }
8166
8167 case RETURN_EXPR:
8168 if (!TREE_OPERAND (exp, 0))
8169 expand_null_return ();
8170 else
8171 expand_return (TREE_OPERAND (exp, 0));
8172 return const0_rtx;
8173
8174 case PREINCREMENT_EXPR:
8175 case PREDECREMENT_EXPR:
8176 return expand_increment (exp, 0, ignore);
8177
8178 case POSTINCREMENT_EXPR:
8179 case POSTDECREMENT_EXPR:
8180 /* Faster to treat as pre-increment if result is not used. */
8181 return expand_increment (exp, ! ignore, ignore);
8182
8183 case ADDR_EXPR:
8184 /* If nonzero, TEMP will be set to the address of something that might
8185 be a MEM corresponding to a stack slot. */
8186 temp = 0;
8187
8188 /* Are we taking the address of a nested function? */
8189 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8190 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8191 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8192 && ! TREE_STATIC (exp))
8193 {
8194 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8195 op0 = force_operand (op0, target);
8196 }
8197 /* If we are taking the address of something erroneous, just
8198 return a zero. */
8199 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8200 return const0_rtx;
8201 else
8202 {
 8203 	  /* We make sure to pass const0_rtx down if we came in with
 8204 	     ignore set, to avoid doing the cleanups twice.  */
8205 op0 = expand_expr (TREE_OPERAND (exp, 0),
8206 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8207 (modifier == EXPAND_INITIALIZER
8208 ? modifier : EXPAND_CONST_ADDRESS));
8209
8210 /* If we are going to ignore the result, OP0 will have been set
8211 to const0_rtx, so just return it. Don't get confused and
8212 think we are taking the address of the constant. */
8213 if (ignore)
8214 return op0;
8215
8216 op0 = protect_from_queue (op0, 0);
8217
8218 /* We would like the object in memory. If it is a constant, we can
8219 have it be statically allocated into memory. For a non-constant,
8220 we need to allocate some memory and store the value into it. */
8221
8222 if (CONSTANT_P (op0))
8223 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8224 op0);
8225 else if (GET_CODE (op0) == MEM)
8226 {
8227 mark_temp_addr_taken (op0);
8228 temp = XEXP (op0, 0);
8229 }
8230
8231 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8232 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8233 {
 8234 	      /* If this object is in a register, it must not
 8235 		 be BLKmode.  */
8236 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8237 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8238
8239 mark_temp_addr_taken (memloc);
8240 emit_move_insn (memloc, op0);
8241 op0 = memloc;
8242 }
8243
8244 if (GET_CODE (op0) != MEM)
8245 abort ();
8246
8247 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8248 {
8249 temp = XEXP (op0, 0);
8250 #ifdef POINTERS_EXTEND_UNSIGNED
8251 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8252 && mode == ptr_mode)
8253 temp = convert_memory_address (ptr_mode, temp);
8254 #endif
8255 return temp;
8256 }
8257
8258 op0 = force_operand (XEXP (op0, 0), target);
8259 }
8260
8261 if (flag_force_addr && GET_CODE (op0) != REG)
8262 op0 = force_reg (Pmode, op0);
8263
8264 if (GET_CODE (op0) == REG
8265 && ! REG_USERVAR_P (op0))
8266 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8267
8268 /* If we might have had a temp slot, add an equivalent address
8269 for it. */
8270 if (temp != 0)
8271 update_temp_slot_address (temp, op0);
8272
8273 #ifdef POINTERS_EXTEND_UNSIGNED
8274 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8275 && mode == ptr_mode)
8276 op0 = convert_memory_address (ptr_mode, op0);
8277 #endif
8278
8279 return op0;
8280
8281 case ENTRY_VALUE_EXPR:
8282 abort ();
8283
8284 /* COMPLEX type for Extended Pascal & Fortran */
8285 case COMPLEX_EXPR:
8286 {
8287 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8288 rtx insns;
8289
 8290 	/* Get the rtx of the operands.  */
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8292 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8293
8294 if (! target)
8295 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8296
8297 start_sequence ();
8298
8299 /* Move the real (op0) and imaginary (op1) parts to their location. */
8300 emit_move_insn (gen_realpart (mode, target), op0);
8301 emit_move_insn (gen_imagpart (mode, target), op1);
8302
8303 insns = get_insns ();
8304 end_sequence ();
8305
8306 /* Complex construction should appear as a single unit. */
8307 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8308 each with a separate pseudo as destination.
8309 It's not correct for flow to treat them as a unit. */
8310 if (GET_CODE (target) != CONCAT)
8311 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8312 else
8313 emit_insns (insns);
8314
8315 return target;
8316 }
8317
8318 case REALPART_EXPR:
8319 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8320 return gen_realpart (mode, op0);
8321
8322 case IMAGPART_EXPR:
8323 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8324 return gen_imagpart (mode, op0);
8325
8326 case CONJ_EXPR:
8327 {
8328 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8329 rtx imag_t;
8330 rtx insns;
8331
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333
8334 if (! target)
8335 target = gen_reg_rtx (mode);
8336
8337 start_sequence ();
8338
8339 /* Store the realpart and the negated imagpart to target. */
8340 emit_move_insn (gen_realpart (partmode, target),
8341 gen_realpart (partmode, op0));
8342
8343 imag_t = gen_imagpart (partmode, target);
8344 temp = expand_unop (partmode, neg_optab,
8345 gen_imagpart (partmode, op0), imag_t, 0);
8346 if (temp != imag_t)
8347 emit_move_insn (imag_t, temp);
8348
8349 insns = get_insns ();
8350 end_sequence ();
8351
 8352 	/* Conjugate should appear as a single unit.
 8353 	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8354 each with a separate pseudo as destination.
8355 It's not correct for flow to treat them as a unit. */
8356 if (GET_CODE (target) != CONCAT)
8357 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8358 else
8359 emit_insns (insns);
8360
8361 return target;
8362 }
8363
8364 case TRY_CATCH_EXPR:
8365 {
8366 tree handler = TREE_OPERAND (exp, 1);
8367
8368 expand_eh_region_start ();
8369
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8371
8372 expand_eh_region_end (handler);
8373
8374 return op0;
8375 }
8376
8377 case TRY_FINALLY_EXPR:
8378 {
8379 tree try_block = TREE_OPERAND (exp, 0);
8380 tree finally_block = TREE_OPERAND (exp, 1);
8381 rtx finally_label = gen_label_rtx ();
8382 rtx done_label = gen_label_rtx ();
8383 rtx return_link = gen_reg_rtx (Pmode);
8384 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8385 (tree) finally_label, (tree) return_link);
8386 TREE_SIDE_EFFECTS (cleanup) = 1;
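	/* The code emitted below is laid out, in outline, as

	       <try block>
	       return_link = &&resume;  goto finally;
	   resume:
	       goto done;
	   finally:
	       <finally block>
	       goto *return_link;
	   done:

	   so the finally block acts as a mini-subroutine; exceptional
	   paths reach it the same way through the registered cleanup.  */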
8387
8388 /* Start a new binding layer that will keep track of all cleanup
8389 actions to be performed. */
8390 expand_start_bindings (2);
8391
8392 target_temp_slot_level = temp_slot_level;
8393
8394 expand_decl_cleanup (NULL_TREE, cleanup);
8395 op0 = expand_expr (try_block, target, tmode, modifier);
8396
8397 preserve_temp_slots (op0);
8398 expand_end_bindings (NULL_TREE, 0, 0);
8399 emit_jump (done_label);
8400 emit_label (finally_label);
8401 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8402 emit_indirect_jump (return_link);
8403 emit_label (done_label);
8404 return op0;
8405 }
8406
8407 case GOTO_SUBROUTINE_EXPR:
8408 {
8409 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8410 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8411 rtx return_address = gen_label_rtx ();
8412 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8413 emit_jump (subr);
8414 emit_label (return_address);
8415 return const0_rtx;
8416 }
8417
8418 case POPDCC_EXPR:
8419 {
8420 rtx dcc = get_dynamic_cleanup_chain ();
8421 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8422 return const0_rtx;
8423 }
8424
8425 case POPDHC_EXPR:
8426 {
8427 rtx dhc = get_dynamic_handler_chain ();
8428 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8429 return const0_rtx;
8430 }
8431
8432 case VA_ARG_EXPR:
8433 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8434
8435 default:
8436 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8437 }
8438
8439 /* Here to do an ordinary binary operator, generating an instruction
8440 from the optab already placed in `this_optab'. */
8441 binop:
8442 preexpand_calls (exp);
8443 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8444 subtarget = 0;
8445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8447 binop2:
8448 temp = expand_binop (mode, this_optab, op0, op1, target,
8449 unsignedp, OPTAB_LIB_WIDEN);
8450 if (temp == 0)
8451 abort ();
8452 return temp;
8453 }
8454 \f
8455 /* Similar to expand_expr, except that we don't specify a target, target
8456 mode, or modifier and we return the alignment of the inner type. This is
8457 used in cases where it is not necessary to align the result to the
8458 alignment of its type as long as we know the alignment of the result, for
8459 example for comparisons of BLKmode values. */
8460
8461 static rtx
8462 expand_expr_unaligned (exp, palign)
8463 register tree exp;
8464 int *palign;
8465 {
8466 register rtx op0;
8467 tree type = TREE_TYPE (exp);
8468 register enum machine_mode mode = TYPE_MODE (type);
8469
8470 /* Default the alignment we return to that of the type. */
8471 *palign = TYPE_ALIGN (type);
8472
 8473   /* The only case in which we do anything special is when the resulting
 8474      mode is BLKmode.  */
8475 if (mode != BLKmode)
8476 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8477
8478 switch (TREE_CODE (exp))
8479 {
8480 case CONVERT_EXPR:
8481 case NOP_EXPR:
8482 case NON_LVALUE_EXPR:
8483 /* Conversions between BLKmode values don't change the underlying
8484 alignment or value. */
8485 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8486 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8487 break;
8488
8489 case ARRAY_REF:
8490 /* Much of the code for this case is copied directly from expand_expr.
8491 We need to duplicate it here because we will do something different
8492 in the fall-through case, so we need to handle the same exceptions
8493 it does. */
8494 {
8495 tree array = TREE_OPERAND (exp, 0);
8496 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8497 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8498 tree index = TREE_OPERAND (exp, 1);
8499 tree index_type = TREE_TYPE (index);
8500 HOST_WIDE_INT i;
8501
8502 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8503 abort ();
8504
8505 /* Optimize the special-case of a zero lower bound.
8506
8507 We convert the low_bound to sizetype to avoid some problems
8508 with constant folding. (E.g. suppose the lower bound is 1,
8509 and its mode is QI. Without the conversion, (ARRAY
8510 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8511 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8512
8513 But sizetype isn't quite right either (especially if
8514 the lowbound is negative). FIXME */
8515
8516 if (! integer_zerop (low_bound))
8517 index = fold (build (MINUS_EXPR, index_type, index,
8518 convert (sizetype, low_bound)));
8519
8520 /* If this is a constant index into a constant array,
8521 just get the value from the array. Handle both the cases when
8522 we have an explicit constructor and when our operand is a variable
8523 that was declared const. */
8524
8525 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8526 {
8527 if (TREE_CODE (index) == INTEGER_CST
8528 && TREE_INT_CST_HIGH (index) == 0)
8529 {
8530 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8531
8532 i = TREE_INT_CST_LOW (index);
8533 while (elem && i--)
8534 elem = TREE_CHAIN (elem);
8535 if (elem)
8536 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8537 palign);
8538 }
8539 }
8540
8541 else if (optimize >= 1
8542 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8543 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8544 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8545 {
8546 if (TREE_CODE (index) == INTEGER_CST)
8547 {
8548 tree init = DECL_INITIAL (array);
8549
8550 i = TREE_INT_CST_LOW (index);
8551 if (TREE_CODE (init) == CONSTRUCTOR)
8552 {
8553 tree elem = CONSTRUCTOR_ELTS (init);
8554
8555 while (elem
8556 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8557 elem = TREE_CHAIN (elem);
8558 if (elem)
8559 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8560 palign);
8561 }
8562 }
8563 }
8564 }
8565
8566 /* ... fall through ... */
8567
8568 case COMPONENT_REF:
8569 case BIT_FIELD_REF:
8570 /* If the operand is a CONSTRUCTOR, we can just extract the
8571 appropriate field if it is present. Don't do this if we have
8572 already written the data since we want to refer to that copy
8573 and varasm.c assumes that's what we'll do. */
8574 if (TREE_CODE (exp) != ARRAY_REF
8575 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8576 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8577 {
8578 tree elt;
8579
8580 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8581 elt = TREE_CHAIN (elt))
8582 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8583 /* Note that unlike the case in expand_expr, we know this is
8584 BLKmode and hence not an integer. */
8585 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8586 }
8587
8588 {
8589 enum machine_mode mode1;
8590 int bitsize;
8591 int bitpos;
8592 tree offset;
8593 int volatilep = 0;
8594 int alignment;
8595 int unsignedp;
8596 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8597 &mode1, &unsignedp, &volatilep,
8598 &alignment);
8599
8600 /* If we got back the original object, something is wrong. Perhaps
8601 we are evaluating an expression too early. In any event, don't
8602 infinitely recurse. */
8603 if (tem == exp)
8604 abort ();
8605
8606 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8607
 8608 	/* If this is a constant, put it into a register if it is a
 8609 	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
8610 if (CONSTANT_P (op0))
8611 {
8612 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8613
8614 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8615 && offset == 0)
8616 op0 = force_reg (inner_mode, op0);
8617 else
8618 op0 = validize_mem (force_const_mem (inner_mode, op0));
8619 }
8620
8621 if (offset != 0)
8622 {
8623 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8624
8625 /* If this object is in a register, put it into memory.
8626 This case can't occur in C, but can in Ada if we have
8627 unchecked conversion of an expression from a scalar type to
8628 an array or record type. */
8629 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8630 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8631 {
8632 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8633
8634 mark_temp_addr_taken (memloc);
8635 emit_move_insn (memloc, op0);
8636 op0 = memloc;
8637 }
8638
8639 if (GET_CODE (op0) != MEM)
8640 abort ();
8641
8642 if (GET_MODE (offset_rtx) != ptr_mode)
8643 {
8644 #ifdef POINTERS_EXTEND_UNSIGNED
8645 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8646 #else
8647 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8648 #endif
8649 }
8650
8651 op0 = change_address (op0, VOIDmode,
8652 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8653 force_reg (ptr_mode,
8654 offset_rtx)));
8655 }
8656
8657 /* Don't forget about volatility even if this is a bitfield. */
8658 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8659 {
8660 op0 = copy_rtx (op0);
8661 MEM_VOLATILE_P (op0) = 1;
8662 }
8663
8664 /* Check the access. */
8665 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8666 {
8667 rtx to;
8668 int size;
8669
8670 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8671 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8672
8673 /* Check the access rights of the pointer. */
8674 if (size > BITS_PER_UNIT)
8675 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8676 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8677 TYPE_MODE (sizetype),
8678 GEN_INT (MEMORY_USE_RO),
8679 TYPE_MODE (integer_type_node));
8680 }
8681
8682 /* In cases where an aligned union has an unaligned object
8683 as a field, we might be extracting a BLKmode value from
8684 an integer-mode (e.g., SImode) object. Handle this case
8685 by doing the extract into an object as wide as the field
8686 (which we know to be the width of a basic mode), then
8687 storing into memory, and changing the mode to BLKmode.
8688 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8689 EXPAND_INITIALIZER), then we must not copy to a temporary. */
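/* (Hypothetical example: a union held in an SImode register whose
 field is a three-byte record; the field is BLKmode, so it is
 pulled out with extract_bit_field and spilled to a stack
 temporary below, whose mode is then rewritten to BLKmode.) */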
8690 if (mode1 == VOIDmode
8691 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8692 || (SLOW_UNALIGNED_ACCESS
8693 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8694 || bitpos % TYPE_ALIGN (type) != 0)))
8695 {
8696 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8697
8698 if (ext_mode == BLKmode)
8699 {
8700 /* In this case, BITPOS must start at a byte boundary. */
8701 if (GET_CODE (op0) != MEM
8702 || bitpos % BITS_PER_UNIT != 0)
8703 abort ();
8704
8705 op0 = change_address (op0, VOIDmode,
8706 plus_constant (XEXP (op0, 0),
8707 bitpos / BITS_PER_UNIT));
8708 }
8709 else
8710 {
8711 rtx new = assign_stack_temp (ext_mode,
8712 bitsize / BITS_PER_UNIT, 0);
8713
8714 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8715 unsignedp, NULL_RTX, ext_mode,
8716 ext_mode, alignment,
8717 int_size_in_bytes (TREE_TYPE (tem)));
8718
8719 /* If the result is a record type and BITSIZE is narrower than
8720 the mode of OP0, an integral mode, and this is a big endian
8721 machine, we must put the field into the high-order bits. */
8722 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8723 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8724 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8725 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8726 size_int (GET_MODE_BITSIZE
8727 (GET_MODE (op0))
8728 - bitsize),
8729 op0, 1);
8730
8731
8732 emit_move_insn (new, op0);
8733 op0 = copy_rtx (new);
8734 PUT_MODE (op0, BLKmode);
8735 }
8736 }
8737 else
8738 /* Get a reference to just this component. */
8739 op0 = change_address (op0, mode1,
8740 plus_constant (XEXP (op0, 0),
8741 (bitpos / BITS_PER_UNIT)));
8742
8743 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8744
8745 /* Adjust the alignment in case the bit position is not
8746 a multiple of the alignment of the inner object. */
8747 while (bitpos % alignment != 0)
8748 alignment >>= 1;
8749
8750 if (GET_CODE (XEXP (op0, 0)) == REG)
8751 mark_reg_pointer (XEXP (op0, 0), alignment);
8752
8753 MEM_IN_STRUCT_P (op0) = 1;
8754 MEM_VOLATILE_P (op0) |= volatilep;
8755
8756 *palign = alignment;
8757 return op0;
8758 }
8759
8760 default:
8761 break;
8762
8763 }
8764
8765 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8766 }
8767 \f
8768 /* Return the tree node and offset if a given argument corresponds to
8769 a string constant. */
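/* For example (hypothetical argument, not from this file): for the
 argument "abc" + 2, a PLUS_EXPR of an ADDR_EXPR of a STRING_CST
 and the constant 2, the STRING_CST is returned and *PTR_OFFSET is
 set to 2; a bare "abc" yields an offset of zero. */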
8770
8771 tree
8772 string_constant (arg, ptr_offset)
8773 tree arg;
8774 tree *ptr_offset;
8775 {
8776 STRIP_NOPS (arg);
8777
8778 if (TREE_CODE (arg) == ADDR_EXPR
8779 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8780 {
8781 *ptr_offset = integer_zero_node;
8782 return TREE_OPERAND (arg, 0);
8783 }
8784 else if (TREE_CODE (arg) == PLUS_EXPR)
8785 {
8786 tree arg0 = TREE_OPERAND (arg, 0);
8787 tree arg1 = TREE_OPERAND (arg, 1);
8788
8789 STRIP_NOPS (arg0);
8790 STRIP_NOPS (arg1);
8791
8792 if (TREE_CODE (arg0) == ADDR_EXPR
8793 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8794 {
8795 *ptr_offset = arg1;
8796 return TREE_OPERAND (arg0, 0);
8797 }
8798 else if (TREE_CODE (arg1) == ADDR_EXPR
8799 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8800 {
8801 *ptr_offset = arg0;
8802 return TREE_OPERAND (arg1, 0);
8803 }
8804 }
8805
8806 return 0;
8807 }
8808 \f
8809 /* Expand code for a post- or pre-increment or decrement
8810 and return the RTX for the result.
8811 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8812
8813 static rtx
8814 expand_increment (exp, post, ignore)
8815 register tree exp;
8816 int post, ignore;
8817 {
8818 register rtx op0, op1;
8819 register rtx temp, value;
8820 register tree incremented = TREE_OPERAND (exp, 0);
8821 optab this_optab = add_optab;
8822 int icode;
8823 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8824 int op0_is_copy = 0;
8825 int single_insn = 0;
8826 /* 1 means we can't store into OP0 directly,
8827 because it is a subreg narrower than a word,
8828 and we don't dare clobber the rest of the word. */
8829 int bad_subreg = 0;
8830
8831 /* Stabilize any component ref that might need to be
8832 evaluated more than once below. */
8833 if (!post
8834 || TREE_CODE (incremented) == BIT_FIELD_REF
8835 || (TREE_CODE (incremented) == COMPONENT_REF
8836 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8837 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8838 incremented = stabilize_reference (incremented);
8839 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8840 ones into save exprs so that they don't accidentally get evaluated
8841 more than once by the code below. */
8842 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8843 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8844 incremented = save_expr (incremented);
8845
8846 /* Compute the operands as RTX.
8847 Note whether OP0 is the actual lvalue or a copy of it:
8848 I believe it is a copy iff it is a register or subreg
8849 and insns were generated in computing it. */
8850
8851 temp = get_last_insn ();
8852 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8853
8854 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8855 in place but instead must do sign- or zero-extension during assignment,
8856 so we copy it into a new register and let the code below use it as
8857 a copy.
8858
8859 Note that we can safely modify this SUBREG since it is known not to be
8860 shared (it was made by the expand_expr call above). */
8861
8862 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8863 {
8864 if (post)
8865 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8866 else
8867 bad_subreg = 1;
8868 }
8869 else if (GET_CODE (op0) == SUBREG
8870 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8871 {
8872 /* We cannot increment this SUBREG in place. If we are
8873 post-incrementing, get a copy of the old value. Otherwise,
8874 just mark that we cannot increment in place. */
8875 if (post)
8876 op0 = copy_to_reg (op0);
8877 else
8878 bad_subreg = 1;
8879 }
8880
8881 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8882 && temp != get_last_insn ());
8883 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8884 EXPAND_MEMORY_USE_BAD);
8885
8886 /* Decide whether incrementing or decrementing. */
8887 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8888 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8889 this_optab = sub_optab;
8890
8891 /* Convert decrement by a constant into a negative increment. */
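/* (E.g., x -= 7 arrives with OP1 == 7 and sub_optab and leaves as
 OP1 == -7 with add_optab, so only the add path need be handled
 below.) */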
8892 if (this_optab == sub_optab
8893 && GET_CODE (op1) == CONST_INT)
8894 {
8895 op1 = GEN_INT (- INTVAL (op1));
8896 this_optab = add_optab;
8897 }
8898
8899 /* For a preincrement, see if we can do this with a single instruction. */
8900 if (!post)
8901 {
8902 icode = (int) this_optab->handlers[(int) mode].insn_code;
8903 if (icode != (int) CODE_FOR_nothing
8904 /* Make sure that OP0 is valid for operands 0 and 1
8905 of the insn we want to queue. */
8906 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8907 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8908 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8909 single_insn = 1;
8910 }
8911
8912 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8913 then we cannot just increment OP0. We must therefore contrive to
8914 increment the original value. Then, for postincrement, we can return
8915 OP0 since it is a copy of the old value. For preincrement, expand here
8916 unless we can do it with a single insn.
8917
8918 Likewise if storing directly into OP0 would clobber high bits
8919 we need to preserve (bad_subreg). */
8920 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8921 {
8922 /* This is the easiest way to increment the value wherever it is.
8923 Problems with multiple evaluation of INCREMENTED are prevented
8924 because either (1) it is a component_ref or preincrement,
8925 in which case it was stabilized above, or (2) it is an array_ref
8926 with constant index in an array in a register, which is
8927 safe to reevaluate. */
8928 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8929 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8930 ? MINUS_EXPR : PLUS_EXPR),
8931 TREE_TYPE (exp),
8932 incremented,
8933 TREE_OPERAND (exp, 1));
8934
8935 while (TREE_CODE (incremented) == NOP_EXPR
8936 || TREE_CODE (incremented) == CONVERT_EXPR)
8937 {
8938 newexp = convert (TREE_TYPE (incremented), newexp);
8939 incremented = TREE_OPERAND (incremented, 0);
8940 }
8941
8942 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8943 return post ? op0 : temp;
8944 }
8945
8946 if (post)
8947 {
8948 /* We have a true reference to the value in OP0.
8949 If there is an insn to add or subtract in this mode, queue it.
8950 Queueing the increment insn avoids the register shuffling
8951 that often results if we must increment now and first save
8952 the old value for subsequent use. */
8953
8954 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8955 op0 = stabilize (op0);
8956 #endif
8957
8958 icode = (int) this_optab->handlers[(int) mode].insn_code;
8959 if (icode != (int) CODE_FOR_nothing
8960 /* Make sure that OP0 is valid for operands 0 and 1
8961 of the insn we want to queue. */
8962 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8963 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8964 {
8965 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8966 op1 = force_reg (mode, op1);
8967
8968 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8969 }
8970 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8971 {
8972 rtx addr = (general_operand (XEXP (op0, 0), mode)
8973 ? force_reg (Pmode, XEXP (op0, 0))
8974 : copy_to_reg (XEXP (op0, 0)));
8975 rtx temp, result;
8976
8977 op0 = change_address (op0, VOIDmode, addr);
8978 temp = force_reg (GET_MODE (op0), op0);
8979 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8980 op1 = force_reg (mode, op1);
8981
8982 /* The increment queue is LIFO, thus we have to `queue'
8983 the instructions in reverse order. */
8984 enqueue_insn (op0, gen_move_insn (op0, temp));
8985 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8986 return result;
8987 }
8988 }
8989
8990 /* Preincrement, or we can't increment with one simple insn. */
8991 if (post)
8992 /* Save a copy of the value before inc or dec, to return it later. */
8993 temp = value = copy_to_reg (op0);
8994 else
8995 /* Arrange to return the incremented value. */
8996 /* Copy the rtx because expand_binop will protect from the queue,
8997 and the results of that would be invalid for us to return
8998 if our caller does emit_queue before using our result. */
8999 temp = copy_rtx (value = op0);
9000
9001 /* Increment however we can. */
9002 op1 = expand_binop (mode, this_optab, value, op1,
9003 current_function_check_memory_usage ? NULL_RTX : op0,
9004 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9005 /* Make sure the value is stored into OP0. */
9006 if (op1 != op0)
9007 emit_move_insn (op0, op1);
9008
9009 return temp;
9010 }
9011 \f
9012 /* Expand all function calls contained within EXP, innermost ones first.
9013 But don't look within expressions that have sequence points.
9014 For each CALL_EXPR, record the rtx for its value
9015 in the CALL_EXPR_RTL field. */
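/* For example (hypothetical source): in  a + g (x)  the call g (x)
 is expanded here first and its value recorded in CALL_EXPR_RTL,
 so expanding the sum later just reuses that rtx instead of
 emitting the call in mid-expression. */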
9016
9017 static void
9018 preexpand_calls (exp)
9019 tree exp;
9020 {
9021 register int nops, i;
9022 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9023
9024 if (! do_preexpand_calls)
9025 return;
9026
9027 /* Only expressions and references can contain calls. */
9028
9029 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9030 return;
9031
9032 switch (TREE_CODE (exp))
9033 {
9034 case CALL_EXPR:
9035 /* Do nothing if already expanded. */
9036 if (CALL_EXPR_RTL (exp) != 0
9037 /* Do nothing if the call returns a variable-sized object. */
9038 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9039 /* Do nothing to built-in functions. */
9040 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9041 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9042 == FUNCTION_DECL)
9043 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9044 return;
9045
9046 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9047 return;
9048
9049 case COMPOUND_EXPR:
9050 case COND_EXPR:
9051 case TRUTH_ANDIF_EXPR:
9052 case TRUTH_ORIF_EXPR:
9053 /* If we find one of these, then we can be sure
9054 the adjust will be done for it (since it makes jumps).
9055 Do it now, so that if this is inside an argument
9056 of a function, we don't get the stack adjustment
9057 after some other args have already been pushed. */
9058 do_pending_stack_adjust ();
9059 return;
9060
9061 case BLOCK:
9062 case RTL_EXPR:
9063 case WITH_CLEANUP_EXPR:
9064 case CLEANUP_POINT_EXPR:
9065 case TRY_CATCH_EXPR:
9066 return;
9067
9068 case SAVE_EXPR:
9069 if (SAVE_EXPR_RTL (exp) != 0)
9070 return;
9071
9072 default:
9073 break;
9074 }
9075
9076 nops = tree_code_length[(int) TREE_CODE (exp)];
9077 for (i = 0; i < nops; i++)
9078 if (TREE_OPERAND (exp, i) != 0)
9079 {
9080 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9081 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9082 It doesn't happen before the call is made. */
9083 ;
9084 else
9085 {
9086 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9087 if (type == 'e' || type == '<' || type == '1' || type == '2'
9088 || type == 'r')
9089 preexpand_calls (TREE_OPERAND (exp, i));
9090 }
9091 }
9092 }
9093 \f
9094 /* At the start of a function, record that we have no previously-pushed
9095 arguments waiting to be popped. */
9096
9097 void
9098 init_pending_stack_adjust ()
9099 {
9100 pending_stack_adjust = 0;
9101 }
9102
9103 /* When exiting from a function, if safe, clear out any pending stack adjust
9104 so the adjustment won't get done.
9105
9106 Note, if the current function calls alloca, then it must have a
9107 frame pointer regardless of the value of flag_omit_frame_pointer. */
9108
9109 void
9110 clear_pending_stack_adjust ()
9111 {
9112 #ifdef EXIT_IGNORE_STACK
9113 if (optimize > 0
9114 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9115 && EXIT_IGNORE_STACK
9116 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9117 && ! flag_inline_functions)
9118 pending_stack_adjust = 0;
9119 #endif
9120 }
9121
9122 /* Pop any previously-pushed arguments that have not been popped yet. */
9123
9124 void
9125 do_pending_stack_adjust ()
9126 {
9127 if (inhibit_defer_pop == 0)
9128 {
9129 if (pending_stack_adjust != 0)
9130 adjust_stack (GEN_INT (pending_stack_adjust));
9131 pending_stack_adjust = 0;
9132 }
9133 }
9134 \f
9135 /* Expand conditional expressions. */
9136
9137 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9138 LABEL is an rtx of code CODE_LABEL, in this function and all the
9139 functions here. */
9140
9141 void
9142 jumpifnot (exp, label)
9143 tree exp;
9144 rtx label;
9145 {
9146 do_jump (exp, label, NULL_RTX);
9147 }
9148
9149 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9150
9151 void
9152 jumpif (exp, label)
9153 tree exp;
9154 rtx label;
9155 {
9156 do_jump (exp, NULL_RTX, label);
9157 }
9158
9159 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9160 the result is zero, or IF_TRUE_LABEL if the result is one.
9161 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9162 meaning fall through in that case.
9163
9164 do_jump always does any pending stack adjust except when it does not
9165 actually perform a jump. An example where there is no jump
9166 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9167
9168 This function is responsible for optimizing cases such as
9169 &&, || and comparison operators in EXP. */
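/* For instance, for EXP of the form (a && b) the TRUTH_ANDIF_EXPR
 case below emits roughly
 do_jump (a, if_false_label, NULL_RTX);
 do_jump (b, if_false_label, if_true_label);
 so b is never evaluated once a is known to be zero. */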
9170
9171 void
9172 do_jump (exp, if_false_label, if_true_label)
9173 tree exp;
9174 rtx if_false_label, if_true_label;
9175 {
9176 register enum tree_code code = TREE_CODE (exp);
9177 /* Some cases need to create a label to jump to
9178 in order to properly fall through.
9179 These cases set DROP_THROUGH_LABEL nonzero. */
9180 rtx drop_through_label = 0;
9181 rtx temp;
9182 int i;
9183 tree type;
9184 enum machine_mode mode;
9185
9186 #ifdef MAX_INTEGER_COMPUTATION_MODE
9187 check_max_integer_computation_mode (exp);
9188 #endif
9189
9190 emit_queue ();
9191
9192 switch (code)
9193 {
9194 case ERROR_MARK:
9195 break;
9196
9197 case INTEGER_CST:
9198 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9199 if (temp)
9200 emit_jump (temp);
9201 break;
9202
9203 #if 0
9204 /* This is not true with #pragma weak */
9205 case ADDR_EXPR:
9206 /* The address of something can never be zero. */
9207 if (if_true_label)
9208 emit_jump (if_true_label);
9209 break;
9210 #endif
9211
9212 case NOP_EXPR:
9213 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9214 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9215 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9216 goto normal;
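/* ... fall through ... */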
9217 case CONVERT_EXPR:
9218 /* If we are narrowing the operand, we have to do the compare in the
9219 narrower mode. */
9220 if ((TYPE_PRECISION (TREE_TYPE (exp))
9221 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9222 goto normal;
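/* ... fall through ... */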
9223 case NON_LVALUE_EXPR:
9224 case REFERENCE_EXPR:
9225 case ABS_EXPR:
9226 case NEGATE_EXPR:
9227 case LROTATE_EXPR:
9228 case RROTATE_EXPR:
9229 /* These cannot change zero->non-zero or vice versa. */
9230 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9231 break;
9232
9233 case WITH_RECORD_EXPR:
9234 /* Put the object on the placeholder list, recurse through our first
9235 operand, and pop the list. */
9236 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9237 placeholder_list);
9238 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9239 placeholder_list = TREE_CHAIN (placeholder_list);
9240 break;
9241
9242 #if 0
9243 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9244 a test and can be longer if the test is eliminated. */
9245 case PLUS_EXPR:
9246 /* Reduce to minus. */
9247 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9248 TREE_OPERAND (exp, 0),
9249 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9250 TREE_OPERAND (exp, 1))));
9251 /* Process as MINUS. */
9252 #endif
9253
9254 case MINUS_EXPR:
9255 /* Non-zero iff operands of minus differ. */
9256 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9257 TREE_OPERAND (exp, 0),
9258 TREE_OPERAND (exp, 1)),
9259 NE, NE, if_false_label, if_true_label);
9260 break;
9261
9262 case BIT_AND_EXPR:
9263 /* If we are AND'ing with a small constant, do this comparison in the
9264 smallest type that fits. If the machine doesn't have comparisons
9265 that small, it will be converted back to the wider comparison.
9266 This helps if we are testing the sign bit of a narrower object.
9267 combine can't do this for us because it can't know whether a
9268 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
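/* E.g., for (x & 0x80) != 0 with x an int, I is 7, so the test can
 be narrowed to a QImode comparison of (unsigned char) (x & 0x80)
 against zero whenever a QImode compare insn exists. */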
9269
9270 if (! SLOW_BYTE_ACCESS
9271 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9272 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9273 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9274 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9275 && (type = type_for_mode (mode, 1)) != 0
9276 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9277 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9278 != CODE_FOR_nothing))
9279 {
9280 do_jump (convert (type, exp), if_false_label, if_true_label);
9281 break;
9282 }
9283 goto normal;
9284
9285 case TRUTH_NOT_EXPR:
9286 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9287 break;
9288
9289 case TRUTH_ANDIF_EXPR:
9290 if (if_false_label == 0)
9291 if_false_label = drop_through_label = gen_label_rtx ();
9292 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9293 start_cleanup_deferral ();
9294 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9295 end_cleanup_deferral ();
9296 break;
9297
9298 case TRUTH_ORIF_EXPR:
9299 if (if_true_label == 0)
9300 if_true_label = drop_through_label = gen_label_rtx ();
9301 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9302 start_cleanup_deferral ();
9303 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9304 end_cleanup_deferral ();
9305 break;
9306
9307 case COMPOUND_EXPR:
9308 push_temp_slots ();
9309 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9310 preserve_temp_slots (NULL_RTX);
9311 free_temp_slots ();
9312 pop_temp_slots ();
9313 emit_queue ();
9314 do_pending_stack_adjust ();
9315 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9316 break;
9317
9318 case COMPONENT_REF:
9319 case BIT_FIELD_REF:
9320 case ARRAY_REF:
9321 {
9322 int bitsize, bitpos, unsignedp;
9323 enum machine_mode mode;
9324 tree type;
9325 tree offset;
9326 int volatilep = 0;
9327 int alignment;
9328
9329 /* Get description of this reference. We don't actually care
9330 about the underlying object here. */
9331 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9332 &mode, &unsignedp, &volatilep,
9333 &alignment);
9334
9335 type = type_for_size (bitsize, unsignedp);
9336 if (! SLOW_BYTE_ACCESS
9337 && type != 0 && bitsize >= 0
9338 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9339 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9340 != CODE_FOR_nothing))
9341 {
9342 do_jump (convert (type, exp), if_false_label, if_true_label);
9343 break;
9344 }
9345 goto normal;
9346 }
9347
9348 case COND_EXPR:
9349 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9350 if (integer_onep (TREE_OPERAND (exp, 1))
9351 && integer_zerop (TREE_OPERAND (exp, 2)))
9352 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9353
9354 else if (integer_zerop (TREE_OPERAND (exp, 1))
9355 && integer_onep (TREE_OPERAND (exp, 2)))
9356 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9357
9358 else
9359 {
9360 register rtx label1 = gen_label_rtx ();
9361 drop_through_label = gen_label_rtx ();
9362
9363 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9364
9365 start_cleanup_deferral ();
9366 /* Now the THEN-expression. */
9367 do_jump (TREE_OPERAND (exp, 1),
9368 if_false_label ? if_false_label : drop_through_label,
9369 if_true_label ? if_true_label : drop_through_label);
9370 /* In case the do_jump just above never jumps. */
9371 do_pending_stack_adjust ();
9372 emit_label (label1);
9373
9374 /* Now the ELSE-expression. */
9375 do_jump (TREE_OPERAND (exp, 2),
9376 if_false_label ? if_false_label : drop_through_label,
9377 if_true_label ? if_true_label : drop_through_label);
9378 end_cleanup_deferral ();
9379 }
9380 break;
9381
9382 case EQ_EXPR:
9383 {
9384 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9385
9386 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9387 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9388 {
9389 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9390 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9391 do_jump
9392 (fold
9393 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9394 fold (build (EQ_EXPR, TREE_TYPE (exp),
9395 fold (build1 (REALPART_EXPR,
9396 TREE_TYPE (inner_type),
9397 exp0)),
9398 fold (build1 (REALPART_EXPR,
9399 TREE_TYPE (inner_type),
9400 exp1)))),
9401 fold (build (EQ_EXPR, TREE_TYPE (exp),
9402 fold (build1 (IMAGPART_EXPR,
9403 TREE_TYPE (inner_type),
9404 exp0)),
9405 fold (build1 (IMAGPART_EXPR,
9406 TREE_TYPE (inner_type),
9407 exp1)))))),
9408 if_false_label, if_true_label);
9409 }
9410
9411 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9412 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9413
9414 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9415 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9416 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9417 else
9418 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9419 break;
9420 }
9421
9422 case NE_EXPR:
9423 {
9424 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9425
9426 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9427 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9428 {
9429 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9430 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9431 do_jump
9432 (fold
9433 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9434 fold (build (NE_EXPR, TREE_TYPE (exp),
9435 fold (build1 (REALPART_EXPR,
9436 TREE_TYPE (inner_type),
9437 exp0)),
9438 fold (build1 (REALPART_EXPR,
9439 TREE_TYPE (inner_type),
9440 exp1)))),
9441 fold (build (NE_EXPR, TREE_TYPE (exp),
9442 fold (build1 (IMAGPART_EXPR,
9443 TREE_TYPE (inner_type),
9444 exp0)),
9445 fold (build1 (IMAGPART_EXPR,
9446 TREE_TYPE (inner_type),
9447 exp1)))))),
9448 if_false_label, if_true_label);
9449 }
9450
9451 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9452 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9453
9454 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9455 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9456 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9457 else
9458 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9459 break;
9460 }
9461
9462 case LT_EXPR:
9463 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9464 if (GET_MODE_CLASS (mode) == MODE_INT
9465 && ! can_compare_p (mode, ccp_jump))
9466 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9467 else
9468 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9469 break;
9470
9471 case LE_EXPR:
9472 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9473 if (GET_MODE_CLASS (mode) == MODE_INT
9474 && ! can_compare_p (mode, ccp_jump))
9475 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9476 else
9477 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9478 break;
9479
9480 case GT_EXPR:
9481 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9482 if (GET_MODE_CLASS (mode) == MODE_INT
9483 && ! can_compare_p (mode, ccp_jump))
9484 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9485 else
9486 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9487 break;
9488
9489 case GE_EXPR:
9490 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9491 if (GET_MODE_CLASS (mode) == MODE_INT
9492 && ! can_compare_p (mode, ccp_jump))
9493 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9494 else
9495 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9496 break;
9497
9498 default:
9499 normal:
9500 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9501 #if 0
9502 /* This is not needed any more and causes poor code since it causes
9503 comparisons and tests from non-SI objects to have different code
9504 sequences. */
9505 /* Copy to register to avoid generating bad insns by cse
9506 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9507 if (!cse_not_expected && GET_CODE (temp) == MEM)
9508 temp = copy_to_reg (temp);
9509 #endif
9510 do_pending_stack_adjust ();
9511 /* Do any postincrements in the expression that was tested. */
9512 emit_queue ();
9513
9514 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9515 {
9516 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9517 if (target)
9518 emit_jump (target);
9519 }
9520 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9521 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9522 /* Note swapping the labels gives us not-equal. */
9523 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9524 else if (GET_MODE (temp) != VOIDmode)
9525 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9526 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9527 GET_MODE (temp), NULL_RTX, 0,
9528 if_false_label, if_true_label);
9529 else
9530 abort ();
9531 }
9532
9533 if (drop_through_label)
9534 {
9535 /* If do_jump produces code that might be jumped around,
9536 do any stack adjusts from that code, before the place
9537 where control merges in. */
9538 do_pending_stack_adjust ();
9539 emit_label (drop_through_label);
9540 }
9541 }
9542 \f
9543 /* Given a comparison expression EXP for values too wide to be compared
9544 with one insn, test the comparison and jump to the appropriate label.
9545 The code of EXP is ignored; we always test GT if SWAP is 0,
9546 and LT if SWAP is 1. */
9547
9548 static void
9549 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9550 tree exp;
9551 int swap;
9552 rtx if_false_label, if_true_label;
9553 {
9554 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9555 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9556 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9557 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9558
9559 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9560 }
9561
9562 /* Compare OP0 with OP1, word at a time, in mode MODE.
9563 UNSIGNEDP says to do unsigned comparison.
9564 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
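/* E.g., for DImode operands on a 32-bit target the loop below jumps
 to IF_TRUE_LABEL if the high words compare greater (using the
 requested signedness) and to IF_FALSE_LABEL if they differ, then
 compares the low words, always unsigned. */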
9565
9566 void
9567 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9568 enum machine_mode mode;
9569 int unsignedp;
9570 rtx op0, op1;
9571 rtx if_false_label, if_true_label;
9572 {
9573 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9574 rtx drop_through_label = 0;
9575 int i;
9576
9577 if (! if_true_label || ! if_false_label)
9578 drop_through_label = gen_label_rtx ();
9579 if (! if_true_label)
9580 if_true_label = drop_through_label;
9581 if (! if_false_label)
9582 if_false_label = drop_through_label;
9583
9584 /* Compare a word at a time, high order first. */
9585 for (i = 0; i < nwords; i++)
9586 {
9587 rtx op0_word, op1_word;
9588
9589 if (WORDS_BIG_ENDIAN)
9590 {
9591 op0_word = operand_subword_force (op0, i, mode);
9592 op1_word = operand_subword_force (op1, i, mode);
9593 }
9594 else
9595 {
9596 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9597 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9598 }
9599
9600 /* All but the high-order word must be compared as unsigned. */
9601 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9602 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9603 NULL_RTX, if_true_label);
9604
9605 /* Consider lower words only if these are equal. */
9606 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9607 NULL_RTX, 0, NULL_RTX, if_false_label);
9608 }
9609
9610 if (if_false_label)
9611 emit_jump (if_false_label);
9612 if (drop_through_label)
9613 emit_label (drop_through_label);
9614 }
9615
9616 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9617 with one insn, test the comparison and jump to the appropriate label. */
9618
9619 static void
9620 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9621 tree exp;
9622 rtx if_false_label, if_true_label;
9623 {
9624 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9625 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9626 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9627 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9628 int i;
9629 rtx drop_through_label = 0;
9630
9631 if (! if_false_label)
9632 drop_through_label = if_false_label = gen_label_rtx ();
9633
9634 for (i = 0; i < nwords; i++)
9635 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9636 operand_subword_force (op1, i, mode),
9637 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9638 word_mode, NULL_RTX, 0, if_false_label,
9639 NULL_RTX);
9640
9641 if (if_true_label)
9642 emit_jump (if_true_label);
9643 if (drop_through_label)
9644 emit_label (drop_through_label);
9645 }
9646 \f
9647 /* Jump according to whether OP0 is 0.
9648 We assume that OP0 has an integer mode that is too wide
9649 for the available compare insns. */
9650
9651 void
9652 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9653 rtx op0;
9654 rtx if_false_label, if_true_label;
9655 {
9656 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9657 rtx part;
9658 int i;
9659 rtx drop_through_label = 0;
9660
9661 /* The fastest way of doing this comparison on almost any machine is to
9662 "or" all the words and compare the result. If all have to be loaded
9663 from memory and this is a very wide item, it's possible this may
9664 be slower, but that's highly unlikely. */
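/* E.g., a DImode OP0 on a 32-bit machine becomes
 PART = low word | high word, followed by a single PART == 0
 test, instead of two separate word comparisons. */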
9665
9666 part = gen_reg_rtx (word_mode);
9667 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9668 for (i = 1; i < nwords && part != 0; i++)
9669 part = expand_binop (word_mode, ior_optab, part,
9670 operand_subword_force (op0, i, GET_MODE (op0)),
9671 part, 1, OPTAB_WIDEN);
9672
9673 if (part != 0)
9674 {
9675 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9676 NULL_RTX, 0, if_false_label, if_true_label);
9677
9678 return;
9679 }
9680
9681 /* If we couldn't do the "or" simply, do this with a series of compares. */
9682 if (! if_false_label)
9683 drop_through_label = if_false_label = gen_label_rtx ();
9684
9685 for (i = 0; i < nwords; i++)
9686 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9687 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9688 if_false_label, NULL_RTX);
9689
9690 if (if_true_label)
9691 emit_jump (if_true_label);
9692
9693 if (drop_through_label)
9694 emit_label (drop_through_label);
9695 }
9696 \f
9697 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9698 (including code to compute the values to be compared)
9699 and set (CC0) according to the result.
9700 The decision as to signed or unsigned comparison must be made by the caller.
9701
9702 We force a stack adjustment unless there are currently
9703 things pushed on the stack that aren't yet used.
9704
9705 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9706 compared.
9707
9708 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9709 size of MODE should be used. */
9710
9711 rtx
9712 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9713 register rtx op0, op1;
9714 enum rtx_code code;
9715 int unsignedp;
9716 enum machine_mode mode;
9717 rtx size;
9718 int align;
9719 {
9720 rtx tem;
9721
9722 /* If one operand is constant, make it the second one. Only do this
9723 if the other operand is not constant as well. */
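/* E.g., a request to compare (GT 4 X) is rewritten here as
 (LT X 4) via swap_condition, so the code that follows need only
 cope with a constant second operand. */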
9724
9725 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9726 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9727 {
9728 tem = op0;
9729 op0 = op1;
9730 op1 = tem;
9731 code = swap_condition (code);
9732 }
9733
9734 if (flag_force_mem)
9735 {
9736 op0 = force_not_mem (op0);
9737 op1 = force_not_mem (op1);
9738 }
9739
9740 do_pending_stack_adjust ();
9741
9742 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9743 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9744 return tem;
9745
9746 #if 0
9747 /* There's no need to do this now that combine.c can eliminate lots of
9748 sign extensions. This can be less efficient in certain cases on other
9749 machines. */
9750
9751 /* If this is a signed equality comparison, we can do it as an
9752 unsigned comparison since zero-extension is cheaper than sign
9753 extension and comparisons with zero are done as unsigned. This is
9754 the case even on machines that can do fast sign extension, since
9755 zero-extension is easier to combine with other operations than
9756 sign-extension is. If we are comparing against a constant, we must
9757 convert it to what it would look like unsigned. */
9758 if ((code == EQ || code == NE) && ! unsignedp
9759 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9760 {
9761 if (GET_CODE (op1) == CONST_INT
9762 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9763 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9764 unsignedp = 1;
9765 }
9766 #endif
9767
9768 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9769
9770 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9771 }
9772
9773 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9774 The decision as to signed or unsigned comparison must be made by the caller.
9775
9776 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9777 compared.
9778
9779 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9780 size of MODE should be used. */
9781
9782 void
9783 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9784 if_false_label, if_true_label)
9785 register rtx op0, op1;
9786 enum rtx_code code;
9787 int unsignedp;
9788 enum machine_mode mode;
9789 rtx size;
9790 int align;
9791 rtx if_false_label, if_true_label;
9792 {
9793 rtx tem;
9794 int dummy_true_label = 0;
9795
9796 /* Reverse the comparison if that is safe and we want to jump if it is
9797 false. */
9798 if (! if_true_label && ! FLOAT_MODE_P (mode))
9799 {
9800 if_true_label = if_false_label;
9801 if_false_label = 0;
9802 code = reverse_condition (code);
9803 }
9804
9805 /* If one operand is constant, make it the second one. Only do this
9806 if the other operand is not constant as well. */
9807
9808 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9809 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9810 {
9811 tem = op0;
9812 op0 = op1;
9813 op1 = tem;
9814 code = swap_condition (code);
9815 }
9816
9817 if (flag_force_mem)
9818 {
9819 op0 = force_not_mem (op0);
9820 op1 = force_not_mem (op1);
9821 }
9822
9823 do_pending_stack_adjust ();
9824
9825 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9826 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9827 {
9828 if (tem == const_true_rtx)
9829 {
9830 if (if_true_label)
9831 emit_jump (if_true_label);
9832 }
9833 else
9834 {
9835 if (if_false_label)
9836 emit_jump (if_false_label);
9837 }
9838 return;
9839 }
9840
9841 #if 0
9842 /* There's no need to do this now that combine.c can eliminate lots of
9843 sign extensions. This can be less efficient in certain cases on other
9844 machines. */
9845
9846 /* If this is a signed equality comparison, we can do it as an
9847 unsigned comparison since zero-extension is cheaper than sign
9848 extension and comparisons with zero are done as unsigned. This is
9849 the case even on machines that can do fast sign extension, since
9850 zero-extension is easier to combine with other operations than
9851 sign-extension is. If we are comparing against a constant, we must
9852 convert it to what it would look like unsigned. */
9853 if ((code == EQ || code == NE) && ! unsignedp
9854 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9855 {
9856 if (GET_CODE (op1) == CONST_INT
9857 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9858 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9859 unsignedp = 1;
9860 }
9861 #endif
9862
9863 if (! if_true_label)
9864 {
9865 dummy_true_label = 1;
9866 if_true_label = gen_label_rtx ();
9867 }
9868
9869 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9870 if_true_label);
9871
9872 if (if_false_label)
9873 emit_jump (if_false_label);
9874 if (dummy_true_label)
9875 emit_label (if_true_label);
9876 }
9877
9878 /* Generate code for a comparison expression EXP (including code to compute
9879 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9880 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9881 generated code will drop through.
9882 SIGNED_CODE should be the rtx operation for this comparison for
9883 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9884
9885 We force a stack adjustment unless there are currently
9886 things pushed on the stack that aren't yet used. */
9887
9888 static void
9889 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9890 if_true_label)
9891 register tree exp;
9892 enum rtx_code signed_code, unsigned_code;
9893 rtx if_false_label, if_true_label;
9894 {
9895 int align0, align1;
9896 register rtx op0, op1;
9897 register tree type;
9898 register enum machine_mode mode;
9899 int unsignedp;
9900 enum rtx_code code;
9901
9902 /* Don't crash if the comparison was erroneous. */
9903 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9905 return;
9906
9907 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9908 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9909 mode = TYPE_MODE (type);
9910 unsignedp = TREE_UNSIGNED (type);
9911 code = unsignedp ? unsigned_code : signed_code;
9912
9913 #ifdef HAVE_canonicalize_funcptr_for_compare
9914 /* If function pointers need to be "canonicalized" before they can
9915 be reliably compared, then canonicalize them. */
9916 if (HAVE_canonicalize_funcptr_for_compare
9917 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9918 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9919 == FUNCTION_TYPE))
9920 {
9921 rtx new_op0 = gen_reg_rtx (mode);
9922
9923 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9924 op0 = new_op0;
9925 }
9926
9927 if (HAVE_canonicalize_funcptr_for_compare
9928 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9929 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9930 == FUNCTION_TYPE))
9931 {
9932 rtx new_op1 = gen_reg_rtx (mode);
9933
9934 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9935 op1 = new_op1;
9936 }
9937 #endif
9938
9939 /* Do any postincrements in the expression that was tested. */
9940 emit_queue ();
9941
9942 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9943 ((mode == BLKmode)
9944 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9945 MIN (align0, align1) / BITS_PER_UNIT,
9946 if_false_label, if_true_label);
9947 }
9948 \f
9949 /* Generate code to calculate EXP using a store-flag instruction
9950 and return an rtx for the result. EXP is either a comparison
9951 or a TRUTH_NOT_EXPR whose operand is a comparison.
9952
9953 If TARGET is nonzero, store the result there if convenient.
9954
9955 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9956 cheap.
9957
9958 Return zero if there is no suitable set-flag instruction
9959 available on this machine.
9960
9961 Once expand_expr has been called on the arguments of the comparison,
9962 we are committed to doing the store flag, since it is not safe to
9963 re-evaluate the expression. We emit the store-flag insn by calling
9964 emit_store_flag, but only expand the arguments if we have a reason
9965 to believe that emit_store_flag will be successful. If we think that
9966 it will, but it isn't, we have to simulate the store-flag with a
9967 set/jump/set sequence. */
9968
9969 static rtx
9970 do_store_flag (exp, target, mode, only_cheap)
9971 tree exp;
9972 rtx target;
9973 enum machine_mode mode;
9974 int only_cheap;
9975 {
9976 enum rtx_code code;
9977 tree arg0, arg1, type;
9978 tree tem;
9979 enum machine_mode operand_mode;
9980 int invert = 0;
9981 int unsignedp;
9982 rtx op0, op1;
9983 enum insn_code icode;
9984 rtx subtarget = target;
9985 rtx result, label;
9986
9987 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9988 result at the end. We can't simply invert the test since it would
9989 have already been inverted if it were valid. This case occurs for
9990 some floating-point comparisons. */
9991
9992 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9993 invert = 1, exp = TREE_OPERAND (exp, 0);
9994
9995 arg0 = TREE_OPERAND (exp, 0);
9996 arg1 = TREE_OPERAND (exp, 1);
9997 type = TREE_TYPE (arg0);
9998 operand_mode = TYPE_MODE (type);
9999 unsignedp = TREE_UNSIGNED (type);
10000
10001 /* We won't bother with BLKmode store-flag operations because it would mean
10002 passing a lot of information to emit_store_flag. */
10003 if (operand_mode == BLKmode)
10004 return 0;
10005
10006 /* We won't bother with store-flag operations involving function pointers
10007 when function pointers must be canonicalized before comparisons. */
10008 #ifdef HAVE_canonicalize_funcptr_for_compare
10009 if (HAVE_canonicalize_funcptr_for_compare
10010 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10011 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10012 == FUNCTION_TYPE))
10013 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10014 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10015 == FUNCTION_TYPE))))
10016 return 0;
10017 #endif
10018
10019 STRIP_NOPS (arg0);
10020 STRIP_NOPS (arg1);
10021
10022 /* Get the rtx comparison code to use. We know that EXP is a comparison
10023 operation of some type. Some comparisons against 1 and -1 can be
10024 converted to comparisons with zero. Do so here so that the tests
10025 below will be aware that we have a comparison with zero. These
10026 tests will not catch constants in the first operand, but constants
10027 are rarely passed as the first operand. */
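/* E.g., for signed operands, x < 1 becomes x <= 0 below and x > -1
 becomes x >= 0, so the zero-comparison shortcuts later in this
 function apply to them as well. */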
10028
10029 switch (TREE_CODE (exp))
10030 {
10031 case EQ_EXPR:
10032 code = EQ;
10033 break;
10034 case NE_EXPR:
10035 code = NE;
10036 break;
10037 case LT_EXPR:
10038 if (integer_onep (arg1))
10039 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10040 else
10041 code = unsignedp ? LTU : LT;
10042 break;
10043 case LE_EXPR:
10044 if (! unsignedp && integer_all_onesp (arg1))
10045 arg1 = integer_zero_node, code = LT;
10046 else
10047 code = unsignedp ? LEU : LE;
10048 break;
10049 case GT_EXPR:
10050 if (! unsignedp && integer_all_onesp (arg1))
10051 arg1 = integer_zero_node, code = GE;
10052 else
10053 code = unsignedp ? GTU : GT;
10054 break;
10055 case GE_EXPR:
10056 if (integer_onep (arg1))
10057 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10058 else
10059 code = unsignedp ? GEU : GE;
10060 break;
10061 default:
10062 abort ();
10063 }
10064
10065 /* Put a constant second. */
10066 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10067 {
10068 tem = arg0; arg0 = arg1; arg1 = tem;
10069 code = swap_condition (code);
10070 }
10071
10072 /* If this is an equality or inequality test of a single bit, we can
10073 do this by shifting the bit being tested to the low-order bit and
10074 masking the result with the constant 1. If the condition was EQ,
10075 we xor it with 1. This does not require an scc insn and is faster
10076 than an scc insn even if we have it. */
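/* E.g., (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
 becomes ((x >> 3) & 1) ^ 1; the xor is omitted when INVERT
 already flips the sense of the result. */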
10077
10078 if ((code == NE || code == EQ)
10079 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10080 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10081 {
10082 tree inner = TREE_OPERAND (arg0, 0);
10083 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10084 int ops_unsignedp;
10085
10086 /* If INNER is a right shift of a constant and it plus BITNUM does
10087 not overflow, adjust BITNUM and INNER. */
10088
10089 if (TREE_CODE (inner) == RSHIFT_EXPR
10090 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10091 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10092 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10093 < TYPE_PRECISION (type)))
10094 {
10095 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10096 inner = TREE_OPERAND (inner, 0);
10097 }
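/* (E.g., ((x >> 2) & 4) != 0 is folded here into a test of bit
 2 + 2 = 4 of x itself, saving the explicit shift.) */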
10098
10099 /* If we are going to be able to omit the AND below, we must do our
10100 operations as unsigned. If we must use the AND, we have a choice.
10101 Normally unsigned is faster, but for some machines signed is. */
10102 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10103 #ifdef LOAD_EXTEND_OP
10104 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10105 #else
10106 : 1
10107 #endif
10108 );
10109
10110 if (subtarget == 0 || GET_CODE (subtarget) != REG
10111 || GET_MODE (subtarget) != operand_mode
10112 || ! safe_from_p (subtarget, inner, 1))
10113 subtarget = 0;
10114
10115 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10116
10117 if (bitnum != 0)
10118 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10119 size_int (bitnum), subtarget, ops_unsignedp);
10120
10121 if (GET_MODE (op0) != mode)
10122 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10123
10124 if ((code == EQ && ! invert) || (code == NE && invert))
10125 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10126 ops_unsignedp, OPTAB_LIB_WIDEN);
10127
10128 /* Put the AND last so it can combine with more things. */
10129 if (bitnum != TYPE_PRECISION (type) - 1)
10130 op0 = expand_and (op0, const1_rtx, subtarget);
10131
10132 return op0;
10133 }
10134
10135 /* Now see if we are likely to be able to do this. Return if not. */
10136 if (! can_compare_p (operand_mode, ccp_store_flag))
10137 return 0;
10138 icode = setcc_gen_code[(int) code];
10139 if (icode == CODE_FOR_nothing
10140 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10141 {
10142 /* We can only do this if it is one of the special cases that
10143 can be handled without an scc insn. */
10144 if ((code == LT && integer_zerop (arg1))
10145 || (! only_cheap && code == GE && integer_zerop (arg1)))
10146 ;
10147 else if (BRANCH_COST >= 0
10148 && ! only_cheap && (code == NE || code == EQ)
10149 && TREE_CODE (type) != REAL_TYPE
10150 && ((abs_optab->handlers[(int) operand_mode].insn_code
10151 != CODE_FOR_nothing)
10152 || (ffs_optab->handlers[(int) operand_mode].insn_code
10153 != CODE_FOR_nothing)))
10154 ;
10155 else
10156 return 0;
10157 }
10158
10159 preexpand_calls (exp);
10160 if (subtarget == 0 || GET_CODE (subtarget) != REG
10161 || GET_MODE (subtarget) != operand_mode
10162 || ! safe_from_p (subtarget, arg1, 1))
10163 subtarget = 0;
10164
10165 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10166 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10167
10168 if (target == 0)
10169 target = gen_reg_rtx (mode);
10170
10171 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10172 because, if emit_store_flag does anything, it will succeed and
10173 OP0 and OP1 will not be used subsequently. */
10174
10175 result = emit_store_flag (target, code,
10176 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10177 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10178 operand_mode, unsignedp, 1);
10179
10180 if (result)
10181 {
10182 if (invert)
10183 result = expand_binop (mode, xor_optab, result, const1_rtx,
10184 result, 0, OPTAB_LIB_WIDEN);
10185 return result;
10186 }
10187
10188 /* If this failed, we have to do this with set/compare/jump/set code. */
10189 if (GET_CODE (target) != REG
10190 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10191 target = gen_reg_rtx (GET_MODE (target));
10192
10193 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10194 result = compare_from_rtx (op0, op1, code, unsignedp,
10195 operand_mode, NULL_RTX, 0);
10196 if (GET_CODE (result) == CONST_INT)
10197 return (((result == const0_rtx && ! invert)
10198 || (result != const0_rtx && invert))
10199 ? const0_rtx : const1_rtx);
10200
10201 label = gen_label_rtx ();
10202 if (bcc_gen_fctn[(int) code] == 0)
10203 abort ();
10204
10205 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10206 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10207 emit_label (label);
10208
10209 return target;
10210 }
10211 \f
10212 /* Generate a tablejump instruction (used for switch statements). */
10213
10214 #ifdef HAVE_tablejump
10215
10216 /* INDEX is the value being switched on, with the lowest value
10217 in the table already subtracted.
10218 MODE is its expected mode (needed if INDEX is constant).
10219 RANGE is the length of the jump table.
10220 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10221
10222 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10223 index value is out of range. */
10224
10225 void
10226 do_tablejump (index, mode, range, table_label, default_label)
10227 rtx index, range, table_label, default_label;
10228 enum machine_mode mode;
10229 {
10230 register rtx temp, vector;
10231
10232 /* Do an unsigned comparison (in the proper mode) between the index
10233 expression and the value which represents the length of the range.
10234 Since we just finished subtracting the lower bound of the range
10235 from the index expression, this comparison allows us to simultaneously
10236 check that the original index expression value is both greater than
10237 or equal to the minimum value of the range and less than or equal to
10238 the maximum value of the range. */
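/* E.g., for case values 3 through 7 the caller passes INDEX = x - 3
 and RANGE = 4, and the single unsigned test (unsigned) (x - 3) > 4
 rejects both x < 3 and x > 7. */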
10239
10240 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10241 0, default_label);
10242
10243 /* If index is in range, it must fit in Pmode.
10244 Convert to Pmode so we can index with it. */
10245 if (mode != Pmode)
10246 index = convert_to_mode (Pmode, index, 1);
10247
10248 /* Don't let a MEM slip through, because then INDEX that comes
10249 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10250 and break_out_memory_refs will go to work on it and mess it up. */
10251 #ifdef PIC_CASE_VECTOR_ADDRESS
10252 if (flag_pic && GET_CODE (index) != REG)
10253 index = copy_to_mode_reg (Pmode, index);
10254 #endif
10255
10256 /* If flag_force_addr were to affect this address
10257 it could interfere with the tricky assumptions made
10258 about addresses that contain label-refs,
10259 which may be valid only very near the tablejump itself. */
10260 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10261 GET_MODE_SIZE, because this indicates how large insns are. The other
10262 uses should all be Pmode, because they are addresses. This code
10263 could fail if addresses and insns are not the same size. */
10264 index = gen_rtx_PLUS (Pmode,
10265 gen_rtx_MULT (Pmode, index,
10266 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10267 gen_rtx_LABEL_REF (Pmode, table_label));
10268 #ifdef PIC_CASE_VECTOR_ADDRESS
10269 if (flag_pic)
10270 index = PIC_CASE_VECTOR_ADDRESS (index);
10271 else
10272 #endif
10273 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10274 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10275 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10276 RTX_UNCHANGING_P (vector) = 1;
10277 convert_move (temp, vector, 0);
10278
10279 emit_jump_insn (gen_tablejump (temp, table_label));
10280
10281 /* If we are generating PIC code or if the table is PC-relative, the
10282 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10283 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10284 emit_barrier ();
10285 }
10286
10287 #endif /* HAVE_tablejump */