expr.c (store_field): Ensure ALIGN is no stricter than the alignment of EXP.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
48
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
51
52 #ifdef PUSH_ROUNDING
53
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
56 #endif
57
58 #endif
59
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
63 #else
64 #define STACK_PUSH_CODE PRE_INC
65 #endif
66 #endif
67
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
71 #endif
72
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
79 int cse_not_expected;
80
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
85
86 /* Don't check memory usage, since code is being emitted to check memory
87 usage. Used when current_function_check_memory_usage is true, to avoid
88 infinite recursion. */
89 static int in_check_memory_usage;
90
91 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
92 static tree placeholder_list = 0;
93
94 /* This structure is used by move_by_pieces to describe the move to
95 be performed. */
96 struct move_by_pieces
97 {
98 rtx to;
99 rtx to_addr;
100 int autinc_to;
101 int explicit_inc_to;
102 int to_struct;
103 int to_readonly;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 int from_struct;
109 int from_readonly;
110 int len;
111 int offset;
112 int reverse;
113 };
114
115 /* This structure is used by clear_by_pieces to describe the clear to
116 be performed. */
117
118 struct clear_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 int len;
126 int offset;
127 int reverse;
128 };
129
130 extern struct obstack permanent_obstack;
131
132 static rtx get_push_address PARAMS ((int));
133
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static int move_by_pieces_ninsns PARAMS ((unsigned int, int));
136 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *));
138 static void clear_by_pieces PARAMS ((rtx, int, int));
139 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
140 enum machine_mode,
141 struct clear_by_pieces *));
142 static int is_zeros_p PARAMS ((tree));
143 static int mostly_zeros_p PARAMS ((tree));
144 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
145 tree, tree, int, int));
146 static void store_constructor PARAMS ((tree, rtx, int, int, int));
147 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
148 tree, enum machine_mode, int, int,
149 int, int));
150 static enum memory_use_mode
151 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
152 static tree save_noncopied_parts PARAMS ((tree, tree));
153 static tree init_noncopied_parts PARAMS ((tree, tree));
154 static int safe_from_p PARAMS ((rtx, tree, int));
155 static int fixed_type_p PARAMS ((tree));
156 static rtx var_rtx PARAMS ((tree));
157 static int readonly_fields_p PARAMS ((tree));
158 static rtx expand_expr_unaligned PARAMS ((tree, int *));
159 static rtx expand_increment PARAMS ((tree, int, int));
160 static void preexpand_calls PARAMS ((tree));
161 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
162 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
163 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
164 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
165
166 /* Record for each mode whether we can move a register directly to or
167 from an object of that mode in memory. If we can't, we won't try
168 to use that mode directly when accessing a field of that mode. */
169
170 static char direct_load[NUM_MACHINE_MODES];
171 static char direct_store[NUM_MACHINE_MODES];
172
173 /* If a memory-to-memory move would take MOVE_RATIO or more simple
174 move-instruction sequences, we will do a movstr or libcall instead. */
175
176 #ifndef MOVE_RATIO
177 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
178 #define MOVE_RATIO 2
179 #else
180 /* If we are optimizing for space (-Os), cut down the default move ratio */
181 #define MOVE_RATIO (optimize_size ? 3 : 15)
182 #endif
183 #endif
184
185 /* This macro is used to determine whether move_by_pieces should be called
186 to perform a structure copy. */
187 #ifndef MOVE_BY_PIECES_P
188 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
189 (SIZE, ALIGN) < MOVE_RATIO)
190 #endif
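/* For example, on a 32-bit target with word-aligned operands, copying
   16 bytes by pieces takes four SImode moves, so MOVE_BY_PIECES_P holds
   only when MOVE_RATIO is greater than 4.  */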
191
192 /* This array records the insn_code of insns to perform block moves. */
193 enum insn_code movstr_optab[NUM_MACHINE_MODES];
194
195 /* This array records the insn_code of insns to perform block clears. */
196 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
197
198 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
199
200 #ifndef SLOW_UNALIGNED_ACCESS
201 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
202 #endif
203 \f
204 /* This is run once per compilation to set up which modes can be used
205 directly in memory and to initialize the block move optab. */
206
207 void
208 init_expr_once ()
209 {
210 rtx insn, pat;
211 enum machine_mode mode;
212 int num_clobbers;
213 rtx mem, mem1;
214 char *free_point;
215
216 start_sequence ();
217
218 /* Since we are on the permanent obstack, we must be sure we save this
219 spot AFTER we call start_sequence, since it will reuse the rtl it
220 makes. */
221 free_point = (char *) oballoc (0);
222
223 /* Try indexing by frame ptr and try by stack ptr.
224 It is known that on the Convex the stack ptr isn't a valid index.
225 With luck, one or the other is valid on any machine. */
226 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
227 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
228
229 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
230 pat = PATTERN (insn);
231
232 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
233 mode = (enum machine_mode) ((int) mode + 1))
234 {
235 int regno;
236 rtx reg;
237
238 direct_load[(int) mode] = direct_store[(int) mode] = 0;
239 PUT_MODE (mem, mode);
240 PUT_MODE (mem1, mode);
241
242 /* See if there is some register that can be used in this mode and
243 directly loaded or stored from memory. */
244
245 if (mode != VOIDmode && mode != BLKmode)
246 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
247 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
248 regno++)
249 {
250 if (! HARD_REGNO_MODE_OK (regno, mode))
251 continue;
252
253 reg = gen_rtx_REG (mode, regno);
254
255 SET_SRC (pat) = mem;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
259
260 SET_SRC (pat) = mem1;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
264
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
269
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem1;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
274 }
275 }
276
277 end_sequence ();
278 obfree (free_point);
279 }
280
281 /* This is run at the start of compiling a function. */
282
283 void
284 init_expr ()
285 {
286 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
287
288 pending_chain = 0;
289 pending_stack_adjust = 0;
290 inhibit_defer_pop = 0;
291 saveregs_value = 0;
292 apply_args_value = 0;
293 forced_labels = 0;
294 }
295
296 void
297 mark_expr_status (p)
298 struct expr_status *p;
299 {
300 if (p == NULL)
301 return;
302
303 ggc_mark_rtx (p->x_saveregs_value);
304 ggc_mark_rtx (p->x_apply_args_value);
305 ggc_mark_rtx (p->x_forced_labels);
306 }
307
308 void
309 free_expr_status (f)
310 struct function *f;
311 {
312 free (f->expr);
313 f->expr = NULL;
314 }
315
316 /* Small sanity check that the queue is empty at the end of a function. */
317 void
318 finish_expr_for_function ()
319 {
320 if (pending_chain)
321 abort ();
322 }
323 \f
324 /* Manage the queue of increment instructions to be output
325 for POSTINCREMENT_EXPR expressions, etc. */
326
327 /* Queue up to increment (or change) VAR later. BODY says how:
328 BODY should be the same thing you would pass to emit_insn
329 to increment right away. It will go to emit_insn later on.
330
331 The value is a QUEUED expression to be used in place of VAR
332 where you want to guarantee the pre-incrementation value of VAR. */
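/* For example, when "x = *p++" is expanded, the increment of P is queued:
   the load uses a QUEUED rtx standing for P's old value, and the add that
   bumps P is only emitted when emit_queue flushes pending_chain.  */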
333
334 static rtx
335 enqueue_insn (var, body)
336 rtx var, body;
337 {
338 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
339 body, pending_chain);
340 return pending_chain;
341 }
342
343 /* Use protect_from_queue to convert a QUEUED expression
344 into something that you can put immediately into an instruction.
345 If the queued incrementation has not happened yet,
346 protect_from_queue returns the variable itself.
347 If the incrementation has happened, protect_from_queue returns a temp
348 that contains a copy of the old value of the variable.
349
350 Any time an rtx which might possibly be a QUEUED is to be put
351 into an instruction, it must be passed through protect_from_queue first.
352 QUEUED expressions are not meaningful in instructions.
353
354 Do not pass a value through protect_from_queue and then hold
355 on to it for a while before putting it in an instruction!
356 If the queue is flushed in between, incorrect code will result. */
357
358 rtx
359 protect_from_queue (x, modify)
360 register rtx x;
361 int modify;
362 {
363 register RTX_CODE code = GET_CODE (x);
364
365 #if 0 /* A QUEUED can hang around after the queue is forced out. */
366 /* Shortcut for most common case. */
367 if (pending_chain == 0)
368 return x;
369 #endif
370
371 if (code != QUEUED)
372 {
373 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
374 use of autoincrement. Make a copy of the contents of the memory
375 location rather than a copy of the address, but not if the value is
376 of mode BLKmode. Don't modify X in place since it might be
377 shared. */
378 if (code == MEM && GET_MODE (x) != BLKmode
379 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
380 {
381 register rtx y = XEXP (x, 0);
382 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
383
384 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
385 MEM_COPY_ATTRIBUTES (new, x);
386 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
387
388 if (QUEUED_INSN (y))
389 {
390 register rtx temp = gen_reg_rtx (GET_MODE (new));
391 emit_insn_before (gen_move_insn (temp, new),
392 QUEUED_INSN (y));
393 return temp;
394 }
395 return new;
396 }
397 /* Otherwise, recursively protect the subexpressions of all
398 the kinds of rtx's that can contain a QUEUED. */
399 if (code == MEM)
400 {
401 rtx tem = protect_from_queue (XEXP (x, 0), 0);
402 if (tem != XEXP (x, 0))
403 {
404 x = copy_rtx (x);
405 XEXP (x, 0) = tem;
406 }
407 }
408 else if (code == PLUS || code == MULT)
409 {
410 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
411 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
412 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = new0;
416 XEXP (x, 1) = new1;
417 }
418 }
419 return x;
420 }
421 /* If the increment has not happened, use the variable itself. */
422 if (QUEUED_INSN (x) == 0)
423 return QUEUED_VAR (x);
424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
434 }
435
436 /* Return nonzero if X contains a QUEUED expression:
437 if it contains anything that will be altered by a queued increment.
438 We handle only combinations of MEM, PLUS, MINUS and MULT operators
439 since memory addresses generally contain only those. */
440
441 int
442 queued_subexp_p (x)
443 rtx x;
444 {
445 register enum rtx_code code = GET_CODE (x);
446 switch (code)
447 {
448 case QUEUED:
449 return 1;
450 case MEM:
451 return queued_subexp_p (XEXP (x, 0));
452 case MULT:
453 case PLUS:
454 case MINUS:
455 return (queued_subexp_p (XEXP (x, 0))
456 || queued_subexp_p (XEXP (x, 1)));
457 default:
458 return 0;
459 }
460 }
461
462 /* Perform all the pending incrementations. */
463
464 void
465 emit_queue ()
466 {
467 register rtx p;
468 while ((p = pending_chain))
469 {
470 rtx body = QUEUED_BODY (p);
471
472 if (GET_CODE (body) == SEQUENCE)
473 {
474 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
475 emit_insn (QUEUED_BODY (p));
476 }
477 else
478 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
479 pending_chain = QUEUED_NEXT (p);
480 }
481 }
482 \f
483 /* Copy data from FROM to TO, where the machine modes are not the same.
484 Both modes may be integer, or both may be floating.
485 UNSIGNEDP should be nonzero if FROM is an unsigned type.
486 This causes zero-extension instead of sign-extension. */
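/* For example, widening an SImode register into a DImode register with
   UNSIGNEDP nonzero emits a zero_extendsidi2 insn if the target provides
   one; otherwise the result is assembled a word at a time below.  */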
487
488 void
489 convert_move (to, from, unsignedp)
490 register rtx to, from;
491 int unsignedp;
492 {
493 enum machine_mode to_mode = GET_MODE (to);
494 enum machine_mode from_mode = GET_MODE (from);
495 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
496 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
497 enum insn_code code;
498 rtx libcall;
499
500 /* rtx code for making an equivalent value. */
501 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
502
503 to = protect_from_queue (to, 1);
504 from = protect_from_queue (from, 0);
505
506 if (to_real != from_real)
507 abort ();
508
509 /* If FROM is a SUBREG that indicates that we have already done at least
510 the required extension, strip it. We don't handle such SUBREGs as
511 TO here. */
512
513 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
514 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
515 >= GET_MODE_SIZE (to_mode))
516 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
517 from = gen_lowpart (to_mode, from), from_mode = to_mode;
518
519 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
520 abort ();
521
522 if (to_mode == from_mode
523 || (from_mode == VOIDmode && CONSTANT_P (from)))
524 {
525 emit_move_insn (to, from);
526 return;
527 }
528
529 if (to_real)
530 {
531 rtx value;
532
533 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
534 {
535 /* Try converting directly if the insn is supported. */
536 if ((code = can_extend_p (to_mode, from_mode, 0))
537 != CODE_FOR_nothing)
538 {
539 emit_unop_insn (code, to, from, UNKNOWN);
540 return;
541 }
542 }
543
544 #ifdef HAVE_trunchfqf2
545 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
546 {
547 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
548 return;
549 }
550 #endif
551 #ifdef HAVE_trunctqfqf2
552 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
553 {
554 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
555 return;
556 }
557 #endif
558 #ifdef HAVE_truncsfqf2
559 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
560 {
561 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
562 return;
563 }
564 #endif
565 #ifdef HAVE_truncdfqf2
566 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_truncxfqf2
573 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_trunctfqf2
580 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586
587 #ifdef HAVE_trunctqfhf2
588 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
589 {
590 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_truncsfhf2
595 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
596 {
597 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncdfhf2
602 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncxfhf2
609 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_trunctfhf2
616 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622
623 #ifdef HAVE_truncsftqf2
624 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
625 {
626 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncdftqf2
631 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncxftqf2
638 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_trunctftqf2
645 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651
652 #ifdef HAVE_truncdfsf2
653 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
654 {
655 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
656 return;
657 }
658 #endif
659 #ifdef HAVE_truncxfsf2
660 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_trunctfsf2
667 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
668 {
669 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncxfdf2
674 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_trunctfdf2
681 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
682 {
683 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687
688 libcall = (rtx) 0;
689 switch (from_mode)
690 {
691 case SFmode:
692 switch (to_mode)
693 {
694 case DFmode:
695 libcall = extendsfdf2_libfunc;
696 break;
697
698 case XFmode:
699 libcall = extendsfxf2_libfunc;
700 break;
701
702 case TFmode:
703 libcall = extendsftf2_libfunc;
704 break;
705
706 default:
707 break;
708 }
709 break;
710
711 case DFmode:
712 switch (to_mode)
713 {
714 case SFmode:
715 libcall = truncdfsf2_libfunc;
716 break;
717
718 case XFmode:
719 libcall = extenddfxf2_libfunc;
720 break;
721
722 case TFmode:
723 libcall = extenddftf2_libfunc;
724 break;
725
726 default:
727 break;
728 }
729 break;
730
731 case XFmode:
732 switch (to_mode)
733 {
734 case SFmode:
735 libcall = truncxfsf2_libfunc;
736 break;
737
738 case DFmode:
739 libcall = truncxfdf2_libfunc;
740 break;
741
742 default:
743 break;
744 }
745 break;
746
747 case TFmode:
748 switch (to_mode)
749 {
750 case SFmode:
751 libcall = trunctfsf2_libfunc;
752 break;
753
754 case DFmode:
755 libcall = trunctfdf2_libfunc;
756 break;
757
758 default:
759 break;
760 }
761 break;
762
763 default:
764 break;
765 }
766
767 if (libcall == (rtx) 0)
768 /* This conversion is not implemented yet. */
769 abort ();
770
771 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
772 1, from, from_mode);
773 emit_move_insn (to, value);
774 return;
775 }
776
777 /* Now both modes are integers. */
778
779 /* Handle expanding beyond a word. */
780 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
781 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
782 {
783 rtx insns;
784 rtx lowpart;
785 rtx fill_value;
786 rtx lowfrom;
787 int i;
788 enum machine_mode lowpart_mode;
789 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
790
791 /* Try converting directly if the insn is supported. */
792 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
793 != CODE_FOR_nothing)
794 {
795 /* If FROM is a SUBREG, put it into a register. Do this
796 so that we always generate the same set of insns for
797 better cse'ing; if an intermediate assignment occurred,
798 we won't be doing the operation directly on the SUBREG. */
799 if (optimize > 0 && GET_CODE (from) == SUBREG)
800 from = force_reg (from_mode, from);
801 emit_unop_insn (code, to, from, equiv_code);
802 return;
803 }
804 /* Next, try converting via full word. */
805 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
806 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
807 != CODE_FOR_nothing))
808 {
809 if (GET_CODE (to) == REG)
810 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
811 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
812 emit_unop_insn (code, to,
813 gen_lowpart (word_mode, to), equiv_code);
814 return;
815 }
816
817 /* No special multiword conversion insn; do it by hand. */
818 start_sequence ();
819
820 /* Since we will turn this into a no conflict block, we must ensure
821 that the source does not overlap the target. */
822
823 if (reg_overlap_mentioned_p (to, from))
824 from = force_reg (from_mode, from);
825
826 /* Get a copy of FROM widened to a word, if necessary. */
827 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
828 lowpart_mode = word_mode;
829 else
830 lowpart_mode = from_mode;
831
832 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
833
834 lowpart = gen_lowpart (lowpart_mode, to);
835 emit_move_insn (lowpart, lowfrom);
836
837 /* Compute the value to put in each remaining word. */
838 if (unsignedp)
839 fill_value = const0_rtx;
840 else
841 {
842 #ifdef HAVE_slt
843 if (HAVE_slt
844 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
845 && STORE_FLAG_VALUE == -1)
846 {
847 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
848 lowpart_mode, 0, 0);
849 fill_value = gen_reg_rtx (word_mode);
850 emit_insn (gen_slt (fill_value));
851 }
852 else
853 #endif
854 {
855 fill_value
856 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
857 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
858 NULL_RTX, 0);
859 fill_value = convert_to_mode (word_mode, fill_value, 1);
860 }
861 }
862
863 /* Fill the remaining words. */
864 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
865 {
866 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
867 rtx subword = operand_subword (to, index, 1, to_mode);
868
869 if (subword == 0)
870 abort ();
871
872 if (fill_value != subword)
873 emit_move_insn (subword, fill_value);
874 }
875
876 insns = get_insns ();
877 end_sequence ();
878
879 emit_no_conflict_block (insns, to, from, NULL_RTX,
880 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
881 return;
882 }
883
884 /* Truncating multi-word to a word or less. */
885 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
886 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
887 {
888 if (!((GET_CODE (from) == MEM
889 && ! MEM_VOLATILE_P (from)
890 && direct_load[(int) to_mode]
891 && ! mode_dependent_address_p (XEXP (from, 0)))
892 || GET_CODE (from) == REG
893 || GET_CODE (from) == SUBREG))
894 from = force_reg (from_mode, from);
895 convert_move (to, gen_lowpart (word_mode, from), 0);
896 return;
897 }
898
899 /* Handle pointer conversion */ /* SPEE 900220 */
900 if (to_mode == PQImode)
901 {
902 if (from_mode != QImode)
903 from = convert_to_mode (QImode, from, unsignedp);
904
905 #ifdef HAVE_truncqipqi2
906 if (HAVE_truncqipqi2)
907 {
908 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
909 return;
910 }
911 #endif /* HAVE_truncqipqi2 */
912 abort ();
913 }
914
915 if (from_mode == PQImode)
916 {
917 if (to_mode != QImode)
918 {
919 from = convert_to_mode (QImode, from, unsignedp);
920 from_mode = QImode;
921 }
922 else
923 {
924 #ifdef HAVE_extendpqiqi2
925 if (HAVE_extendpqiqi2)
926 {
927 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
928 return;
929 }
930 #endif /* HAVE_extendpqiqi2 */
931 abort ();
932 }
933 }
934
935 if (to_mode == PSImode)
936 {
937 if (from_mode != SImode)
938 from = convert_to_mode (SImode, from, unsignedp);
939
940 #ifdef HAVE_truncsipsi2
941 if (HAVE_truncsipsi2)
942 {
943 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
944 return;
945 }
946 #endif /* HAVE_truncsipsi2 */
947 abort ();
948 }
949
950 if (from_mode == PSImode)
951 {
952 if (to_mode != SImode)
953 {
954 from = convert_to_mode (SImode, from, unsignedp);
955 from_mode = SImode;
956 }
957 else
958 {
959 #ifdef HAVE_extendpsisi2
960 if (HAVE_extendpsisi2)
961 {
962 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_extendpsisi2 */
966 abort ();
967 }
968 }
969
970 if (to_mode == PDImode)
971 {
972 if (from_mode != DImode)
973 from = convert_to_mode (DImode, from, unsignedp);
974
975 #ifdef HAVE_truncdipdi2
976 if (HAVE_truncdipdi2)
977 {
978 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_truncdipdi2 */
982 abort ();
983 }
984
985 if (from_mode == PDImode)
986 {
987 if (to_mode != DImode)
988 {
989 from = convert_to_mode (DImode, from, unsignedp);
990 from_mode = DImode;
991 }
992 else
993 {
994 #ifdef HAVE_extendpdidi2
995 if (HAVE_extendpdidi2)
996 {
997 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_extendpdidi2 */
1001 abort ();
1002 }
1003 }
1004
1005 /* Now follow all the conversions between integers
1006 no more than a word long. */
1007
1008 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1009 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1010 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1011 GET_MODE_BITSIZE (from_mode)))
1012 {
1013 if (!((GET_CODE (from) == MEM
1014 && ! MEM_VOLATILE_P (from)
1015 && direct_load[(int) to_mode]
1016 && ! mode_dependent_address_p (XEXP (from, 0)))
1017 || GET_CODE (from) == REG
1018 || GET_CODE (from) == SUBREG))
1019 from = force_reg (from_mode, from);
1020 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1021 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1022 from = copy_to_reg (from);
1023 emit_move_insn (to, gen_lowpart (to_mode, from));
1024 return;
1025 }
1026
1027 /* Handle extension. */
1028 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1029 {
1030 /* Convert directly if that works. */
1031 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1032 != CODE_FOR_nothing)
1033 {
1034 emit_unop_insn (code, to, from, equiv_code);
1035 return;
1036 }
1037 else
1038 {
1039 enum machine_mode intermediate;
1040 rtx tmp;
1041 tree shift_amount;
1042
1043 /* Search for a mode to convert via. */
1044 for (intermediate = from_mode; intermediate != VOIDmode;
1045 intermediate = GET_MODE_WIDER_MODE (intermediate))
1046 if (((can_extend_p (to_mode, intermediate, unsignedp)
1047 != CODE_FOR_nothing)
1048 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1050 GET_MODE_BITSIZE (intermediate))))
1051 && (can_extend_p (intermediate, from_mode, unsignedp)
1052 != CODE_FOR_nothing))
1053 {
1054 convert_move (to, convert_to_mode (intermediate, from,
1055 unsignedp), unsignedp);
1056 return;
1057 }
1058
1059 /* No suitable intermediate mode.
1060 Generate what we need with shifts. */
1061 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1062 - GET_MODE_BITSIZE (from_mode), 0);
1063 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1064 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1065 to, unsignedp);
1066 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1067 to, unsignedp);
1068 if (tmp != to)
1069 emit_move_insn (to, tmp);
1070 return;
1071 }
1072 }
1073
1074 /* Support special truncate insns for certain modes. */
1075
1076 if (from_mode == DImode && to_mode == SImode)
1077 {
1078 #ifdef HAVE_truncdisi2
1079 if (HAVE_truncdisi2)
1080 {
1081 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1082 return;
1083 }
1084 #endif
1085 convert_move (to, force_reg (from_mode, from), unsignedp);
1086 return;
1087 }
1088
1089 if (from_mode == DImode && to_mode == HImode)
1090 {
1091 #ifdef HAVE_truncdihi2
1092 if (HAVE_truncdihi2)
1093 {
1094 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1095 return;
1096 }
1097 #endif
1098 convert_move (to, force_reg (from_mode, from), unsignedp);
1099 return;
1100 }
1101
1102 if (from_mode == DImode && to_mode == QImode)
1103 {
1104 #ifdef HAVE_truncdiqi2
1105 if (HAVE_truncdiqi2)
1106 {
1107 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1108 return;
1109 }
1110 #endif
1111 convert_move (to, force_reg (from_mode, from), unsignedp);
1112 return;
1113 }
1114
1115 if (from_mode == SImode && to_mode == HImode)
1116 {
1117 #ifdef HAVE_truncsihi2
1118 if (HAVE_truncsihi2)
1119 {
1120 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1121 return;
1122 }
1123 #endif
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1125 return;
1126 }
1127
1128 if (from_mode == SImode && to_mode == QImode)
1129 {
1130 #ifdef HAVE_truncsiqi2
1131 if (HAVE_truncsiqi2)
1132 {
1133 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1134 return;
1135 }
1136 #endif
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1138 return;
1139 }
1140
1141 if (from_mode == HImode && to_mode == QImode)
1142 {
1143 #ifdef HAVE_trunchiqi2
1144 if (HAVE_trunchiqi2)
1145 {
1146 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1147 return;
1148 }
1149 #endif
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 return;
1152 }
1153
1154 if (from_mode == TImode && to_mode == DImode)
1155 {
1156 #ifdef HAVE_trunctidi2
1157 if (HAVE_trunctidi2)
1158 {
1159 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1160 return;
1161 }
1162 #endif
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 return;
1165 }
1166
1167 if (from_mode == TImode && to_mode == SImode)
1168 {
1169 #ifdef HAVE_trunctisi2
1170 if (HAVE_trunctisi2)
1171 {
1172 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1173 return;
1174 }
1175 #endif
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 return;
1178 }
1179
1180 if (from_mode == TImode && to_mode == HImode)
1181 {
1182 #ifdef HAVE_trunctihi2
1183 if (HAVE_trunctihi2)
1184 {
1185 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1186 return;
1187 }
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1191 }
1192
1193 if (from_mode == TImode && to_mode == QImode)
1194 {
1195 #ifdef HAVE_trunctiqi2
1196 if (HAVE_trunctiqi2)
1197 {
1198 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1199 return;
1200 }
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1204 }
1205
1206 /* Handle truncation of volatile memrefs, and so on;
1207 the things that couldn't be truncated directly,
1208 and for which there was no special instruction. */
1209 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1210 {
1211 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1212 emit_move_insn (to, temp);
1213 return;
1214 }
1215
1216 /* Mode combination is not recognized. */
1217 abort ();
1218 }
1219
1220 /* Return an rtx for a value that would result
1221 from converting X to mode MODE.
1222 Both X and MODE may be floating, or both integer.
1223 UNSIGNEDP is nonzero if X is an unsigned value.
1224 This can be done by referring to a part of X in place
1225 or by copying to a new temporary with conversion.
1226
1227 This function *must not* call protect_from_queue
1228 except when putting X into an insn (in which case convert_move does it). */
1229
1230 rtx
1231 convert_to_mode (mode, x, unsignedp)
1232 enum machine_mode mode;
1233 rtx x;
1234 int unsignedp;
1235 {
1236 return convert_modes (mode, VOIDmode, x, unsignedp);
1237 }
1238
1239 /* Return an rtx for a value that would result
1240 from converting X from mode OLDMODE to mode MODE.
1241 Both modes may be floating, or both integer.
1242 UNSIGNEDP is nonzero if X is an unsigned value.
1243
1244 This can be done by referring to a part of X in place
1245 or by copying to a new temporary with conversion.
1246
1247 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1248
1249 This function *must not* call protect_from_queue
1250 except when putting X into an insn (in which case convert_move does it). */
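/* For example, converting the QImode constant -1 to an unsigned SImode
   value returns (const_int 255): the constant is masked to the width of
   OLDMODE and then reinterpreted in MODE.  */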
1251
1252 rtx
1253 convert_modes (mode, oldmode, x, unsignedp)
1254 enum machine_mode mode, oldmode;
1255 rtx x;
1256 int unsignedp;
1257 {
1258 register rtx temp;
1259
1260 /* If FROM is a SUBREG that indicates that we have already done at least
1261 the required extension, strip it. */
1262
1263 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1264 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1265 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1266 x = gen_lowpart (mode, x);
1267
1268 if (GET_MODE (x) != VOIDmode)
1269 oldmode = GET_MODE (x);
1270
1271 if (mode == oldmode)
1272 return x;
1273
1274 /* There is one case that we must handle specially: If we are converting
1275 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1276 we are to interpret the constant as unsigned, gen_lowpart will do
1277 the wrong if the constant appears negative. What we want to do is
1278 make the high-order word of the constant zero, not all ones. */
1279
1280 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1281 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1282 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1283 {
1284 HOST_WIDE_INT val = INTVAL (x);
1285
1286 if (oldmode != VOIDmode
1287 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1288 {
1289 int width = GET_MODE_BITSIZE (oldmode);
1290
1291 /* We need to zero extend VAL. */
1292 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1293 }
1294
1295 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1296 }
1297
1298 /* We can do this with a gen_lowpart if both desired and current modes
1299 are integer, and this is either a constant integer, a register, or a
1300 non-volatile MEM. Except for the constant case where MODE is no
1301 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1302
1303 if ((GET_CODE (x) == CONST_INT
1304 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1305 || (GET_MODE_CLASS (mode) == MODE_INT
1306 && GET_MODE_CLASS (oldmode) == MODE_INT
1307 && (GET_CODE (x) == CONST_DOUBLE
1308 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1309 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1310 && direct_load[(int) mode])
1311 || (GET_CODE (x) == REG
1312 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1313 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1314 {
1315 /* ?? If we don't know OLDMODE, we have to assume here that
1316 X does not need sign- or zero-extension. This may not be
1317 the case, but it's the best we can do. */
1318 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1319 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1320 {
1321 HOST_WIDE_INT val = INTVAL (x);
1322 int width = GET_MODE_BITSIZE (oldmode);
1323
1324 /* We must sign or zero-extend in this case. Start by
1325 zero-extending, then sign extend if we need to. */
1326 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 if (! unsignedp
1328 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1329 val |= (HOST_WIDE_INT) (-1) << width;
1330
1331 return GEN_INT (val);
1332 }
1333
1334 return gen_lowpart (mode, x);
1335 }
1336
1337 temp = gen_reg_rtx (mode);
1338 convert_move (temp, x, unsignedp);
1339 return temp;
1340 }
1341 \f
1342
1343 /* This macro is used to determine the largest unit size that
1344 move_by_pieces can use. */
1345
1346 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1347 move efficiently, as opposed to MOVE_MAX which is the maximum
1348 number of bytes we can move with a single instruction. */
1349
1350 #ifndef MOVE_MAX_PIECES
1351 #define MOVE_MAX_PIECES MOVE_MAX
1352 #endif
1353
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
1360 void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364 {
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX_PIECES + 1;
1368 enum machine_mode mode = VOIDmode, tmode;
1369 enum insn_code icode;
1370
1371 data.offset = 0;
1372 data.to_addr = to_addr;
1373 data.from_addr = from_addr;
1374 data.to = to;
1375 data.from = from;
1376 data.autinc_to
1377 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1378 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1379 data.autinc_from
1380 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1381 || GET_CODE (from_addr) == POST_INC
1382 || GET_CODE (from_addr) == POST_DEC);
1383
1384 data.explicit_inc_from = 0;
1385 data.explicit_inc_to = 0;
1386 data.reverse
1387 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1388 if (data.reverse) data.offset = len;
1389 data.len = len;
1390
1391 data.to_struct = MEM_IN_STRUCT_P (to);
1392 data.from_struct = MEM_IN_STRUCT_P (from);
1393 data.to_readonly = RTX_UNCHANGING_P (to);
1394 data.from_readonly = RTX_UNCHANGING_P (from);
1395
1396 /* If copying requires more than two move insns,
1397 copy addresses to registers (to make displacements shorter)
1398 and use post-increment if available. */
1399 if (!(data.autinc_from && data.autinc_to)
1400 && move_by_pieces_ninsns (len, align) > 2)
1401 {
1402 /* Find the mode of the largest move... */
1403 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1404 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1405 if (GET_MODE_SIZE (tmode) < max_size)
1406 mode = tmode;
1407
1408 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1409 {
1410 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1411 data.autinc_from = 1;
1412 data.explicit_inc_from = -1;
1413 }
1414 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1415 {
1416 data.from_addr = copy_addr_to_reg (from_addr);
1417 data.autinc_from = 1;
1418 data.explicit_inc_from = 1;
1419 }
1420 if (!data.autinc_from && CONSTANT_P (from_addr))
1421 data.from_addr = copy_addr_to_reg (from_addr);
1422 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1423 {
1424 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1425 data.autinc_to = 1;
1426 data.explicit_inc_to = -1;
1427 }
1428 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1429 {
1430 data.to_addr = copy_addr_to_reg (to_addr);
1431 data.autinc_to = 1;
1432 data.explicit_inc_to = 1;
1433 }
1434 if (!data.autinc_to && CONSTANT_P (to_addr))
1435 data.to_addr = copy_addr_to_reg (to_addr);
1436 }
1437
1438 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1439 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1440 align = MOVE_MAX;
1441
1442 /* First move what we can in the largest integer mode, then go to
1443 successively smaller modes. */
1444
1445 while (max_size > 1)
1446 {
1447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1449 if (GET_MODE_SIZE (tmode) < max_size)
1450 mode = tmode;
1451
1452 if (mode == VOIDmode)
1453 break;
1454
1455 icode = mov_optab->handlers[(int) mode].insn_code;
1456 if (icode != CODE_FOR_nothing
1457 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1458 GET_MODE_SIZE (mode)))
1459 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1460
1461 max_size = GET_MODE_SIZE (mode);
1462 }
1463
1464 /* The code above should have handled everything. */
1465 if (data.len > 0)
1466 abort ();
1467 }
1468
1469 /* Return number of insns required to move L bytes by pieces.
1470 ALIGN (in bytes) is maximum alignment we can assume. */
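/* For example, L == 10 with word alignment on a 32-bit target gives 3:
   two SImode moves followed by one HImode move.  */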
1471
1472 static int
1473 move_by_pieces_ninsns (l, align)
1474 unsigned int l;
1475 int align;
1476 {
1477 register int n_insns = 0;
1478 int max_size = MOVE_MAX + 1;
1479
1480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1481 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1482 align = MOVE_MAX;
1483
1484 while (max_size > 1)
1485 {
1486 enum machine_mode mode = VOIDmode, tmode;
1487 enum insn_code icode;
1488
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1491 if (GET_MODE_SIZE (tmode) < max_size)
1492 mode = tmode;
1493
1494 if (mode == VOIDmode)
1495 break;
1496
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing
1499 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1500 GET_MODE_SIZE (mode)))
1501 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1502
1503 max_size = GET_MODE_SIZE (mode);
1504 }
1505
1506 return n_insns;
1507 }
1508
1509 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1510 with move instructions for mode MODE. GENFUN is the gen_... function
1511 to make a move insn for that mode. DATA has all the other info. */
1512
1513 static void
1514 move_by_pieces_1 (genfun, mode, data)
1515 rtx (*genfun) PARAMS ((rtx, ...));
1516 enum machine_mode mode;
1517 struct move_by_pieces *data;
1518 {
1519 register int size = GET_MODE_SIZE (mode);
1520 register rtx to1, from1;
1521
1522 while (data->len >= size)
1523 {
1524 if (data->reverse) data->offset -= size;
1525
1526 to1 = (data->autinc_to
1527 ? gen_rtx_MEM (mode, data->to_addr)
1528 : copy_rtx (change_address (data->to, mode,
1529 plus_constant (data->to_addr,
1530 data->offset))));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1532 RTX_UNCHANGING_P (to1) = data->to_readonly;
1533
1534 from1
1535 = (data->autinc_from
1536 ? gen_rtx_MEM (mode, data->from_addr)
1537 : copy_rtx (change_address (data->from, mode,
1538 plus_constant (data->from_addr,
1539 data->offset))));
1540 MEM_IN_STRUCT_P (from1) = data->from_struct;
1541 RTX_UNCHANGING_P (from1) = data->from_readonly;
1542
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1544 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1546 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1547
1548 emit_insn ((*genfun) (to1, from1));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1550 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1552 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558 }
1559 \f
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes.
1569
1570 Return the address of the new block, if memcpy is called and returns it,
1571 0 otherwise. */
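/* The strategy below: open-code the copy with move_by_pieces when SIZE is
   a small constant, otherwise try the target's movstrM patterns from the
   narrowest mode upward, and finally fall back to a call to memcpy (or
   bcopy when TARGET_MEM_FUNCTIONS is not defined).  */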
1572
1573 rtx
1574 emit_block_move (x, y, size, align)
1575 rtx x, y;
1576 rtx size;
1577 int align;
1578 {
1579 rtx retval = 0;
1580 #ifdef TARGET_MEM_FUNCTIONS
1581 static tree fn;
1582 tree call_expr, arg_list;
1583 #endif
1584
1585 if (GET_MODE (x) != BLKmode)
1586 abort ();
1587
1588 if (GET_MODE (y) != BLKmode)
1589 abort ();
1590
1591 x = protect_from_queue (x, 1);
1592 y = protect_from_queue (y, 0);
1593 size = protect_from_queue (size, 0);
1594
1595 if (GET_CODE (x) != MEM)
1596 abort ();
1597 if (GET_CODE (y) != MEM)
1598 abort ();
1599 if (size == 0)
1600 abort ();
1601
1602 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1603 move_by_pieces (x, y, INTVAL (size), align);
1604 else
1605 {
1606 /* Try the most limited insn first, because there's no point
1607 including more than one in the machine description unless
1608 the more limited one has some advantage. */
1609
1610 rtx opalign = GEN_INT (align);
1611 enum machine_mode mode;
1612
1613 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1614 mode = GET_MODE_WIDER_MODE (mode))
1615 {
1616 enum insn_code code = movstr_optab[(int) mode];
1617 insn_operand_predicate_fn pred;
1618
1619 if (code != CODE_FOR_nothing
1620 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1621 here because if SIZE is less than the mode mask, as it is
1622 returned by the macro, it will definitely be less than the
1623 actual mode mask. */
1624 && ((GET_CODE (size) == CONST_INT
1625 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1626 <= (GET_MODE_MASK (mode) >> 1)))
1627 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1628 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1629 || (*pred) (x, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1631 || (*pred) (y, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1633 || (*pred) (opalign, VOIDmode)))
1634 {
1635 rtx op2;
1636 rtx last = get_last_insn ();
1637 rtx pat;
1638
1639 op2 = convert_to_mode (mode, size, 1);
1640 pred = insn_data[(int) code].operand[2].predicate;
1641 if (pred != 0 && ! (*pred) (op2, mode))
1642 op2 = copy_to_mode_reg (mode, op2);
1643
1644 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1645 if (pat)
1646 {
1647 emit_insn (pat);
1648 return 0;
1649 }
1650 else
1651 delete_insns_since (last);
1652 }
1653 }
1654
1655 /* X, Y, or SIZE may have been passed through protect_from_queue.
1656
1657 It is unsafe to save the value generated by protect_from_queue
1658 and reuse it later. Consider what happens if emit_queue is
1659 called before the return value from protect_from_queue is used.
1660
1661 Expansion of the CALL_EXPR below will call emit_queue before
1662 we are finished emitting RTL for argument setup. So if we are
1663 not careful we could get the wrong value for an argument.
1664
1665 To avoid this problem we go ahead and emit code to copy X, Y &
1666 SIZE into new pseudos. We can then place those new pseudos
1667 into an RTL_EXPR and use them later, even after a call to
1668 emit_queue.
1669
1670 Note this is not strictly needed for library calls since they
1671 do not call emit_queue before loading their arguments. However,
1672 we may need to have library calls call emit_queue in the future
1673 since failing to do so could cause problems for targets which
1674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1675 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1676 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1677
1678 #ifdef TARGET_MEM_FUNCTIONS
1679 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1680 #else
1681 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1682 TREE_UNSIGNED (integer_type_node));
1683 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1684 #endif
1685
1686 #ifdef TARGET_MEM_FUNCTIONS
1687 /* It is incorrect to use the libcall calling conventions to call
1688 memcpy in this context.
1689
1690 This could be a user call to memcpy and the user may wish to
1691 examine the return value from memcpy.
1692
1693 For targets where libcalls and normal calls have different conventions
1694 for returning pointers, we could end up generating incorrect code.
1695
1696 So instead of using a libcall sequence we build up a suitable
1697 CALL_EXPR and expand the call in the normal fashion. */
1698 if (fn == NULL_TREE)
1699 {
1700 tree fntype;
1701
1702 /* This was copied from except.c; I don't know if all this is
1703 necessary in this context or not. */
1704 fn = get_identifier ("memcpy");
1705 push_obstacks_nochange ();
1706 end_temporary_allocation ();
1707 fntype = build_pointer_type (void_type_node);
1708 fntype = build_function_type (fntype, NULL_TREE);
1709 fn = build_decl (FUNCTION_DECL, fn, fntype);
1710 ggc_add_tree_root (&fn, 1);
1711 DECL_EXTERNAL (fn) = 1;
1712 TREE_PUBLIC (fn) = 1;
1713 DECL_ARTIFICIAL (fn) = 1;
1714 make_decl_rtl (fn, NULL_PTR, 1);
1715 assemble_external (fn);
1716 pop_obstacks ();
1717 }
1718
1719 /* We need to make an argument list for the function call.
1720
1721 memcpy has three arguments, the first two are void * addresses and
1722 the last is a size_t byte count for the copy. */
1723 arg_list
1724 = build_tree_list (NULL_TREE,
1725 make_tree (build_pointer_type (void_type_node), x));
1726 TREE_CHAIN (arg_list)
1727 = build_tree_list (NULL_TREE,
1728 make_tree (build_pointer_type (void_type_node), y));
1729 TREE_CHAIN (TREE_CHAIN (arg_list))
1730 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1731 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1732
1733 /* Now we have to build up the CALL_EXPR itself. */
1734 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1735 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1736 call_expr, arg_list, NULL_TREE);
1737 TREE_SIDE_EFFECTS (call_expr) = 1;
1738
1739 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1740 #else
1741 emit_library_call (bcopy_libfunc, 0,
1742 VOIDmode, 3, y, Pmode, x, Pmode,
1743 convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node)),
1745 TYPE_MODE (integer_type_node));
1746 #endif
1747 }
1748
1749 return retval;
1750 }
1751 \f
1752 /* Copy all or part of a value X into registers starting at REGNO.
1753 The number of registers to be filled is NREGS. */
1754
1755 void
1756 move_block_to_reg (regno, x, nregs, mode)
1757 int regno;
1758 rtx x;
1759 int nregs;
1760 enum machine_mode mode;
1761 {
1762 int i;
1763 #ifdef HAVE_load_multiple
1764 rtx pat;
1765 rtx last;
1766 #endif
1767
1768 if (nregs == 0)
1769 return;
1770
1771 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1772 x = validize_mem (force_const_mem (mode, x));
1773
1774 /* See if the machine can do this with a load multiple insn. */
1775 #ifdef HAVE_load_multiple
1776 if (HAVE_load_multiple)
1777 {
1778 last = get_last_insn ();
1779 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1780 GEN_INT (nregs));
1781 if (pat)
1782 {
1783 emit_insn (pat);
1784 return;
1785 }
1786 else
1787 delete_insns_since (last);
1788 }
1789 #endif
1790
1791 for (i = 0; i < nregs; i++)
1792 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1793 operand_subword_force (x, i, mode));
1794 }
1795
1796 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1797 The number of registers to be filled is NREGS. SIZE indicates the number
1798 of bytes in the object X. */
1799
1800
1801 void
1802 move_block_from_reg (regno, x, nregs, size)
1803 int regno;
1804 rtx x;
1805 int nregs;
1806 int size;
1807 {
1808 int i;
1809 #ifdef HAVE_store_multiple
1810 rtx pat;
1811 rtx last;
1812 #endif
1813 enum machine_mode mode;
1814
1815 /* If SIZE is that of a mode no bigger than a word, just use that
1816 mode's store operation. */
1817 if (size <= UNITS_PER_WORD
1818 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1819 {
1820 emit_move_insn (change_address (x, mode, NULL),
1821 gen_rtx_REG (mode, regno));
1822 return;
1823 }
1824
1825 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1826 to the left before storing to memory. Note that the previous test
1827 doesn't handle all cases (e.g. SIZE == 3). */
1828 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1829 {
1830 rtx tem = operand_subword (x, 0, 1, BLKmode);
1831 rtx shift;
1832
1833 if (tem == 0)
1834 abort ();
1835
1836 shift = expand_shift (LSHIFT_EXPR, word_mode,
1837 gen_rtx_REG (word_mode, regno),
1838 build_int_2 ((UNITS_PER_WORD - size)
1839 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1840 emit_move_insn (tem, shift);
1841 return;
1842 }
1843
1844 /* See if the machine can do this with a store multiple insn. */
1845 #ifdef HAVE_store_multiple
1846 if (HAVE_store_multiple)
1847 {
1848 last = get_last_insn ();
1849 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1850 GEN_INT (nregs));
1851 if (pat)
1852 {
1853 emit_insn (pat);
1854 return;
1855 }
1856 else
1857 delete_insns_since (last);
1858 }
1859 #endif
1860
1861 for (i = 0; i < nregs; i++)
1862 {
1863 rtx tem = operand_subword (x, i, 1, BLKmode);
1864
1865 if (tem == 0)
1866 abort ();
1867
1868 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1869 }
1870 }
1871
1872 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1873 registers represented by a PARALLEL. SSIZE represents the total size of
1874 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1875 SRC in bits. */
1876 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1877 the balance will be in what would be the low-order memory addresses, i.e.
1878 left justified for big endian, right justified for little endian. This
1879 happens to be true for the targets currently using this support. If this
1880 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1881 would be needed. */
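/* As an illustration, a value returned in two registers might be described
   by (parallel [(expr_list (reg:DI 3) (const_int 0))
                 (expr_list (reg:DI 4) (const_int 8))]),
   where each expr_list pairs a register with its byte offset in the block.
   The register numbers here are only illustrative.  */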
1882
1883 void
1884 emit_group_load (dst, orig_src, ssize, align)
1885 rtx dst, orig_src;
1886 int align, ssize;
1887 {
1888 rtx *tmps, src;
1889 int start, i;
1890
1891 if (GET_CODE (dst) != PARALLEL)
1892 abort ();
1893
1894 /* Check for a NULL entry, used to indicate that the parameter goes
1895 both on the stack and in registers. */
1896 if (XEXP (XVECEXP (dst, 0, 0), 0))
1897 start = 0;
1898 else
1899 start = 1;
1900
1901 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1902
1903 /* If we won't be loading directly from memory, protect the real source
1904 from strange tricks we might play. */
1905 src = orig_src;
1906 if (GET_CODE (src) != MEM)
1907 {
1908 if (GET_MODE (src) == VOIDmode)
1909 src = gen_reg_rtx (GET_MODE (dst));
1910 else
1911 src = gen_reg_rtx (GET_MODE (orig_src));
1912 emit_move_insn (src, orig_src);
1913 }
1914
1915 /* Process the pieces. */
1916 for (i = start; i < XVECLEN (dst, 0); i++)
1917 {
1918 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1919 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1920 int bytelen = GET_MODE_SIZE (mode);
1921 int shift = 0;
1922
1923 /* Handle trailing fragments that run over the size of the struct. */
1924 if (ssize >= 0 && bytepos + bytelen > ssize)
1925 {
1926 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1927 bytelen = ssize - bytepos;
1928 if (bytelen <= 0)
1929 abort();
1930 }
1931
1932 /* Optimize the access just a bit. */
1933 if (GET_CODE (src) == MEM
1934 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1935 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 {
1938 tmps[i] = gen_reg_rtx (mode);
1939 emit_move_insn (tmps[i],
1940 change_address (src, mode,
1941 plus_constant (XEXP (src, 0),
1942 bytepos)));
1943 }
1944 else if (GET_CODE (src) == CONCAT)
1945 {
1946 if (bytepos == 0
1947 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1948 tmps[i] = XEXP (src, 0);
1949 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1951 tmps[i] = XEXP (src, 1);
1952 else
1953 abort ();
1954 }
1955 else
1956 {
1957 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1958 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, align, ssize);
1960 }
1961
1962 if (BYTES_BIG_ENDIAN && shift)
1963 {
1964 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1965 tmps[i], 0, OPTAB_WIDEN);
1966 }
1967 }
1968 emit_queue();
1969
1970 /* Copy the extracted pieces into the proper (probable) hard regs. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1972 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1973 }
1974
1975 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1976 registers represented by a PARALLEL. SSIZE represents the total size of
1977 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
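/* Usage sketch (illustrative): the mirror image of emit_group_load.  With
   PAR describing the registers as in the sketch above and `dst' a
   word-aligned BLKmode MEM, the pieces are copied back out of the regs:

     emit_group_store (dst, par, 2 * GET_MODE_SIZE (SImode), UNITS_PER_WORD);  */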
1978
1979 void
1980 emit_group_store (orig_dst, src, ssize, align)
1981 rtx orig_dst, src;
1982 int ssize, align;
1983 {
1984 rtx *tmps, dst;
1985 int start, i;
1986
1987 if (GET_CODE (src) != PARALLEL)
1988 abort ();
1989
1990 /* Check for a NULL entry, used to indicate that the parameter goes
1991 both on the stack and in registers. */
1992 if (XEXP (XVECEXP (src, 0, 0), 0))
1993 start = 0;
1994 else
1995 start = 1;
1996
1997 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1998
1999 /* Copy the (probable) hard regs into pseudos. */
2000 for (i = start; i < XVECLEN (src, 0); i++)
2001 {
2002 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2003 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2004 emit_move_insn (tmps[i], reg);
2005 }
2006 emit_queue();
2007
2008 /* If we won't be storing directly into memory, protect the real destination
2009 from strange tricks we might play. */
2010 dst = orig_dst;
2011 if (GET_CODE (dst) == PARALLEL)
2012 {
2013 rtx temp;
2014
2015 /* We can get a PARALLEL dst if there is a conditional expression in
2016 a return statement. In that case, the dst and src are the same,
2017 so no action is necessary. */
2018 if (rtx_equal_p (dst, src))
2019 return;
2020
2021 /* It is unclear if we can ever reach here, but we may as well handle
2022 it. Allocate a temporary, and split this into a store/load to/from
2023 the temporary. */
2024
2025 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2026 emit_group_store (temp, src, ssize, align);
2027 emit_group_load (dst, temp, ssize, align);
2028 return;
2029 }
2030 else if (GET_CODE (dst) != MEM)
2031 {
2032 dst = gen_reg_rtx (GET_MODE (orig_dst));
2033 /* Make life a bit easier for combine. */
2034 emit_move_insn (dst, const0_rtx);
2035 }
2036 else if (! MEM_IN_STRUCT_P (dst))
2037 {
2038 /* store_bit_field requires that memory operations have
2039 mem_in_struct_p set; we might not. */
2040
2041 dst = copy_rtx (orig_dst);
2042 MEM_SET_IN_STRUCT_P (dst, 1);
2043 }
2044
2045 /* Process the pieces. */
2046 for (i = start; i < XVECLEN (src, 0); i++)
2047 {
2048 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2049 enum machine_mode mode = GET_MODE (tmps[i]);
2050 int bytelen = GET_MODE_SIZE (mode);
2051
2052 /* Handle trailing fragments that run over the size of the struct. */
2053 if (ssize >= 0 && bytepos + bytelen > ssize)
2054 {
2055 if (BYTES_BIG_ENDIAN)
2056 {
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2059 tmps[i], 0, OPTAB_WIDEN);
2060 }
2061 bytelen = ssize - bytepos;
2062 }
2063
2064 /* Optimize the access just a bit. */
2065 if (GET_CODE (dst) == MEM
2066 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2067 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2068 && bytelen == GET_MODE_SIZE (mode))
2069 {
2070 emit_move_insn (change_address (dst, mode,
2071 plus_constant (XEXP (dst, 0),
2072 bytepos)),
2073 tmps[i]);
2074 }
2075 else
2076 {
2077 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2079 }
2080 }
2081 emit_queue();
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2086 }
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment.
2096 */
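/* Usage sketch (illustrative; SRCREG and TYPE are hypothetical).  After
   expanding a call whose small structure result comes back in the return
   register, the value can be spilled into a stack temporary:

     rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   Passing NULL_RTX for TGTBLK lets the routine allocate the temporary.  */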
2097
2098 rtx
2099 copy_blkmode_from_reg (tgtblk, srcreg, type)
2100 rtx tgtblk;
2101 rtx srcreg;
2102 tree type;
2103 {
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2114 }
2115
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2122
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2129 * BITS_PER_UNIT));
2130
2131 /* Copy the structure BITSIZE bits at a time.
2132
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2139 {
2140
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2148 BLKmode);
2149
2150 /* We need a new destination operand each time bitpos is on
2151 a word boundary. */
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154
2155 /* Use xbitpos for the source extraction (right justified) and
2156 bitpos for the destination store (left justified). */
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2161 word_mode,
2162 bitsize / BITS_PER_UNIT,
2163 BITS_PER_WORD),
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 }
2166 return tgtblk;
2167 }
2168
2169
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2172
2173 void
2174 use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2176 {
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 }
2185
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
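/* Usage sketch (illustrative; register 4 is hypothetical).  Record that a
   call reads two consecutive argument registers, so that flow analysis
   sees the uses; the resulting list is normally attached to the call's
   CALL_INSN_FUNCTION_USAGE by the caller:

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);  */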
2188
2189 void
2190 use_regs (call_fusage, regno, nregs)
2191 rtx *call_fusage;
2192 int regno;
2193 int nregs;
2194 {
2195 int i;
2196
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2202 }
2203
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207
2208 void
2209 use_group_regs (call_fusage, regs)
2210 rtx *call_fusage;
2211 rtx regs;
2212 {
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
2225 }
2226 \f
2227 /* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
2229 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2230 we can assume. */
2231
2232 static void
2233 clear_by_pieces (to, len, align)
2234 rtx to;
2235 int len, align;
2236 {
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2242
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2249
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2255
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2257
2258 /* If copying requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2263 {
2264 /* Determine the main mode we'll be using */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2269
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2271 {
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2275 }
2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2281 }
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2284 }
2285
2286 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2289
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
2292
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2306 GET_MODE_SIZE (mode)))
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 if (data.len != 0)
2314 abort ();
2315 }
2316
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2320
2321 static void
2322 clear_by_pieces_1 (genfun, mode, data)
2323 rtx (*genfun) PARAMS ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2326 {
2327 register int size = GET_MODE_SIZE (mode);
2328 register rtx to1;
2329
2330 while (data->len >= size)
2331 {
2332 if (data->reverse) data->offset -= size;
2333
2334 to1 = (data->autinc_to
2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2338 data->offset))));
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340
2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343
2344 emit_insn ((*genfun) (to1, const0_rtx));
2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347
2348 if (! data->reverse) data->offset += size;
2349
2350 data->len -= size;
2351 }
2352 }
2353 \f
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2356 the maximum alignment we can assume it has, measured in bytes.
2357
2358 If we call a function that returns the length of the block, return it. */
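/* Usage sketch (illustrative; the MEM and its size are hypothetical).
   Zero a 32-byte, word-aligned BLKmode object:

     clear_storage (object, GEN_INT (32), UNITS_PER_WORD);  */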
2359
2360 rtx
2361 clear_storage (object, size, align)
2362 rtx object;
2363 rtx size;
2364 int align;
2365 {
2366 #ifdef TARGET_MEM_FUNCTIONS
2367 static tree fn;
2368 tree call_expr, arg_list;
2369 #endif
2370 rtx retval = 0;
2371
2372 if (GET_MODE (object) == BLKmode)
2373 {
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2376
2377 if (GET_CODE (size) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2380
2381 else
2382 {
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2386
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2389
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2392 {
2393 enum insn_code code = clrstr_optab[(int) mode];
2394 insn_operand_predicate_fn pred;
2395
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2403 <= (GET_MODE_MASK (mode) >> 1)))
2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2409 {
2410 rtx op1;
2411 rtx last = get_last_insn ();
2412 rtx pat;
2413
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2418
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2420 if (pat)
2421 {
2422 emit_insn (pat);
2423 return 0;
2424 }
2425 else
2426 delete_insns_since (last);
2427 }
2428 }
2429
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2431
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2435
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2439
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2443 emit_queue.
2444
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2451
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2454 #else
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2458 #endif
2459
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2464
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2467
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2470 incorrect code.
2471
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn == NULL_TREE)
2475 {
2476 tree fntype;
2477
2478 /* This was copied from except.c; I don't know if all this is
2479 necessary in this context or not. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2492 pop_obstacks ();
2493 }
2494
2495 /* We need to make an argument list for the function call.
2496
2497 memset has three arguments, the first is a void * address, the
2498 second an integer with the initialization value, and the last is a
2499 size_t byte count for the copy. */
2500 arg_list
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2503 object));
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2517
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2519 #else
2520 emit_library_call (bzero_libfunc, 0,
2521 VOIDmode, 2, object, Pmode, size,
2522 TYPE_MODE (integer_type_node));
2523 #endif
2524 }
2525 }
2526 else
2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528
2529 return retval;
2530 }
2531
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2536
2537 Return the last instruction emitted. */
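/* Usage sketch (illustrative).  Load a constant into a fresh pseudo and
   store it into a hypothetical SImode MEM `mem':

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));
     emit_move_insn (mem, reg);  */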
2538
2539 rtx
2540 emit_move_insn (x, y)
2541 rtx x, y;
2542 {
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2547
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2549 abort ();
2550
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 ;
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2556
2557 /* If X or Y are memory references, verify that their addresses are valid
2558 for the machine. */
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2565
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2568 || (flag_force_addr
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2571
2572 if (mode == BLKmode)
2573 abort ();
2574
2575 return emit_move_insn_1 (x, y);
2576 }
2577
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2581
2582 rtx
2583 emit_move_insn_1 (x, y)
2584 rtx x, y;
2585 {
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2589 int i;
2590
2591 if (mode >= MAX_MACHINE_MODE)
2592 abort ();
2593
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2595 return
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2601 * BITS_PER_UNIT),
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2604 0))
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2607 {
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
2610
2611 /* If this is a stack, push the highpart first, so it
2612 will be in the argument order.
2613
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2617 {
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2623 gen_imagpart (submode, y)));
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2626 gen_realpart (submode, y)));
2627 #else
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_realpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_imagpart (submode, y)));
2634 #endif
2635 }
2636 else
2637 {
2638 rtx realpart_x, realpart_y;
2639 rtx imagpart_x, imagpart_y;
2640
2641 /* If this is a complex value with each part being smaller than a
2642 word, the usual calling sequence will likely pack the pieces into
2643 a single register. Unfortunately, SUBREG of hard registers only
2644 deals in terms of words, so we have a problem converting input
2645 arguments to the CONCAT of two registers that is used elsewhere
2646 for complex values. If this is before reload, we can copy it into
2647 memory and reload. FIXME, we should see about using extract and
2648 insert on integer registers, but complex short and complex char
2649 variables should be rarely used. */
2650 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2651 && (reload_in_progress | reload_completed) == 0)
2652 {
2653 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2654 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2655
2656 if (packed_dest_p || packed_src_p)
2657 {
2658 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2659 ? MODE_FLOAT : MODE_INT);
2660
2661 enum machine_mode reg_mode =
2662 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2663
2664 if (reg_mode != BLKmode)
2665 {
2666 rtx mem = assign_stack_temp (reg_mode,
2667 GET_MODE_SIZE (mode), 0);
2668
2669 rtx cmem = change_address (mem, mode, NULL_RTX);
2670
2671 cfun->cannot_inline = "function uses short complex types";
2672
2673 if (packed_dest_p)
2674 {
2675 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2676 emit_move_insn_1 (cmem, y);
2677 return emit_move_insn_1 (sreg, mem);
2678 }
2679 else
2680 {
2681 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2682 emit_move_insn_1 (mem, sreg);
2683 return emit_move_insn_1 (x, cmem);
2684 }
2685 }
2686 }
2687 }
2688
2689 realpart_x = gen_realpart (submode, x);
2690 realpart_y = gen_realpart (submode, y);
2691 imagpart_x = gen_imagpart (submode, x);
2692 imagpart_y = gen_imagpart (submode, y);
2693
2694 /* Show the output dies here. This is necessary for SUBREGs
2695 of pseudos since we cannot track their lifetimes correctly;
2696 hard regs shouldn't appear here except as return values.
2697 We never want to emit such a clobber after reload. */
2698 if (x != y
2699 && ! (reload_in_progress || reload_completed)
2700 && (GET_CODE (realpart_x) == SUBREG
2701 || GET_CODE (imagpart_x) == SUBREG))
2702 {
2703 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2704 }
2705
2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2707 (realpart_x, realpart_y));
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (imagpart_x, imagpart_y));
2710 }
2711
2712 return get_last_insn ();
2713 }
2714
2715 /* This will handle any multi-word mode that lacks a move_insn pattern.
2716 However, you will get better code if you define such patterns,
2717 even if they must turn into multiple assembler instructions. */
2718 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2719 {
2720 rtx last_insn = 0;
2721 rtx seq;
2722 int need_clobber;
2723
2724 #ifdef PUSH_ROUNDING
2725
2726 /* If X is a push on the stack, do the push now and replace
2727 X with a reference to the stack pointer. */
2728 if (push_operand (x, GET_MODE (x)))
2729 {
2730 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2731 x = change_address (x, VOIDmode, stack_pointer_rtx);
2732 }
2733 #endif
2734
2735 start_sequence ();
2736
2737 need_clobber = 0;
2738 for (i = 0;
2739 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2740 i++)
2741 {
2742 rtx xpart = operand_subword (x, i, 1, mode);
2743 rtx ypart = operand_subword (y, i, 1, mode);
2744
2745 /* If we can't get a part of Y, put Y into memory if it is a
2746 constant. Otherwise, force it into a register. If we still
2747 can't get a part of Y, abort. */
2748 if (ypart == 0 && CONSTANT_P (y))
2749 {
2750 y = force_const_mem (mode, y);
2751 ypart = operand_subword (y, i, 1, mode);
2752 }
2753 else if (ypart == 0)
2754 ypart = operand_subword_force (y, i, mode);
2755
2756 if (xpart == 0 || ypart == 0)
2757 abort ();
2758
2759 need_clobber |= (GET_CODE (xpart) == SUBREG);
2760
2761 last_insn = emit_move_insn (xpart, ypart);
2762 }
2763
2764 seq = gen_sequence ();
2765 end_sequence ();
2766
2767 /* Show the output dies here. This is necessary for SUBREGs
2768 of pseudos since we cannot track their lifetimes correctly;
2769 hard regs shouldn't appear here except as return values.
2770 We never want to emit such a clobber after reload. */
2771 if (x != y
2772 && ! (reload_in_progress || reload_completed)
2773 && need_clobber != 0)
2774 {
2775 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2776 }
2777
2778 emit_insn (seq);
2779
2780 return last_insn;
2781 }
2782 else
2783 abort ();
2784 }
2785 \f
2786 /* Pushing data onto the stack. */
2787
2788 /* Push a block of length SIZE (perhaps variable)
2789 and return an rtx to address the beginning of the block.
2790 Note that it is not possible for the value returned to be a QUEUED.
2791 The value may be virtual_outgoing_args_rtx.
2792
2793 EXTRA is the number of bytes of padding to push in addition to SIZE.
2794 BELOW nonzero means this padding comes at low addresses;
2795 otherwise, the padding comes at high addresses. */
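/* Usage sketch (illustrative).  Allocate 16 bytes of argument space and
   obtain a BLKmode MEM addressing it:

     rtx addr = push_block (GEN_INT (16), 0, 0);
     rtx blk = gen_rtx_MEM (BLKmode, addr);  */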
2796
2797 rtx
2798 push_block (size, extra, below)
2799 rtx size;
2800 int extra, below;
2801 {
2802 register rtx temp;
2803
2804 size = convert_modes (Pmode, ptr_mode, size, 1);
2805 if (CONSTANT_P (size))
2806 anti_adjust_stack (plus_constant (size, extra));
2807 else if (GET_CODE (size) == REG && extra == 0)
2808 anti_adjust_stack (size);
2809 else
2810 {
2811 rtx temp = copy_to_mode_reg (Pmode, size);
2812 if (extra != 0)
2813 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2814 temp, 0, OPTAB_LIB_WIDEN);
2815 anti_adjust_stack (temp);
2816 }
2817
2818 #if defined (STACK_GROWS_DOWNWARD) \
2819 || (defined (ARGS_GROW_DOWNWARD) \
2820 && !defined (ACCUMULATE_OUTGOING_ARGS))
2821
2822 /* Return the lowest stack address when STACK or ARGS grow downward and
2823 we are not accumulating outgoing arguments (the c4x port uses such
2824 conventions). */
2825 temp = virtual_outgoing_args_rtx;
2826 if (extra != 0 && below)
2827 temp = plus_constant (temp, extra);
2828 #else
2829 if (GET_CODE (size) == CONST_INT)
2830 temp = plus_constant (virtual_outgoing_args_rtx,
2831 - INTVAL (size) - (below ? 0 : extra));
2832 else if (extra != 0 && !below)
2833 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2834 negate_rtx (Pmode, plus_constant (size, extra)));
2835 else
2836 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2837 negate_rtx (Pmode, size));
2838 #endif
2839
2840 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2841 }
2842
2843 rtx
2844 gen_push_operand ()
2845 {
2846 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2847 }
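/* Usage sketch: on targets with push insns, a MEM built around the push
   operand pushes as it stores; this is exactly the form emit_push_insn
   builds below when it calls move_by_pieces:

     rtx push_mem = gen_rtx_MEM (BLKmode, gen_push_operand ());  */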
2848
2849 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2850 block of SIZE bytes. */
2851
2852 static rtx
2853 get_push_address (size)
2854 int size;
2855 {
2856 register rtx temp;
2857
2858 if (STACK_PUSH_CODE == POST_DEC)
2859 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2860 else if (STACK_PUSH_CODE == POST_INC)
2861 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else
2863 temp = stack_pointer_rtx;
2864
2865 return copy_to_reg (temp);
2866 }
2867
2868 /* Generate code to push X onto the stack, assuming it has mode MODE and
2869 type TYPE.
2870 MODE is redundant except when X is a CONST_INT (since they don't
2871 carry mode info).
2872 SIZE is an rtx for the size of data to be copied (in bytes),
2873 needed only if X is BLKmode.
2874
2875 ALIGN (in bytes) is the maximum alignment we can assume.
2876
2877 If PARTIAL and REG are both nonzero, then copy that many of the first
2878 words of X into registers starting with REG, and push the rest of X.
2879 The amount of space pushed is decreased by PARTIAL words,
2880 rounded *down* to a multiple of PARM_BOUNDARY.
2881 REG must be a hard register in this case.
2882 If REG is zero but PARTIAL is not, take all other actions for an
2883 argument partially in registers, but do not actually load any
2884 registers.
2885
2886 EXTRA is the amount in bytes of extra space to leave next to this arg.
2887 This is ignored if an argument block has already been allocated.
2888
2889 On a machine that lacks real push insns, ARGS_ADDR is the address of
2890 the bottom of the argument block for this call. We use indexing off there
2891 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2892 argument block has not been preallocated.
2893
2894 ARGS_SO_FAR is the size of args previously pushed for this call.
2895
2896 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2897 for arguments passed in registers. If nonzero, it will be the number
2898 of bytes required. */
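/* Usage sketch (illustrative; assumes a target with push insns, so
   ARGS_ADDR is zero).  Push a single SImode value X as an argument, with
   no partial-register part and no extra padding:

     emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
                     GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, GEN_INT (0), 0, NULL_RTX);  */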
2899
2900 void
2901 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2902 args_addr, args_so_far, reg_parm_stack_space,
2903 alignment_pad)
2904 register rtx x;
2905 enum machine_mode mode;
2906 tree type;
2907 rtx size;
2908 int align;
2909 int partial;
2910 rtx reg;
2911 int extra;
2912 rtx args_addr;
2913 rtx args_so_far;
2914 int reg_parm_stack_space;
2915 rtx alignment_pad;
2916 {
2917 rtx xinner;
2918 enum direction stack_direction
2919 #ifdef STACK_GROWS_DOWNWARD
2920 = downward;
2921 #else
2922 = upward;
2923 #endif
2924
2925 /* Decide where to pad the argument: `downward' for below,
2926 `upward' for above, or `none' for don't pad it.
2927 Default is below for small data on big-endian machines; else above. */
2928 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2929
2930 /* Invert direction if stack is post-update. */
2931 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2932 if (where_pad != none)
2933 where_pad = (where_pad == downward ? upward : downward);
2934
2935 xinner = x = protect_from_queue (x, 0);
2936
2937 if (mode == BLKmode)
2938 {
2939 /* Copy a block into the stack, entirely or partially. */
2940
2941 register rtx temp;
2942 int used = partial * UNITS_PER_WORD;
2943 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2944 int skip;
2945
2946 if (size == 0)
2947 abort ();
2948
2949 used -= offset;
2950
2951 /* USED is now the # of bytes we need not copy to the stack
2952 because registers will take care of them. */
2953
2954 if (partial != 0)
2955 xinner = change_address (xinner, BLKmode,
2956 plus_constant (XEXP (xinner, 0), used));
2957
2958 /* If the partial register-part of the arg counts in its stack size,
2959 skip the part of stack space corresponding to the registers.
2960 Otherwise, start copying to the beginning of the stack space,
2961 by setting SKIP to 0. */
2962 skip = (reg_parm_stack_space == 0) ? 0 : used;
2963
2964 #ifdef PUSH_ROUNDING
2965 /* Do it with several push insns if that doesn't take lots of insns
2966 and if there is no difficulty with push insns that skip bytes
2967 on the stack for alignment purposes. */
2968 if (args_addr == 0
2969 && GET_CODE (size) == CONST_INT
2970 && skip == 0
2971 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2972 /* Here we avoid the case of a structure whose weak alignment
2973 forces many pushes of a small amount of data,
2974 and such small pushes do rounding that causes trouble. */
2975 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2976 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2977 || PUSH_ROUNDING (align) == align)
2978 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2979 {
2980 /* Push padding now if padding above and stack grows down,
2981 or if padding below and stack grows up.
2982 But if space already allocated, this has already been done. */
2983 if (extra && args_addr == 0
2984 && where_pad != none && where_pad != stack_direction)
2985 anti_adjust_stack (GEN_INT (extra));
2986
2987 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2988 INTVAL (size) - used, align);
2989
2990 if (current_function_check_memory_usage && ! in_check_memory_usage)
2991 {
2992 rtx temp;
2993
2994 in_check_memory_usage = 1;
2995 temp = get_push_address (INTVAL(size) - used);
2996 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2997 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2998 temp, Pmode,
2999 XEXP (xinner, 0), Pmode,
3000 GEN_INT (INTVAL(size) - used),
3001 TYPE_MODE (sizetype));
3002 else
3003 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3004 temp, Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype),
3007 GEN_INT (MEMORY_USE_RW),
3008 TYPE_MODE (integer_type_node));
3009 in_check_memory_usage = 0;
3010 }
3011 }
3012 else
3013 #endif /* PUSH_ROUNDING */
3014 {
3015 /* Otherwise make space on the stack and copy the data
3016 to the address of that space. */
3017
3018 /* Deduct words put into registers from the size we must copy. */
3019 if (partial != 0)
3020 {
3021 if (GET_CODE (size) == CONST_INT)
3022 size = GEN_INT (INTVAL (size) - used);
3023 else
3024 size = expand_binop (GET_MODE (size), sub_optab, size,
3025 GEN_INT (used), NULL_RTX, 0,
3026 OPTAB_LIB_WIDEN);
3027 }
3028
3029 /* Get the address of the stack space.
3030 In this case, we do not deal with EXTRA separately.
3031 A single stack adjust will do. */
3032 if (! args_addr)
3033 {
3034 temp = push_block (size, extra, where_pad == downward);
3035 extra = 0;
3036 }
3037 else if (GET_CODE (args_so_far) == CONST_INT)
3038 temp = memory_address (BLKmode,
3039 plus_constant (args_addr,
3040 skip + INTVAL (args_so_far)));
3041 else
3042 temp = memory_address (BLKmode,
3043 plus_constant (gen_rtx_PLUS (Pmode,
3044 args_addr,
3045 args_so_far),
3046 skip));
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 {
3049 rtx target;
3050
3051 in_check_memory_usage = 1;
3052 target = copy_to_reg (temp);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3055 target, Pmode,
3056 XEXP (xinner, 0), Pmode,
3057 size, TYPE_MODE (sizetype));
3058 else
3059 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3060 target, Pmode,
3061 size, TYPE_MODE (sizetype),
3062 GEN_INT (MEMORY_USE_RW),
3063 TYPE_MODE (integer_type_node));
3064 in_check_memory_usage = 0;
3065 }
3066
3067 /* TEMP is the address of the block. Copy the data there. */
3068 if (GET_CODE (size) == CONST_INT
3069 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3070 {
3071 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3072 INTVAL (size), align);
3073 goto ret;
3074 }
3075 else
3076 {
3077 rtx opalign = GEN_INT (align);
3078 enum machine_mode mode;
3079 rtx target = gen_rtx_MEM (BLKmode, temp);
3080
3081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3082 mode != VOIDmode;
3083 mode = GET_MODE_WIDER_MODE (mode))
3084 {
3085 enum insn_code code = movstr_optab[(int) mode];
3086 insn_operand_predicate_fn pred;
3087
3088 if (code != CODE_FOR_nothing
3089 && ((GET_CODE (size) == CONST_INT
3090 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3091 <= (GET_MODE_MASK (mode) >> 1)))
3092 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3093 && (!(pred = insn_data[(int) code].operand[0].predicate)
3094 || ((*pred) (target, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[1].predicate)
3096 || ((*pred) (xinner, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[3].predicate)
3098 || ((*pred) (opalign, VOIDmode))))
3099 {
3100 rtx op2 = convert_to_mode (mode, size, 1);
3101 rtx last = get_last_insn ();
3102 rtx pat;
3103
3104 pred = insn_data[(int) code].operand[2].predicate;
3105 if (pred != 0 && ! (*pred) (op2, mode))
3106 op2 = copy_to_mode_reg (mode, op2);
3107
3108 pat = GEN_FCN ((int) code) (target, xinner,
3109 op2, opalign);
3110 if (pat)
3111 {
3112 emit_insn (pat);
3113 goto ret;
3114 }
3115 else
3116 delete_insns_since (last);
3117 }
3118 }
3119 }
3120
3121 #ifndef ACCUMULATE_OUTGOING_ARGS
3122 /* If the source is referenced relative to the stack pointer,
3123 copy it to another register to stabilize it. We do not need
3124 to do this if we know that we won't be changing sp. */
3125
3126 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3127 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3128 temp = copy_to_reg (temp);
3129 #endif
3130
3131 /* Make inhibit_defer_pop nonzero around the library call
3132 to force it to pop the bcopy-arguments right away. */
3133 NO_DEFER_POP;
3134 #ifdef TARGET_MEM_FUNCTIONS
3135 emit_library_call (memcpy_libfunc, 0,
3136 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3137 convert_to_mode (TYPE_MODE (sizetype),
3138 size, TREE_UNSIGNED (sizetype)),
3139 TYPE_MODE (sizetype));
3140 #else
3141 emit_library_call (bcopy_libfunc, 0,
3142 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3143 convert_to_mode (TYPE_MODE (integer_type_node),
3144 size,
3145 TREE_UNSIGNED (integer_type_node)),
3146 TYPE_MODE (integer_type_node));
3147 #endif
3148 OK_DEFER_POP;
3149 }
3150 }
3151 else if (partial > 0)
3152 {
3153 /* Scalar partly in registers. */
3154
3155 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3156 int i;
3157 int not_stack;
3158 /* # words of start of argument
3159 that we must make space for but need not store. */
3160 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3161 int args_offset = INTVAL (args_so_far);
3162 int skip;
3163
3164 /* Push padding now if padding above and stack grows down,
3165 or if padding below and stack grows up.
3166 But if space already allocated, this has already been done. */
3167 if (extra && args_addr == 0
3168 && where_pad != none && where_pad != stack_direction)
3169 anti_adjust_stack (GEN_INT (extra));
3170
3171 /* If we make space by pushing it, we might as well push
3172 the real data. Otherwise, we can leave OFFSET nonzero
3173 and leave the space uninitialized. */
3174 if (args_addr == 0)
3175 offset = 0;
3176
3177 /* Now NOT_STACK gets the number of words that we don't need to
3178 allocate on the stack. */
3179 not_stack = partial - offset;
3180
3181 /* If the partial register-part of the arg counts in its stack size,
3182 skip the part of stack space corresponding to the registers.
3183 Otherwise, start copying to the beginning of the stack space,
3184 by setting SKIP to 0. */
3185 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3186
3187 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3188 x = validize_mem (force_const_mem (mode, x));
3189
3190 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3191 SUBREGs of such registers are not allowed. */
3192 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3193 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3194 x = copy_to_reg (x);
3195
3196 /* Loop over all the words allocated on the stack for this arg. */
3197 /* We can do it by words, because any scalar bigger than a word
3198 has a size that is a multiple of a word. */
3199 #ifndef PUSH_ARGS_REVERSED
3200 for (i = not_stack; i < size; i++)
3201 #else
3202 for (i = size - 1; i >= not_stack; i--)
3203 #endif
3204 if (i >= not_stack + offset)
3205 emit_push_insn (operand_subword_force (x, i, mode),
3206 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3207 0, args_addr,
3208 GEN_INT (args_offset + ((i - not_stack + skip)
3209 * UNITS_PER_WORD)),
3210 reg_parm_stack_space, alignment_pad);
3211 }
3212 else
3213 {
3214 rtx addr;
3215 rtx target = NULL_RTX;
3216
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
3222 anti_adjust_stack (GEN_INT (extra));
3223
3224 #ifdef PUSH_ROUNDING
3225 if (args_addr == 0)
3226 addr = gen_push_operand ();
3227 else
3228 #endif
3229 {
3230 if (GET_CODE (args_so_far) == CONST_INT)
3231 addr
3232 = memory_address (mode,
3233 plus_constant (args_addr,
3234 INTVAL (args_so_far)));
3235 else
3236 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3237 args_so_far));
3238 target = addr;
3239 }
3240
3241 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3242
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3244 {
3245 in_check_memory_usage = 1;
3246 if (target == 0)
3247 target = get_push_address (GET_MODE_SIZE (mode));
3248
3249 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3250 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3251 target, Pmode,
3252 XEXP (x, 0), Pmode,
3253 GEN_INT (GET_MODE_SIZE (mode)),
3254 TYPE_MODE (sizetype));
3255 else
3256 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3257 target, Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype),
3260 GEN_INT (MEMORY_USE_RW),
3261 TYPE_MODE (integer_type_node));
3262 in_check_memory_usage = 0;
3263 }
3264 }
3265
3266 ret:
3267 /* If part should go in registers, copy that part
3268 into the appropriate registers. Do this now, at the end,
3269 since mem-to-mem copies above may do function calls. */
3270 if (partial > 0 && reg != 0)
3271 {
3272 /* Handle calls that pass values in multiple non-contiguous locations.
3273 The Irix 6 ABI has examples of this. */
3274 if (GET_CODE (reg) == PARALLEL)
3275 emit_group_load (reg, x, -1, align); /* ??? size? */
3276 else
3277 move_block_to_reg (REGNO (reg), x, partial, mode);
3278 }
3279
3280 if (extra && args_addr == 0 && where_pad == stack_direction)
3281 anti_adjust_stack (GEN_INT (extra));
3282
3283 if (alignment_pad)
3284 anti_adjust_stack (alignment_pad);
3285 }
3286 \f
3287 /* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
3292
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
3296 We now use WANT_VALUE to decide whether to do this. */
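/* Usage sketch (illustrative; X_DECL and Y_DECL are hypothetical front-end
   VAR_DECL nodes).  Expand the assignment `x = y', discarding the value of
   the assignment expression:

     expand_assignment (x_decl, y_decl, 0, 0);  */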
3297
3298 rtx
3299 expand_assignment (to, from, want_value, suggest_reg)
3300 tree to, from;
3301 int want_value;
3302 int suggest_reg ATTRIBUTE_UNUSED;
3303 {
3304 register rtx to_rtx = 0;
3305 rtx result;
3306
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3308
3309 if (TREE_CODE (to) == ERROR_MARK)
3310 {
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3313 }
3314
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, has the same
3319 problem. */
3320
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
3323 {
3324 enum machine_mode mode1;
3325 int bitsize;
3326 int bitpos;
3327 tree offset;
3328 int unsignedp;
3329 int volatilep = 0;
3330 tree tem;
3331 int alignment;
3332
3333 push_temp_slots ();
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
3336
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3339
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3342
3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3344 if (offset != 0)
3345 {
3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3347
3348 if (GET_CODE (to_rtx) != MEM)
3349 abort ();
3350
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3352 {
3353 #ifdef POINTERS_EXTEND_UNSIGNED
3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3355 #else
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3357 #endif
3358 }
3359
3360 /* A constant address in TO_RTX can have VOIDmode; we must not try
3361 to call force_reg for that case. Avoid that case. */
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3365 && bitsize
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 {
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3372 (bitpos /
3373 BITS_PER_UNIT)));
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3375 to_rtx = temp;
3376 else
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3379 XEXP (temp, 0)));
3380 bitpos = 0;
3381 }
3382
3383 to_rtx = change_address (to_rtx, VOIDmode,
3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3385 force_reg (ptr_mode,
3386 offset_rtx)));
3387 }
3388
3389 if (volatilep)
3390 {
3391 if (GET_CODE (to_rtx) == MEM)
3392 {
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3396 if (offset == 0)
3397 to_rtx = copy_rtx (to_rtx);
3398
3399 MEM_VOLATILE_P (to_rtx) = 1;
3400 }
3401 #if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3404 else
3405 abort ();
3406 #endif
3407 }
3408
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 {
3412 if (offset == 0)
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3416 }
3417
3418 /* Check the access. */
3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3420 {
3421 rtx to_addr;
3422 int size;
3423 int best_mode_size;
3424 enum machine_mode best_mode;
3425
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3428 mode1, volatilep);
3429 if (best_mode == VOIDmode)
3430 best_mode = QImode;
3431
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
3436
3437 /* Check the access right of the pointer. */
3438 if (size)
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3440 to_addr, Pmode,
3441 GEN_INT (size), TYPE_MODE (sizetype),
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
3444 }
3445
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3447 (want_value
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3450 : VOIDmode),
3451 unsignedp,
3452 /* Required alignment of containing datum. */
3453 alignment,
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
3456 preserve_temp_slots (result);
3457 free_temp_slots ();
3458 pop_temp_slots ();
3459
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3464 result,
3465 TREE_UNSIGNED (TREE_TYPE (to)))
3466 : NULL_RTX);
3467 }
3468
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
3473 requires loading up part of an address in a separate insn.
3474
3475 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3476 a promoted variable where the zero- or sign- extension needs to be done.
3477 Handling this in the normal way is safe because no computation is done
3478 before the call. */
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3481 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3482 {
3483 rtx value;
3484
3485 push_temp_slots ();
3486 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3487 if (to_rtx == 0)
3488 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3489
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 if (GET_CODE (to_rtx) == PARALLEL)
3493 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3494 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3495 else if (GET_MODE (to_rtx) == BLKmode)
3496 emit_block_move (to_rtx, value, expr_size (from),
3497 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3498 else
3499 {
3500 #ifdef POINTERS_EXTEND_UNSIGNED
3501 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3502 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3503 value = convert_memory_address (GET_MODE (to_rtx), value);
3504 #endif
3505 emit_move_insn (to_rtx, value);
3506 }
3507 preserve_temp_slots (to_rtx);
3508 free_temp_slots ();
3509 pop_temp_slots ();
3510 return want_value ? to_rtx : NULL_RTX;
3511 }
3512
3513 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3514 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515
3516 if (to_rtx == 0)
3517 {
3518 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3519 if (GET_CODE (to_rtx) == MEM)
3520 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3521 }
3522
3523 /* Don't move directly into a return register. */
3524 if (TREE_CODE (to) == RESULT_DECL
3525 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3526 {
3527 rtx temp;
3528
3529 push_temp_slots ();
3530 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3531
3532 if (GET_CODE (to_rtx) == PARALLEL)
3533 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3534 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3535 else
3536 emit_move_insn (to_rtx, temp);
3537
3538 preserve_temp_slots (to_rtx);
3539 free_temp_slots ();
3540 pop_temp_slots ();
3541 return want_value ? to_rtx : NULL_RTX;
3542 }
3543
3544 /* In case we are returning the contents of an object which overlaps
3545 the place the value is being stored, use a safe function when copying
3546 a value through a pointer into a structure value return block. */
3547 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3548 && current_function_returns_struct
3549 && !current_function_returns_pcc_struct)
3550 {
3551 rtx from_rtx, size;
3552
3553 push_temp_slots ();
3554 size = expr_size (from);
3555 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3556 EXPAND_MEMORY_USE_DONT);
3557
3558 /* Copy the rights of the bitmap. */
3559 if (current_function_check_memory_usage)
3560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3561 XEXP (to_rtx, 0), Pmode,
3562 XEXP (from_rtx, 0), Pmode,
3563 convert_to_mode (TYPE_MODE (sizetype),
3564 size, TREE_UNSIGNED (sizetype)),
3565 TYPE_MODE (sizetype));
3566
3567 #ifdef TARGET_MEM_FUNCTIONS
3568 emit_library_call (memcpy_libfunc, 0,
3569 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3570 XEXP (from_rtx, 0), Pmode,
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
3573 TYPE_MODE (sizetype));
3574 #else
3575 emit_library_call (bcopy_libfunc, 0,
3576 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3577 XEXP (to_rtx, 0), Pmode,
3578 convert_to_mode (TYPE_MODE (integer_type_node),
3579 size, TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
3581 #endif
3582
3583 preserve_temp_slots (to_rtx);
3584 free_temp_slots ();
3585 pop_temp_slots ();
3586 return want_value ? to_rtx : NULL_RTX;
3587 }
3588
3589 /* Compute FROM and store the value in the rtx we got. */
3590
3591 push_temp_slots ();
3592 result = store_expr (from, to_rtx, want_value);
3593 preserve_temp_slots (result);
3594 free_temp_slots ();
3595 pop_temp_slots ();
3596 return want_value ? result : NULL_RTX;
3597 }
3598
3599 /* Generate code for computing expression EXP,
3600 and storing the value into TARGET.
3601 TARGET may contain a QUEUED rtx.
3602
3603 If WANT_VALUE is nonzero, return a copy of the value
3604 not in TARGET, so that we can be sure to use the proper
3605 value in a containing expression even if TARGET has something
3606 else stored in it. If possible, we copy the value through a pseudo
3607 and return that pseudo. Or, if the value is constant, we try to
3608 return the constant. In some cases, we return a pseudo
3609 copied *from* TARGET.
3610
3611 If the mode is BLKmode then we may return TARGET itself.
3612 It turns out that in BLKmode it doesn't cause a problem,
3613 because C has no operators that could combine two different
3614 assignments into the same BLKmode object with different values
3615 with no sequence point. Will other languages need this to
3616 be more thorough?
3617
3618 If WANT_VALUE is 0, we return NULL, to make sure
3619 to catch quickly any cases where the caller uses the value
3620 and fails to set WANT_VALUE. */
3621
3622 rtx
3623 store_expr (exp, target, want_value)
3624 register tree exp;
3625 register rtx target;
3626 int want_value;
3627 {
3628 register rtx temp;
3629 int dont_return_target = 0;
3630
3631 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 {
3633 /* Perform first part of compound expression, then assign from second
3634 part. */
3635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3636 emit_queue ();
3637 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3638 }
3639 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 {
3641 /* For conditional expression, get safe form of the target. Then
3642 test the condition, doing the appropriate assignment on either
3643 side. This avoids the creation of unnecessary temporaries.
3644 For non-BLKmode, it is more efficient not to do this. */
3645
3646 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647
3648 emit_queue ();
3649 target = protect_from_queue (target, 1);
3650
3651 do_pending_stack_adjust ();
3652 NO_DEFER_POP;
3653 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3654 start_cleanup_deferral ();
3655 store_expr (TREE_OPERAND (exp, 1), target, 0);
3656 end_cleanup_deferral ();
3657 emit_queue ();
3658 emit_jump_insn (gen_jump (lab2));
3659 emit_barrier ();
3660 emit_label (lab1);
3661 start_cleanup_deferral ();
3662 store_expr (TREE_OPERAND (exp, 2), target, 0);
3663 end_cleanup_deferral ();
3664 emit_queue ();
3665 emit_label (lab2);
3666 OK_DEFER_POP;
3667
3668 return want_value ? target : NULL_RTX;
3669 }
3670 else if (queued_subexp_p (target))
3671 /* If target contains a postincrement, let's not risk
3672 using it as the place to generate the rhs. */
3673 {
3674 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 {
3676 /* Expand EXP into a new pseudo. */
3677 temp = gen_reg_rtx (GET_MODE (target));
3678 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 }
3680 else
3681 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3682
3683 /* If target is volatile, ANSI requires accessing the value
3684 *from* the target, if it is accessed. So make that happen.
3685 In no case return the target itself. */
3686 if (! MEM_VOLATILE_P (target) && want_value)
3687 dont_return_target = 1;
3688 }
3689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3690 && GET_MODE (target) != BLKmode)
3691 /* If target is in memory and caller wants value in a register instead,
3692 arrange that. Pass TARGET as target for expand_expr so that,
3693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3694 We know expand_expr will not use the target in that case.
3695 Don't do this if TARGET is volatile because we are supposed
3696 to write it and then read it. */
3697 {
3698 temp = expand_expr (exp, target, GET_MODE (target), 0);
3699 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3700 temp = copy_to_reg (temp);
3701 dont_return_target = 1;
3702 }
3703 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3704 /* If this is a scalar in a register that is stored in a wider mode
3705 than the declared mode, compute the result into its declared mode
3706 and then convert to the wider mode. Our value is the computed
3707 expression. */
3708 {
3709 /* If we don't want a value, we can do the conversion inside EXP,
3710 which will often result in some optimizations. Do the conversion
3711 in two steps: first change the signedness, if needed, then
3712 the extend. But don't do this if the type of EXP is a subtype
3713 of something else since then the conversion might involve
3714 more than just converting modes. */
3715 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3716 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3717 {
3718 if (TREE_UNSIGNED (TREE_TYPE (exp))
3719 != SUBREG_PROMOTED_UNSIGNED_P (target))
3720 exp
3721 = convert
3722 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3723 TREE_TYPE (exp)),
3724 exp);
3725
3726 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3727 SUBREG_PROMOTED_UNSIGNED_P (target)),
3728 exp);
3729 }
3730
3731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3732
3733 /* If TEMP is a volatile MEM and we want a result value, make
3734 the access now so it gets done only once. Likewise if
3735 it contains TARGET. */
3736 if (GET_CODE (temp) == MEM && want_value
3737 && (MEM_VOLATILE_P (temp)
3738 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3739 temp = copy_to_reg (temp);
3740
3741 /* If TEMP is a VOIDmode constant, use convert_modes to make
3742 sure that we properly convert it. */
3743 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 TYPE_MODE (TREE_TYPE (exp)), temp,
3746 SUBREG_PROMOTED_UNSIGNED_P (target));
3747
3748 convert_move (SUBREG_REG (target), temp,
3749 SUBREG_PROMOTED_UNSIGNED_P (target));
3750
3751 /* If we promoted a constant, change the mode back down to match
3752 target. Otherwise, the caller might get confused by a result whose
3753 mode is larger than expected. */
3754
3755 if (want_value && GET_MODE (temp) != GET_MODE (target)
3756 && GET_MODE (temp) != VOIDmode)
3757 {
3758 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3759 SUBREG_PROMOTED_VAR_P (temp) = 1;
3760 SUBREG_PROMOTED_UNSIGNED_P (temp)
3761 = SUBREG_PROMOTED_UNSIGNED_P (target);
3762 }
3763
3764 return want_value ? temp : NULL_RTX;
3765 }
3766 else
3767 {
3768 temp = expand_expr (exp, target, GET_MODE (target), 0);
3769 /* Return TARGET if it's a specified hardware register.
3770 If TARGET is a volatile mem ref, either return TARGET
3771 or return a reg copied *from* TARGET; ANSI requires this.
3772
3773 Otherwise, if TEMP is not TARGET, return TEMP
3774 if it is constant (for efficiency),
3775 or if we really want the correct value. */
3776 if (!(target && GET_CODE (target) == REG
3777 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3778 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3779 && ! rtx_equal_p (temp, target)
3780 && (CONSTANT_P (temp) || want_value))
3781 dont_return_target = 1;
3782 }
3783
3784 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3785 the same as that of TARGET, adjust the constant. This is needed, for
3786 example, in case it is a CONST_DOUBLE and we want only a word-sized
3787 value. */
3788 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3789 && TREE_CODE (exp) != ERROR_MARK
3790 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3791 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3792 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793
3794 if (current_function_check_memory_usage
3795 && GET_CODE (target) == MEM
3796 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 {
3798 if (GET_CODE (temp) == MEM)
3799 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3800 XEXP (target, 0), Pmode,
3801 XEXP (temp, 0), Pmode,
3802 expr_size (exp), TYPE_MODE (sizetype));
3803 else
3804 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3805 XEXP (target, 0), Pmode,
3806 expr_size (exp), TYPE_MODE (sizetype),
3807 GEN_INT (MEMORY_USE_WO),
3808 TYPE_MODE (integer_type_node));
3809 }
3810
3811 /* If value was not generated in the target, store it there.
3812 Convert the value to TARGET's type first if necessary. */
3813 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3814 one or both of them are volatile memory refs, we have to distinguish
3815 two cases:
3816 - expand_expr has used TARGET. In this case, we must not generate
3817 another copy. This can be detected by TARGET being equal according
3818 to == .
3819 - expand_expr has not used TARGET - that means that the source just
3820 happens to have the same RTX form. Since temp will have been created
3821 by expand_expr, it will compare unequal according to == .
3822 We must generate a copy in this case, to reach the correct number
3823 of volatile memory references. */
3824
3825 if ((! rtx_equal_p (temp, target)
3826 || (temp != target && (side_effects_p (temp)
3827 || side_effects_p (target))))
3828 && TREE_CODE (exp) != ERROR_MARK)
3829 {
3830 target = protect_from_queue (target, 1);
3831 if (GET_MODE (temp) != GET_MODE (target)
3832 && GET_MODE (temp) != VOIDmode)
3833 {
3834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3835 if (dont_return_target)
3836 {
3837 /* In this case, we will return TEMP,
3838 so make sure it has the proper mode.
3839 But don't forget to store the value into TARGET. */
3840 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3841 emit_move_insn (target, temp);
3842 }
3843 else
3844 convert_move (target, temp, unsignedp);
3845 }
3846
3847 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 {
3849 /* Handle copying a string constant into an array.
3850 The string constant may be shorter than the array.
3851 So copy just the string's actual length, and clear the rest. */
3852 rtx size;
3853 rtx addr;
3854
3855 /* Get the size of the data type of the string,
3856 which is actually the size of the target. */
3857 size = expr_size (exp);
3858 if (GET_CODE (size) == CONST_INT
3859 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3860 emit_block_move (target, temp, size,
3861 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3862 else
3863 {
3864 /* Compute the size of the data to copy from the string. */
3865 tree copy_size
3866 = size_binop (MIN_EXPR,
3867 make_tree (sizetype, size),
3868 convert (sizetype,
3869 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3870 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3871 VOIDmode, 0);
3872 rtx label = 0;
3873
3874 /* Copy that much. */
3875 emit_block_move (target, temp, copy_size_rtx,
3876 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3877
3878 /* Figure out how much is left in TARGET that we have to clear.
3879 Do all calculations in ptr_mode. */
3880
3881 addr = XEXP (target, 0);
3882 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3883
3884 if (GET_CODE (copy_size_rtx) == CONST_INT)
3885 {
3886 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3887 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3888 }
3889 else
3890 {
3891 addr = force_reg (ptr_mode, addr);
3892 addr = expand_binop (ptr_mode, add_optab, addr,
3893 copy_size_rtx, NULL_RTX, 0,
3894 OPTAB_LIB_WIDEN);
3895
3896 size = expand_binop (ptr_mode, sub_optab, size,
3897 copy_size_rtx, NULL_RTX, 0,
3898 OPTAB_LIB_WIDEN);
3899
3900 label = gen_label_rtx ();
3901 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3902 GET_MODE (size), 0, 0, label);
3903 }
3904
3905 if (size != const0_rtx)
3906 {
3907 /* Be sure we can write on ADDR. */
3908 if (current_function_check_memory_usage)
3909 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3910 addr, Pmode,
3911 size, TYPE_MODE (sizetype),
3912 GEN_INT (MEMORY_USE_WO),
3913 TYPE_MODE (integer_type_node));
3914 #ifdef TARGET_MEM_FUNCTIONS
3915 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3916 addr, ptr_mode,
3917 const0_rtx, TYPE_MODE (integer_type_node),
3918 convert_to_mode (TYPE_MODE (sizetype),
3919 size,
3920 TREE_UNSIGNED (sizetype)),
3921 TYPE_MODE (sizetype));
3922 #else
3923 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3924 addr, ptr_mode,
3925 convert_to_mode (TYPE_MODE (integer_type_node),
3926 size,
3927 TREE_UNSIGNED (integer_type_node)),
3928 TYPE_MODE (integer_type_node));
3929 #endif
3930 }
3931
3932 if (label)
3933 emit_label (label);
3934 }
3935 }
3936 /* Handle calls that return values in multiple non-contiguous locations.
3937 The Irix 6 ABI has examples of this. */
3938 else if (GET_CODE (target) == PARALLEL)
3939 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3940 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3941 else if (GET_MODE (temp) == BLKmode)
3942 emit_block_move (target, temp, expr_size (exp),
3943 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3944 else
3945 emit_move_insn (target, temp);
3946 }
3947
3948 /* If we don't want a value, return NULL_RTX. */
3949 if (! want_value)
3950 return NULL_RTX;
3951
3952 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3953 ??? The latter test doesn't seem to make sense. */
3954 else if (dont_return_target && GET_CODE (temp) != MEM)
3955 return temp;
3956
3957 /* Return TARGET itself if it is a hard register. */
3958 else if (want_value && GET_MODE (target) != BLKmode
3959 && ! (GET_CODE (target) == REG
3960 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3961 return copy_to_reg (target);
3962
3963 else
3964 return target;
3965 }
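
/* Illustrative sketch only, kept out of the build with #if 0: the typical
   way store_expr is used once the destination rtx is known, mirroring the
   call made from the assignment-expansion code above.  The helper name and
   the arguments `rhs' and `dest_rtx' are hypothetical.  */
#if 0
static rtx
example_store_rhs (rhs, dest_rtx)
     tree rhs;
     rtx dest_rtx;
{
  /* Expand RHS, store the result in DEST_RTX, and return an rtx for the
     stored value so a containing expression can use it.  */
  return store_expr (rhs, dest_rtx, 1);
}
#endif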
3966 \f
3967 /* Return 1 if EXP just contains zeros. */
3968
3969 static int
3970 is_zeros_p (exp)
3971 tree exp;
3972 {
3973 tree elt;
3974
3975 switch (TREE_CODE (exp))
3976 {
3977 case CONVERT_EXPR:
3978 case NOP_EXPR:
3979 case NON_LVALUE_EXPR:
3980 return is_zeros_p (TREE_OPERAND (exp, 0));
3981
3982 case INTEGER_CST:
3983 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3984
3985 case COMPLEX_CST:
3986 return
3987 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3988
3989 case REAL_CST:
3990 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3991
3992 case CONSTRUCTOR:
3993 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3994 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3995 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3996 if (! is_zeros_p (TREE_VALUE (elt)))
3997 return 0;
3998
3999 return 1;
4000
4001 default:
4002 return 0;
4003 }
4004 }
4005
4006 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4007
4008 static int
4009 mostly_zeros_p (exp)
4010 tree exp;
4011 {
4012 if (TREE_CODE (exp) == CONSTRUCTOR)
4013 {
4014 int elts = 0, zeros = 0;
4015 tree elt = CONSTRUCTOR_ELTS (exp);
4016 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4017 {
4018 /* If there are no ranges of true bits, it is all zero. */
4019 return elt == NULL_TREE;
4020 }
4021 for (; elt; elt = TREE_CHAIN (elt))
4022 {
4023 /* We do not handle the case where the index is a RANGE_EXPR,
4024 so the statistic will be somewhat inaccurate.
4025 We do make a more accurate count in store_constructor itself,
4026 and since this function is only used for nested array elements,
4027 this should be close enough. */
4028 if (mostly_zeros_p (TREE_VALUE (elt)))
4029 zeros++;
4030 elts++;
4031 }
4032
4033 return 4 * zeros >= 3 * elts;
4034 }
4035
4036 return is_zeros_p (exp);
4037 }
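
/* A worked example of the 3/4 threshold above, for illustration only:
   a CONSTRUCTOR with 8 element values of which 6 are zero satisfies
   4 * 6 >= 3 * 8 and is treated as mostly zero, while one with only
   5 zero elements (4 * 5 < 3 * 8) is not.  */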
4038 \f
4039 /* Helper function for store_constructor.
4040 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4041 TYPE is the type of the CONSTRUCTOR, not the element type.
4042 ALIGN and CLEARED are as for store_constructor.
4043
4044 This provides a recursive shortcut back to store_constructor when it isn't
4045 necessary to go through store_field. This is so that we can pass through
4046 the cleared field to let store_constructor know that we may not have to
4047 clear a substructure if the outer structure has already been cleared. */
4048
4049 static void
4050 store_constructor_field (target, bitsize, bitpos,
4051 mode, exp, type, align, cleared)
4052 rtx target;
4053 int bitsize, bitpos;
4054 enum machine_mode mode;
4055 tree exp, type;
4056 int align;
4057 int cleared;
4058 {
4059 if (TREE_CODE (exp) == CONSTRUCTOR
4060 && bitpos % BITS_PER_UNIT == 0
4061 /* If we have a non-zero bitpos for a register target, then we just
4062 let store_field do the bitfield handling. This is unlikely to
4063 generate unnecessary clear instructions anyway. */
4064 && (bitpos == 0 || GET_CODE (target) == MEM))
4065 {
4066 if (bitpos != 0)
4067 target
4068 = change_address (target,
4069 GET_MODE (target) == BLKmode
4070 || 0 != (bitpos
4071 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4072 ? BLKmode : VOIDmode,
4073 plus_constant (XEXP (target, 0),
4074 bitpos / BITS_PER_UNIT));
4075 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4076 }
4077 else
4078 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4079 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4080 int_size_in_bytes (type), 0);
4081 }
4082
4083 /* Store the value of constructor EXP into the rtx TARGET.
4084 TARGET is either a REG or a MEM.
4085 ALIGN is the maximum known alignment for TARGET, in bits.
4086 CLEARED is true if TARGET is known to have been zero'd.
4087 SIZE is the number of bytes of TARGET we are allowed to modify: this
4088 may not be the same as the size of EXP if we are assigning to a field
4089 which has been packed to exclude padding bits. */
4090
4091 static void
4092 store_constructor (exp, target, align, cleared, size)
4093 tree exp;
4094 rtx target;
4095 int align;
4096 int cleared;
4097 int size;
4098 {
4099 tree type = TREE_TYPE (exp);
4100 #ifdef WORD_REGISTER_OPERATIONS
4101 rtx exp_size = expr_size (exp);
4102 #endif
4103
4104 /* We know our target cannot conflict, since safe_from_p has been called. */
4105 #if 0
4106 /* Don't try copying piece by piece into a hard register
4107 since that is vulnerable to being clobbered by EXP.
4108 Instead, construct in a pseudo register and then copy it all. */
4109 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4110 {
4111 rtx temp = gen_reg_rtx (GET_MODE (target));
4112 store_constructor (exp, temp, align, cleared, size);
4113 emit_move_insn (target, temp);
4114 return;
4115 }
4116 #endif
4117
4118 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4119 || TREE_CODE (type) == QUAL_UNION_TYPE)
4120 {
4121 register tree elt;
4122
4123 /* Inform later passes that the whole union value is dead. */
4124 if ((TREE_CODE (type) == UNION_TYPE
4125 || TREE_CODE (type) == QUAL_UNION_TYPE)
4126 && ! cleared)
4127 {
4128 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4129
4130 /* If the constructor is empty, clear the union. */
4131 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4132 clear_storage (target, expr_size (exp),
4133 TYPE_ALIGN (type) / BITS_PER_UNIT);
4134 }
4135
4136 /* If we are building a static constructor into a register,
4137 set the initial value as zero so we can fold the value into
4138 a constant. But if more than one register is involved,
4139 this probably loses. */
4140 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4142 {
4143 if (! cleared)
4144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4145
4146 cleared = 1;
4147 }
4148
4149 /* If the constructor has fewer fields than the structure
4150 or if we are initializing the structure to mostly zeros,
4151 clear the whole structure first. */
4152 else if (size > 0
4153 && ((list_length (CONSTRUCTOR_ELTS (exp))
4154 != list_length (TYPE_FIELDS (type)))
4155 || mostly_zeros_p (exp)))
4156 {
4157 if (! cleared)
4158 clear_storage (target, GEN_INT (size),
4159 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4160
4161 cleared = 1;
4162 }
4163 else if (! cleared)
4164 /* Inform later passes that the old value is dead. */
4165 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4166
4167 /* Store each element of the constructor into
4168 the corresponding field of TARGET. */
4169
4170 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4171 {
4172 register tree field = TREE_PURPOSE (elt);
4173 #ifdef WORD_REGISTER_OPERATIONS
4174 tree value = TREE_VALUE (elt);
4175 #endif
4176 register enum machine_mode mode;
4177 int bitsize;
4178 int bitpos = 0;
4179 int unsignedp;
4180 tree pos, constant = 0, offset = 0;
4181 rtx to_rtx = target;
4182
4183 /* Just ignore missing fields.
4184 We cleared the whole structure, above,
4185 if any fields are missing. */
4186 if (field == 0)
4187 continue;
4188
4189 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4190 continue;
4191
4192 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4193 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4194 else
4195 bitsize = -1;
4196
4197 unsignedp = TREE_UNSIGNED (field);
4198 mode = DECL_MODE (field);
4199 if (DECL_BIT_FIELD (field))
4200 mode = VOIDmode;
4201
4202 pos = DECL_FIELD_BITPOS (field);
4203 if (TREE_CODE (pos) == INTEGER_CST)
4204 constant = pos;
4205 else if (TREE_CODE (pos) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4207 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4208 else
4209 offset = pos;
4210
4211 if (constant)
4212 bitpos = TREE_INT_CST_LOW (constant);
4213
4214 if (offset)
4215 {
4216 rtx offset_rtx;
4217
4218 if (contains_placeholder_p (offset))
4219 offset = build (WITH_RECORD_EXPR, sizetype,
4220 offset, make_tree (TREE_TYPE (exp), target));
4221
4222 offset = size_binop (EXACT_DIV_EXPR, offset,
4223 size_int (BITS_PER_UNIT));
4224
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4227 abort ();
4228
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4230 {
4231 #ifdef POINTERS_EXTEND_UNSIGNED
4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4233 #else
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4235 #endif
4236 }
4237
4238 to_rtx
4239 = change_address (to_rtx, VOIDmode,
4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4241 force_reg (ptr_mode,
4242 offset_rtx)));
4243 }
4244
4245 if (TREE_READONLY (field))
4246 {
4247 if (GET_CODE (to_rtx) == MEM)
4248 to_rtx = copy_rtx (to_rtx);
4249
4250 RTX_UNCHANGING_P (to_rtx) = 1;
4251 }
4252
4253 #ifdef WORD_REGISTER_OPERATIONS
4254 /* If this initializes a field that is smaller than a word, at the
4255 start of a word, try to widen it to a full word.
4256 This special case allows us to output C++ member function
4257 initializations in a form that the optimizers can understand. */
4258 if (constant
4259 && GET_CODE (target) == REG
4260 && bitsize < BITS_PER_WORD
4261 && bitpos % BITS_PER_WORD == 0
4262 && GET_MODE_CLASS (mode) == MODE_INT
4263 && TREE_CODE (value) == INTEGER_CST
4264 && GET_CODE (exp_size) == CONST_INT
4265 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 {
4267 tree type = TREE_TYPE (value);
4268 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 {
4270 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4271 value = convert (type, value);
4272 }
4273 if (BYTES_BIG_ENDIAN)
4274 value
4275 = fold (build (LSHIFT_EXPR, type, value,
4276 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4277 bitsize = BITS_PER_WORD;
4278 mode = word_mode;
4279 }
4280 #endif
4281 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4282 TREE_VALUE (elt), type,
4283 MIN (align,
4284 DECL_ALIGN (TREE_PURPOSE (elt))),
4285 cleared);
4286 }
4287 }
4288 else if (TREE_CODE (type) == ARRAY_TYPE)
4289 {
4290 register tree elt;
4291 register int i;
4292 int need_to_clear;
4293 tree domain = TYPE_DOMAIN (type);
4294 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4295 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4296 tree elttype = TREE_TYPE (type);
4297
4298 /* If the constructor has fewer elements than the array,
4299 clear the whole array first. Similarly if this is
4300 a static constructor of a non-BLKmode object. */
4301 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4302 need_to_clear = 1;
4303 else
4304 {
4305 HOST_WIDE_INT count = 0, zero_count = 0;
4306 need_to_clear = 0;
4307 /* This loop is a more accurate version of the loop in
4308 mostly_zeros_p (it handles RANGE_EXPR in an index).
4309 It is also needed to check for missing elements. */
4310 for (elt = CONSTRUCTOR_ELTS (exp);
4311 elt != NULL_TREE;
4312 elt = TREE_CHAIN (elt))
4313 {
4314 tree index = TREE_PURPOSE (elt);
4315 HOST_WIDE_INT this_node_count;
4316 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 {
4318 tree lo_index = TREE_OPERAND (index, 0);
4319 tree hi_index = TREE_OPERAND (index, 1);
4320 if (TREE_CODE (lo_index) != INTEGER_CST
4321 || TREE_CODE (hi_index) != INTEGER_CST)
4322 {
4323 need_to_clear = 1;
4324 break;
4325 }
4326 this_node_count = TREE_INT_CST_LOW (hi_index)
4327 - TREE_INT_CST_LOW (lo_index) + 1;
4328 }
4329 else
4330 this_node_count = 1;
4331 count += this_node_count;
4332 if (mostly_zeros_p (TREE_VALUE (elt)))
4333 zero_count += this_node_count;
4334 }
4335 /* Clear the entire array first if there are any missing elements,
4336 or if the incidence of zero elements is >= 75%. */
4337 if (count < maxelt - minelt + 1
4338 || 4 * zero_count >= 3 * count)
4339 need_to_clear = 1;
4340 }
4341 if (need_to_clear && size > 0)
4342 {
4343 if (! cleared)
4344 clear_storage (target, GEN_INT (size),
4345 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4346 cleared = 1;
4347 }
4348 else
4349 /* Inform later passes that the old value is dead. */
4350 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4351
4352 /* Store each element of the constructor into
4353 the corresponding element of TARGET, determined
4354 by counting the elements. */
4355 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4356 elt;
4357 elt = TREE_CHAIN (elt), i++)
4358 {
4359 register enum machine_mode mode;
4360 int bitsize;
4361 int bitpos;
4362 int unsignedp;
4363 tree value = TREE_VALUE (elt);
4364 int align = TYPE_ALIGN (TREE_TYPE (value));
4365 tree index = TREE_PURPOSE (elt);
4366 rtx xtarget = target;
4367
4368 if (cleared && is_zeros_p (value))
4369 continue;
4370
4371 unsignedp = TREE_UNSIGNED (elttype);
4372 mode = TYPE_MODE (elttype);
4373 if (mode == BLKmode)
4374 {
4375 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4376 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4377 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4378 else
4379 bitsize = -1;
4380 }
4381 else
4382 bitsize = GET_MODE_BITSIZE (mode);
4383
4384 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4385 {
4386 tree lo_index = TREE_OPERAND (index, 0);
4387 tree hi_index = TREE_OPERAND (index, 1);
4388 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4389 struct nesting *loop;
4390 HOST_WIDE_INT lo, hi, count;
4391 tree position;
4392
4393 /* If the range is constant and "small", unroll the loop. */
4394 if (TREE_CODE (lo_index) == INTEGER_CST
4395 && TREE_CODE (hi_index) == INTEGER_CST
4396 && (lo = TREE_INT_CST_LOW (lo_index),
4397 hi = TREE_INT_CST_LOW (hi_index),
4398 count = hi - lo + 1,
4399 (GET_CODE (target) != MEM
4400 || count <= 2
4401 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4402 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4403 <= 40 * 8))))
4404 {
4405 lo -= minelt; hi -= minelt;
4406 for (; lo <= hi; lo++)
4407 {
4408 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4409 store_constructor_field (target, bitsize, bitpos, mode,
4410 value, type, align, cleared);
4411 }
4412 }
4413 else
4414 {
4415 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4416 loop_top = gen_label_rtx ();
4417 loop_end = gen_label_rtx ();
4418
4419 unsignedp = TREE_UNSIGNED (domain);
4420
4421 index = build_decl (VAR_DECL, NULL_TREE, domain);
4422
4423 DECL_RTL (index) = index_r
4424 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4425 &unsignedp, 0));
4426
4427 if (TREE_CODE (value) == SAVE_EXPR
4428 && SAVE_EXPR_RTL (value) == 0)
4429 {
4430 /* Make sure value gets expanded once before the
4431 loop. */
4432 expand_expr (value, const0_rtx, VOIDmode, 0);
4433 emit_queue ();
4434 }
4435 store_expr (lo_index, index_r, 0);
4436 loop = expand_start_loop (0);
4437
4438 /* Assign value to element index. */
4439 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4440 size_int (BITS_PER_UNIT));
4441 position = size_binop (MULT_EXPR,
4442 size_binop (MINUS_EXPR, index,
4443 TYPE_MIN_VALUE (domain)),
4444 position);
4445 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4446 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4447 xtarget = change_address (target, mode, addr);
4448 if (TREE_CODE (value) == CONSTRUCTOR)
4449 store_constructor (value, xtarget, align, cleared,
4450 bitsize / BITS_PER_UNIT);
4451 else
4452 store_expr (value, xtarget, 0);
4453
4454 expand_exit_loop_if_false (loop,
4455 build (LT_EXPR, integer_type_node,
4456 index, hi_index));
4457
4458 expand_increment (build (PREINCREMENT_EXPR,
4459 TREE_TYPE (index),
4460 index, integer_one_node), 0, 0);
4461 expand_end_loop ();
4462 emit_label (loop_end);
4463
4464 /* Needed by stupid register allocation, to extend the
4465 lifetime of pseudo-regs used by target past the end
4466 of the loop. */
4467 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4468 }
4469 }
4470 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4471 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4472 {
4473 rtx pos_rtx, addr;
4474 tree position;
4475
4476 if (index == 0)
4477 index = size_int (i);
4478
4479 if (minelt)
4480 index = size_binop (MINUS_EXPR, index,
4481 TYPE_MIN_VALUE (domain));
4482 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4483 size_int (BITS_PER_UNIT));
4484 position = size_binop (MULT_EXPR, index, position);
4485 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4486 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4487 xtarget = change_address (target, mode, addr);
4488 store_expr (value, xtarget, 0);
4489 }
4490 else
4491 {
4492 if (index != 0)
4493 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4494 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4495 else
4496 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4497 store_constructor_field (target, bitsize, bitpos, mode, value,
4498 type, align, cleared);
4499 }
4500 }
4501 }
4502 /* set constructor assignments */
4503 else if (TREE_CODE (type) == SET_TYPE)
4504 {
4505 tree elt = CONSTRUCTOR_ELTS (exp);
4506 int nbytes = int_size_in_bytes (type), nbits;
4507 tree domain = TYPE_DOMAIN (type);
4508 tree domain_min, domain_max, bitlength;
4509
4510 /* The default implementation strategy is to extract the constant
4511 parts of the constructor, use that to initialize the target,
4512 and then "or" in whatever non-constant ranges we need in addition.
4513
4514 If a large set is all zero or all ones, it is
4515 probably better to set it using memset (if available) or bzero.
4516 Also, if a large set has just a single range, it may also be
4517 better to first clear the whole set (using
4518 bzero/memset), and then set the bits we want. */
4519
4520 /* Check for all zeros. */
4521 if (elt == NULL_TREE && size > 0)
4522 {
4523 if (!cleared)
4524 clear_storage (target, GEN_INT (size),
4525 TYPE_ALIGN (type) / BITS_PER_UNIT);
4526 return;
4527 }
4528
4529 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4530 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4531 bitlength = size_binop (PLUS_EXPR,
4532 size_binop (MINUS_EXPR, domain_max, domain_min),
4533 size_one_node);
4534
4535 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4536 abort ();
4537 nbits = TREE_INT_CST_LOW (bitlength);
4538
4539 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4540 are "complicated" (more than one range), initialize (the
4541 constant parts) by copying from a constant. */
4542 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4543 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4544 {
4545 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4546 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4547 char *bit_buffer = (char *) alloca (nbits);
4548 HOST_WIDE_INT word = 0;
4549 int bit_pos = 0;
4550 int ibit = 0;
4551 int offset = 0; /* In bytes from beginning of set. */
4552 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4553 for (;;)
4554 {
4555 if (bit_buffer[ibit])
4556 {
4557 if (BYTES_BIG_ENDIAN)
4558 word |= (1 << (set_word_size - 1 - bit_pos));
4559 else
4560 word |= 1 << bit_pos;
4561 }
4562 bit_pos++; ibit++;
4563 if (bit_pos >= set_word_size || ibit == nbits)
4564 {
4565 if (word != 0 || ! cleared)
4566 {
4567 rtx datum = GEN_INT (word);
4568 rtx to_rtx;
4569 /* The assumption here is that it is safe to use
4570 XEXP if the set is multi-word, but not if
4571 it's single-word. */
4572 if (GET_CODE (target) == MEM)
4573 {
4574 to_rtx = plus_constant (XEXP (target, 0), offset);
4575 to_rtx = change_address (target, mode, to_rtx);
4576 }
4577 else if (offset == 0)
4578 to_rtx = target;
4579 else
4580 abort ();
4581 emit_move_insn (to_rtx, datum);
4582 }
4583 if (ibit == nbits)
4584 break;
4585 word = 0;
4586 bit_pos = 0;
4587 offset += set_word_size / BITS_PER_UNIT;
4588 }
4589 }
4590 }
4591 else if (!cleared)
4592 {
4593 /* Don't bother clearing storage if the set is all ones. */
4594 if (TREE_CHAIN (elt) != NULL_TREE
4595 || (TREE_PURPOSE (elt) == NULL_TREE
4596 ? nbits != 1
4597 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4598 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4599 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4600 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4601 != nbits))))
4602 clear_storage (target, expr_size (exp),
4603 TYPE_ALIGN (type) / BITS_PER_UNIT);
4604 }
4605
4606 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4607 {
4608 /* start of range of element or NULL */
4609 tree startbit = TREE_PURPOSE (elt);
4610 /* end of range of element, or element value */
4611 tree endbit = TREE_VALUE (elt);
4612 #ifdef TARGET_MEM_FUNCTIONS
4613 HOST_WIDE_INT startb, endb;
4614 #endif
4615 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4616
4617 bitlength_rtx = expand_expr (bitlength,
4618 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4619
4620 /* handle non-range tuple element like [ expr ] */
4621 if (startbit == NULL_TREE)
4622 {
4623 startbit = save_expr (endbit);
4624 endbit = startbit;
4625 }
4626 startbit = convert (sizetype, startbit);
4627 endbit = convert (sizetype, endbit);
4628 if (! integer_zerop (domain_min))
4629 {
4630 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4631 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4632 }
4633 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4636 EXPAND_CONST_ADDRESS);
4637
4638 if (REG_P (target))
4639 {
4640 targetx = assign_stack_temp (GET_MODE (target),
4641 GET_MODE_SIZE (GET_MODE (target)),
4642 0);
4643 emit_move_insn (targetx, target);
4644 }
4645 else if (GET_CODE (target) == MEM)
4646 targetx = target;
4647 else
4648 abort ();
4649
4650 #ifdef TARGET_MEM_FUNCTIONS
4651 /* Optimization: If startbit and endbit are
4652 constants divisible by BITS_PER_UNIT,
4653 call memset instead. */
4654 if (TREE_CODE (startbit) == INTEGER_CST
4655 && TREE_CODE (endbit) == INTEGER_CST
4656 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4657 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4658 {
4659 emit_library_call (memset_libfunc, 0,
4660 VOIDmode, 3,
4661 plus_constant (XEXP (targetx, 0),
4662 startb / BITS_PER_UNIT),
4663 Pmode,
4664 constm1_rtx, TYPE_MODE (integer_type_node),
4665 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4666 TYPE_MODE (sizetype));
4667 }
4668 else
4669 #endif
4670 {
4671 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4672 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4673 bitlength_rtx, TYPE_MODE (sizetype),
4674 startbit_rtx, TYPE_MODE (sizetype),
4675 endbit_rtx, TYPE_MODE (sizetype));
4676 }
4677 if (REG_P (target))
4678 emit_move_insn (target, targetx);
4679 }
4680 }
4681
4682 else
4683 abort ();
4684 }
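
/* Illustrative sketch only, kept out of the build with #if 0: one way a
   caller might use store_constructor to expand a CONSTRUCTOR whose type
   has constant size into a fresh stack temporary.  The helper name and
   the choice of a stack temporary are assumptions, not how expand_expr
   itself does it.  */
#if 0
static rtx
example_expand_ctor (ctor)
     tree ctor;
{
  tree type = TREE_TYPE (ctor);
  rtx target = assign_temp (type, 0, 1, 1);

  /* ALIGN is in bits; SIZE is the number of bytes we may modify.
     CLEARED is 0, so store_constructor does any clearing it needs.  */
  store_constructor (ctor, target, TYPE_ALIGN (type), 0,
		     int_size_in_bytes (type));
  return target;
}
#endif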
4685
4686 /* Store the value of EXP (an expression tree)
4687 into a subfield of TARGET which has mode MODE and occupies
4688 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4689 If MODE is VOIDmode, it means that we are storing into a bit-field.
4690
4691 If VALUE_MODE is VOIDmode, return nothing in particular.
4692 UNSIGNEDP is not used in this case.
4693
4694 Otherwise, return an rtx for the value stored. This rtx
4695 has mode VALUE_MODE if that is convenient to do.
4696 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4697
4698 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4699 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4700
4701 ALIAS_SET is the alias set for the destination. This value will
4702 (in general) be different from that for TARGET, since TARGET is a
4703 reference to the containing structure. */
4704
4705 static rtx
4706 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4707 unsignedp, align, total_size, alias_set)
4708 rtx target;
4709 int bitsize, bitpos;
4710 enum machine_mode mode;
4711 tree exp;
4712 enum machine_mode value_mode;
4713 int unsignedp;
4714 int align;
4715 int total_size;
4716 int alias_set;
4717 {
4718 HOST_WIDE_INT width_mask = 0;
4719
4720 if (TREE_CODE (exp) == ERROR_MARK)
4721 return const0_rtx;
4722
4723 if (bitsize < HOST_BITS_PER_WIDE_INT)
4724 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4725
4726 /* If we are storing into an unaligned field of an aligned union that is
4727 in a register, we may have the mode of TARGET being an integer mode but
4728 MODE == BLKmode. In that case, get an aligned object whose size and
4729 alignment are the same as TARGET and store TARGET into it (we can avoid
4730 the store if the field being stored is the entire width of TARGET). Then
4731 call ourselves recursively to store the field into a BLKmode version of
4732 that object. Finally, load from the object into TARGET. This is not
4733 very efficient in general, but should only be slightly more expensive
4734 than the otherwise-required unaligned accesses. Perhaps this can be
4735 cleaned up later. */
4736
4737 if (mode == BLKmode
4738 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4739 {
4740 rtx object = assign_stack_temp (GET_MODE (target),
4741 GET_MODE_SIZE (GET_MODE (target)), 0);
4742 rtx blk_object = copy_rtx (object);
4743
4744 MEM_SET_IN_STRUCT_P (object, 1);
4745 MEM_SET_IN_STRUCT_P (blk_object, 1);
4746 PUT_MODE (blk_object, BLKmode);
4747
4748 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4749 emit_move_insn (object, target);
4750
4751 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4752 align, total_size, alias_set);
4753
4754 /* Even though we aren't returning target, we need to
4755 give it the updated value. */
4756 emit_move_insn (target, object);
4757
4758 return blk_object;
4759 }
4760
4761 /* If the structure is in a register or if the component
4762 is a bit field, we cannot use addressing to access it.
4763 Use bit-field techniques or SUBREG to store in it. */
4764
4765 if (mode == VOIDmode
4766 || (mode != BLKmode && ! direct_store[(int) mode]
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4768 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4769 || GET_CODE (target) == REG
4770 || GET_CODE (target) == SUBREG
4771 /* If the field isn't aligned enough to store as an ordinary memref,
4772 store it as a bit field. */
4773 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4774 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4775 || bitpos % GET_MODE_ALIGNMENT (mode)))
4776 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4777 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4778 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4779 /* If the RHS and field are a constant size and the size of the
4780 RHS isn't the same size as the bitfield, we must use bitfield
4781 operations. */
4782 || ((bitsize >= 0
4783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4784 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4785 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4786 {
4787 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4788
4789 /* If BITSIZE is narrower than the size of the type of EXP
4790 we will be narrowing TEMP. Normally, what's wanted are the
4791 low-order bits. However, if EXP's type is a record and this is
4792 big-endian machine, we want the upper BITSIZE bits. */
4793 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4794 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4795 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4796 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4797 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4798 - bitsize),
4799 temp, 1);
4800
4801 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4802 MODE. */
4803 if (mode != VOIDmode && mode != BLKmode
4804 && mode != TYPE_MODE (TREE_TYPE (exp)))
4805 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4806
4807 /* If the modes of TARGET and TEMP are both BLKmode, both
4808 must be in memory and BITPOS must be aligned on a byte
4809 boundary. If so, we simply do a block copy. */
4810 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4811 {
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
4818 bitpos / BITS_PER_UNIT));
4819
4820 /* Make sure that ALIGN is no stricter than the alignment of
4821 EXP. */
4822 if (TREE_CODE (exp) == VAR_DECL)
4823 align = MIN (DECL_ALIGN (exp) / BITS_PER_UNIT, align);
4824 else
4825 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT, align);
4826
4827 /* Find an alignment that is consistent with the bit position. */
4828 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4829 align >>= 1;
4830
4831 emit_block_move (target, temp,
4832 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4833 / BITS_PER_UNIT),
4834 align);
4835
4836 return value_mode == VOIDmode ? const0_rtx : target;
4837 }
4838
4839 /* Store the value in the bitfield. */
4840 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4841 if (value_mode != VOIDmode)
4842 {
4843 /* The caller wants an rtx for the value. */
4844 /* If possible, avoid refetching from the bitfield itself. */
4845 if (width_mask != 0
4846 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4847 {
4848 tree count;
4849 enum machine_mode tmode;
4850
4851 if (unsignedp)
4852 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4853 tmode = GET_MODE (temp);
4854 if (tmode == VOIDmode)
4855 tmode = value_mode;
4856 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4857 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4858 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4859 }
4860 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4861 NULL_RTX, value_mode, 0, align,
4862 total_size);
4863 }
4864 return const0_rtx;
4865 }
4866 else
4867 {
4868 rtx addr = XEXP (target, 0);
4869 rtx to_rtx;
4870
4871 /* If a value is wanted, it must be the lhs;
4872 so make the address stable for multiple use. */
4873
4874 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4875 && ! CONSTANT_ADDRESS_P (addr)
4876 /* A frame-pointer reference is already stable. */
4877 && ! (GET_CODE (addr) == PLUS
4878 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4879 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4880 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4881 addr = copy_to_reg (addr);
4882
4883 /* Now build a reference to just the desired component. */
4884
4885 to_rtx = copy_rtx (change_address (target, mode,
4886 plus_constant (addr,
4887 (bitpos
4888 / BITS_PER_UNIT))));
4889 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4890 MEM_ALIAS_SET (to_rtx) = alias_set;
4891
4892 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4893 }
4894 }
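
/* Illustrative sketch only, kept out of the build with #if 0: storing an
   expression into an 8-bit field that starts 16 bits into TARGET.  The
   byte alignment of 1 and the alias set of 0 are assumptions made for the
   sake of the example.  */
#if 0
static void
example_store_byte_field (target, rhs)
     rtx target;
     tree rhs;
{
  /* VALUE_MODE of VOIDmode says we do not need the stored value back;
     TOTAL_SIZE of -1 says the containing structure's size is unknown.  */
  store_field (target, 8, 16, QImode, rhs, VOIDmode, 0, 1, -1, 0);
}
#endif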
4895 \f
4896 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4897 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4898 ARRAY_REFs and find the ultimate containing object, which we return.
4899
4900 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4901 bit position, and *PUNSIGNEDP to the signedness of the field.
4902 If the position of the field is variable, we store a tree
4903 giving the variable offset (in units) in *POFFSET.
4904 This offset is in addition to the bit position.
4905 If the position is not variable, we store 0 in *POFFSET.
4906 We set *PALIGNMENT to the alignment in bytes of the address that will be
4907 computed. This is the alignment of the thing we return if *POFFSET
4908 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4909
4910 If any of the extraction expressions is volatile,
4911 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4912
4913 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4914 is a mode that can be used to access the field. In that case, *PBITSIZE
4915 is redundant.
4916
4917 If the field describes a variable-sized object, *PMODE is set to
4918 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4919 this case, but the address of the object can be found. */
4920
4921 tree
4922 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4923 punsignedp, pvolatilep, palignment)
4924 tree exp;
4925 int *pbitsize;
4926 int *pbitpos;
4927 tree *poffset;
4928 enum machine_mode *pmode;
4929 int *punsignedp;
4930 int *pvolatilep;
4931 int *palignment;
4932 {
4933 tree orig_exp = exp;
4934 tree size_tree = 0;
4935 enum machine_mode mode = VOIDmode;
4936 tree offset = integer_zero_node;
4937 unsigned int alignment = BIGGEST_ALIGNMENT;
4938
4939 if (TREE_CODE (exp) == COMPONENT_REF)
4940 {
4941 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4942 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4943 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4944 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4945 }
4946 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4947 {
4948 size_tree = TREE_OPERAND (exp, 1);
4949 *punsignedp = TREE_UNSIGNED (exp);
4950 }
4951 else
4952 {
4953 mode = TYPE_MODE (TREE_TYPE (exp));
4954 if (mode == BLKmode)
4955 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4956
4957 *pbitsize = GET_MODE_BITSIZE (mode);
4958 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4959 }
4960
4961 if (size_tree)
4962 {
4963 if (TREE_CODE (size_tree) != INTEGER_CST)
4964 mode = BLKmode, *pbitsize = -1;
4965 else
4966 *pbitsize = TREE_INT_CST_LOW (size_tree);
4967 }
4968
4969 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4970 and find the ultimate containing object. */
4971
4972 *pbitpos = 0;
4973
4974 while (1)
4975 {
4976 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4977 {
4978 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4979 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4980 : TREE_OPERAND (exp, 2));
4981 tree constant = integer_zero_node, var = pos;
4982
4983 /* If this field hasn't been filled in yet, don't go
4984 past it. This should only happen when folding expressions
4985 made during type construction. */
4986 if (pos == 0)
4987 break;
4988
4989 /* Assume here that the offset is a multiple of a unit.
4990 If not, there should be an explicitly added constant. */
4991 if (TREE_CODE (pos) == PLUS_EXPR
4992 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4993 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4994 else if (TREE_CODE (pos) == INTEGER_CST)
4995 constant = pos, var = integer_zero_node;
4996
4997 *pbitpos += TREE_INT_CST_LOW (constant);
4998 offset = size_binop (PLUS_EXPR, offset,
4999 size_binop (EXACT_DIV_EXPR, var,
5000 size_int (BITS_PER_UNIT)));
5001 }
5002
5003 else if (TREE_CODE (exp) == ARRAY_REF)
5004 {
5005 /* This code is based on the code in case ARRAY_REF in expand_expr
5006 below. We assume here that the size of an array element is
5007 always an integral multiple of BITS_PER_UNIT. */
5008
5009 tree index = TREE_OPERAND (exp, 1);
5010 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5011 tree low_bound
5012 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5013 tree index_type = TREE_TYPE (index);
5014 tree xindex;
5015
5016 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5017 {
5018 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5019 index);
5020 index_type = TREE_TYPE (index);
5021 }
5022
5023 /* Optimize the special-case of a zero lower bound.
5024
5025 We convert the low_bound to sizetype to avoid some problems
5026 with constant folding. (E.g. suppose the lower bound is 1,
5027 and its mode is QI. Without the conversion, (ARRAY
5028 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5029 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5030
5031 But sizetype isn't quite right either (especially if
5032 the lowbound is negative). FIXME */
5033
5034 if (! integer_zerop (low_bound))
5035 index = fold (build (MINUS_EXPR, index_type, index,
5036 convert (sizetype, low_bound)));
5037
5038 if (TREE_CODE (index) == INTEGER_CST)
5039 {
5040 index = convert (sbitsizetype, index);
5041 index_type = TREE_TYPE (index);
5042 }
5043
5044 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5045 convert (sbitsizetype,
5046 TYPE_SIZE (TREE_TYPE (exp)))));
5047
5048 if (TREE_CODE (xindex) == INTEGER_CST
5049 && TREE_INT_CST_HIGH (xindex) == 0)
5050 *pbitpos += TREE_INT_CST_LOW (xindex);
5051 else
5052 {
5053 /* Either the bit offset calculated above is not constant, or
5054 it overflowed. In either case, redo the multiplication
5055 against the size in units. This is especially important
5056 in the non-constant case to avoid a division at runtime. */
5057 xindex = fold (build (MULT_EXPR, ssizetype, index,
5058 convert (ssizetype,
5059 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5060
5061 if (contains_placeholder_p (xindex))
5062 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5063
5064 offset = size_binop (PLUS_EXPR, offset, xindex);
5065 }
5066 }
5067 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5068 && ! ((TREE_CODE (exp) == NOP_EXPR
5069 || TREE_CODE (exp) == CONVERT_EXPR)
5070 && (TYPE_MODE (TREE_TYPE (exp))
5071 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5072 break;
5073
5074 /* If any reference in the chain is volatile, the effect is volatile. */
5075 if (TREE_THIS_VOLATILE (exp))
5076 *pvolatilep = 1;
5077
5078 /* If the offset is non-constant already, then we can't assume any
5079 alignment more than the alignment here. */
5080 if (! integer_zerop (offset))
5081 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5082
5083 exp = TREE_OPERAND (exp, 0);
5084 }
5085
5086 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5087 alignment = MIN (alignment, DECL_ALIGN (exp));
5088 else if (TREE_TYPE (exp) != 0)
5089 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5090
5091 if (integer_zerop (offset))
5092 offset = 0;
5093
5094 if (offset != 0 && contains_placeholder_p (offset))
5095 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5096
5097 *pmode = mode;
5098 *poffset = offset;
5099 *palignment = alignment / BITS_PER_UNIT;
5100 return exp;
5101 }
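
/* Illustrative sketch only, kept out of the build with #if 0: the usual
   way callers decompose a reference with get_inner_reference.  `ref' is
   a hypothetical COMPONENT_REF, BIT_FIELD_REF or ARRAY_REF tree.  */
#if 0
static void
example_decompose_ref (ref)
     tree ref;
{
  int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
  tree offset;
  enum machine_mode mode;
  tree inner
    = get_inner_reference (ref, &bitsize, &bitpos, &offset, &mode,
			   &unsignedp, &volatilep, &alignment);

  /* INNER is the ultimate containing object; BITPOS and BITSIZE locate
     the field within it, OFFSET (if nonzero) is a tree giving a variable
     offset in bytes, and ALIGNMENT is in bytes.  */
}
#endif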
5102
5103 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5104 static enum memory_use_mode
5105 get_memory_usage_from_modifier (modifier)
5106 enum expand_modifier modifier;
5107 {
5108 switch (modifier)
5109 {
5110 case EXPAND_NORMAL:
5111 case EXPAND_SUM:
5112 return MEMORY_USE_RO;
5113 break;
5114 case EXPAND_MEMORY_USE_WO:
5115 return MEMORY_USE_WO;
5116 break;
5117 case EXPAND_MEMORY_USE_RW:
5118 return MEMORY_USE_RW;
5119 break;
5120 case EXPAND_MEMORY_USE_DONT:
5121 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5122 MEMORY_USE_DONT, because they are modifiers to a call of
5123 expand_expr in the ADDR_EXPR case of expand_expr. */
5124 case EXPAND_CONST_ADDRESS:
5125 case EXPAND_INITIALIZER:
5126 return MEMORY_USE_DONT;
5127 case EXPAND_MEMORY_USE_BAD:
5128 default:
5129 abort ();
5130 }
5131 }
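
/* For example, expanding a right-hand side with EXPAND_NORMAL or
   EXPAND_SUM maps to MEMORY_USE_RO, while the destination of an
   assignment, expanded with EXPAND_MEMORY_USE_WO, maps to
   MEMORY_USE_WO.  */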
5132 \f
5133 /* Given an rtx VALUE that may contain additions and multiplications,
5134 return an equivalent value that just refers to a register or memory.
5135 This is done by generating instructions to perform the arithmetic
5136 and returning a pseudo-register containing the value.
5137
5138 The returned value may be a REG, SUBREG, MEM or constant. */
5139
5140 rtx
5141 force_operand (value, target)
5142 rtx value, target;
5143 {
5144 register optab binoptab = 0;
5145 /* Use a temporary to force order of execution of calls to
5146 `force_operand'. */
5147 rtx tmp;
5148 register rtx op2;
5149 /* Use subtarget as the target for operand 0 of a binary operation. */
5150 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5151
5152 /* Check for a PIC address load. */
5153 if (flag_pic
5154 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5155 && XEXP (value, 0) == pic_offset_table_rtx
5156 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5157 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5158 || GET_CODE (XEXP (value, 1)) == CONST))
5159 {
5160 if (!subtarget)
5161 subtarget = gen_reg_rtx (GET_MODE (value));
5162 emit_move_insn (subtarget, value);
5163 return subtarget;
5164 }
5165
5166 if (GET_CODE (value) == PLUS)
5167 binoptab = add_optab;
5168 else if (GET_CODE (value) == MINUS)
5169 binoptab = sub_optab;
5170 else if (GET_CODE (value) == MULT)
5171 {
5172 op2 = XEXP (value, 1);
5173 if (!CONSTANT_P (op2)
5174 && !(GET_CODE (op2) == REG && op2 != subtarget))
5175 subtarget = 0;
5176 tmp = force_operand (XEXP (value, 0), subtarget);
5177 return expand_mult (GET_MODE (value), tmp,
5178 force_operand (op2, NULL_RTX),
5179 target, 0);
5180 }
5181
5182 if (binoptab)
5183 {
5184 op2 = XEXP (value, 1);
5185 if (!CONSTANT_P (op2)
5186 && !(GET_CODE (op2) == REG && op2 != subtarget))
5187 subtarget = 0;
5188 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5189 {
5190 binoptab = add_optab;
5191 op2 = negate_rtx (GET_MODE (value), op2);
5192 }
5193
5194 /* Check for an addition with OP2 a constant integer and our first
5195 operand a PLUS of a virtual register and something else. In that
5196 case, we want to emit the sum of the virtual register and the
5197 constant first and then add the other value. This allows virtual
5198 register instantiation to simply modify the constant rather than
5199 creating another one around this addition. */
5200 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5201 && GET_CODE (XEXP (value, 0)) == PLUS
5202 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5203 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5204 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5205 {
5206 rtx temp = expand_binop (GET_MODE (value), binoptab,
5207 XEXP (XEXP (value, 0), 0), op2,
5208 subtarget, 0, OPTAB_LIB_WIDEN);
5209 return expand_binop (GET_MODE (value), binoptab, temp,
5210 force_operand (XEXP (XEXP (value, 0), 1), 0),
5211 target, 0, OPTAB_LIB_WIDEN);
5212 }
5213
5214 tmp = force_operand (XEXP (value, 0), subtarget);
5215 return expand_binop (GET_MODE (value), binoptab, tmp,
5216 force_operand (op2, NULL_RTX),
5217 target, 0, OPTAB_LIB_WIDEN);
5218 /* We give UNSIGNEDP = 0 to expand_binop
5219 because the only operations we are expanding here are signed ones. */
5220 }
5221 return value;
5222 }
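
/* Illustrative sketch only, kept out of the build with #if 0: using
   force_operand to reduce an address-arithmetic rtx to something simple.
   `base' and `index' are hypothetical pseudo registers.  */
#if 0
static rtx
example_force_sum (base, index)
     rtx base, index;
{
  /* Emit the addition and return a register holding the sum (or the
     value itself if it is already a register, memory, or constant).  */
  return force_operand (gen_rtx_PLUS (Pmode, base, index), NULL_RTX);
}
#endif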
5223 \f
5224 /* Subroutine of expand_expr:
5225 save the non-copied parts (LIST) of an expr (LHS), and return a list
5226 which can restore these values to their previous values,
5227 should something modify their storage. */
5228
5229 static tree
5230 save_noncopied_parts (lhs, list)
5231 tree lhs;
5232 tree list;
5233 {
5234 tree tail;
5235 tree parts = 0;
5236
5237 for (tail = list; tail; tail = TREE_CHAIN (tail))
5238 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5239 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5240 else
5241 {
5242 tree part = TREE_VALUE (tail);
5243 tree part_type = TREE_TYPE (part);
5244 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5245 rtx target = assign_temp (part_type, 0, 1, 1);
5246 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5247 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5248 parts = tree_cons (to_be_saved,
5249 build (RTL_EXPR, part_type, NULL_TREE,
5250 (tree) target),
5251 parts);
5252 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5253 }
5254 return parts;
5255 }
5256
5257 /* Subroutine of expand_expr:
5258 record the non-copied parts (LIST) of an expr (LHS), and return a list
5259 which specifies the initial values of these parts. */
5260
5261 static tree
5262 init_noncopied_parts (lhs, list)
5263 tree lhs;
5264 tree list;
5265 {
5266 tree tail;
5267 tree parts = 0;
5268
5269 for (tail = list; tail; tail = TREE_CHAIN (tail))
5270 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5271 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5272 else if (TREE_PURPOSE (tail))
5273 {
5274 tree part = TREE_VALUE (tail);
5275 tree part_type = TREE_TYPE (part);
5276 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5277 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5278 }
5279 return parts;
5280 }
5281
5282 /* Subroutine of expand_expr: return nonzero iff there is no way that
5283 EXP can reference X, which is being modified. TOP_P is nonzero if this
5284 call is going to be used to determine whether we need a temporary
5285 for EXP, as opposed to a recursive call to this function.
5286
5287 It is always safe for this routine to return zero since it merely
5288 searches for optimization opportunities. */
5289
5290 static int
5291 safe_from_p (x, exp, top_p)
5292 rtx x;
5293 tree exp;
5294 int top_p;
5295 {
5296 rtx exp_rtl = 0;
5297 int i, nops;
5298 static int save_expr_count;
5299 static int save_expr_size = 0;
5300 static tree *save_expr_rewritten;
5301 static tree save_expr_trees[256];
5302
5303 if (x == 0
5304 /* If EXP has varying size, we MUST use a target since we currently
5305 have no way of allocating temporaries of variable size
5306 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5307 So we assume here that something at a higher level has prevented a
5308 clash. This is somewhat bogus, but the best we can do. Only
5309 do this when X is BLKmode and when we are at the top level. */
5310 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5312 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5313 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5314 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5315 != INTEGER_CST)
5316 && GET_MODE (x) == BLKmode))
5317 return 1;
5318
5319 if (top_p && save_expr_size == 0)
5320 {
5321 int rtn;
5322
5323 save_expr_count = 0;
5324 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5325 save_expr_rewritten = &save_expr_trees[0];
5326
5327 rtn = safe_from_p (x, exp, 1);
5328
5329 for (i = 0; i < save_expr_count; ++i)
5330 {
5331 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5332 abort ();
5333 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5334 }
5335
5336 save_expr_size = 0;
5337
5338 return rtn;
5339 }
5340
5341 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5342 find the underlying pseudo. */
5343 if (GET_CODE (x) == SUBREG)
5344 {
5345 x = SUBREG_REG (x);
5346 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5347 return 0;
5348 }
5349
5350 /* If X is a location in the outgoing argument area, it is always safe. */
5351 if (GET_CODE (x) == MEM
5352 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5353 || (GET_CODE (XEXP (x, 0)) == PLUS
5354 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5355 return 1;
5356
5357 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5358 {
5359 case 'd':
5360 exp_rtl = DECL_RTL (exp);
5361 break;
5362
5363 case 'c':
5364 return 1;
5365
5366 case 'x':
5367 if (TREE_CODE (exp) == TREE_LIST)
5368 return ((TREE_VALUE (exp) == 0
5369 || safe_from_p (x, TREE_VALUE (exp), 0))
5370 && (TREE_CHAIN (exp) == 0
5371 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5372 else if (TREE_CODE (exp) == ERROR_MARK)
5373 return 1; /* An already-visited SAVE_EXPR? */
5374 else
5375 return 0;
5376
5377 case '1':
5378 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5379
5380 case '2':
5381 case '<':
5382 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5383 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5384
5385 case 'e':
5386 case 'r':
5387 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5388 the expression. If it is set, we conflict iff we are that rtx or
5389 both are in memory. Otherwise, we check all operands of the
5390 expression recursively. */
5391
5392 switch (TREE_CODE (exp))
5393 {
5394 case ADDR_EXPR:
5395 return (staticp (TREE_OPERAND (exp, 0))
5396 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5397 || TREE_STATIC (exp));
5398
5399 case INDIRECT_REF:
5400 if (GET_CODE (x) == MEM)
5401 return 0;
5402 break;
5403
5404 case CALL_EXPR:
5405 exp_rtl = CALL_EXPR_RTL (exp);
5406 if (exp_rtl == 0)
5407 {
5408 /* Assume that the call will clobber all hard registers and
5409 all of memory. */
5410 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5411 || GET_CODE (x) == MEM)
5412 return 0;
5413 }
5414
5415 break;
5416
5417 case RTL_EXPR:
5418 /* If a sequence exists, we would have to scan every instruction
5419 in the sequence to see if it was safe. This is probably not
5420 worthwhile. */
5421 if (RTL_EXPR_SEQUENCE (exp))
5422 return 0;
5423
5424 exp_rtl = RTL_EXPR_RTL (exp);
5425 break;
5426
5427 case WITH_CLEANUP_EXPR:
5428 exp_rtl = RTL_EXPR_RTL (exp);
5429 break;
5430
5431 case CLEANUP_POINT_EXPR:
5432 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5433
5434 case SAVE_EXPR:
5435 exp_rtl = SAVE_EXPR_RTL (exp);
5436 if (exp_rtl)
5437 break;
5438
5439 /* This SAVE_EXPR might appear many times in the top-level
5440 safe_from_p() expression, and if it has a complex
5441 subexpression, examining it multiple times could result
5442 in a combinatorial explosion. E.g. on an Alpha
5443 running at least 200MHz, a Fortran test case compiled with
5444 optimization took about 28 minutes to compile -- even though
5445 it was only a few lines long, and the complicated line causing
5446 so much time to be spent in the earlier version of safe_from_p()
5447 had only 293 or so unique nodes.
5448
5449 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5450 where it is so we can turn it back in the top-level safe_from_p()
5451 when we're done. */
5452
5453 /* For now, don't bother re-sizing the array. */
5454 if (save_expr_count >= save_expr_size)
5455 return 0;
5456 save_expr_rewritten[save_expr_count++] = exp;
5457
5458 nops = tree_code_length[(int) SAVE_EXPR];
5459 for (i = 0; i < nops; i++)
5460 {
5461 tree operand = TREE_OPERAND (exp, i);
5462 if (operand == NULL_TREE)
5463 continue;
5464 TREE_SET_CODE (exp, ERROR_MARK);
5465 if (!safe_from_p (x, operand, 0))
5466 return 0;
5467 TREE_SET_CODE (exp, SAVE_EXPR);
5468 }
5469 TREE_SET_CODE (exp, ERROR_MARK);
5470 return 1;
5471
5472 case BIND_EXPR:
5473 /* The only operand we look at is operand 1. The rest aren't
5474 part of the expression. */
5475 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5476
5477 case METHOD_CALL_EXPR:
5478 /* This takes a rtx argument, but shouldn't appear here. */
5479 abort ();
5480
5481 default:
5482 break;
5483 }
5484
5485 /* If we have an rtx, we do not need to scan our operands. */
5486 if (exp_rtl)
5487 break;
5488
5489 nops = tree_code_length[(int) TREE_CODE (exp)];
5490 for (i = 0; i < nops; i++)
5491 if (TREE_OPERAND (exp, i) != 0
5492 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5493 return 0;
5494 }
5495
5496 /* If we have an rtl, find any enclosed object. Then see if we conflict
5497 with it. */
5498 if (exp_rtl)
5499 {
5500 if (GET_CODE (exp_rtl) == SUBREG)
5501 {
5502 exp_rtl = SUBREG_REG (exp_rtl);
5503 if (GET_CODE (exp_rtl) == REG
5504 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5505 return 0;
5506 }
5507
5508 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5509 are memory and EXP is not readonly. */
5510 return ! (rtx_equal_p (x, exp_rtl)
5511 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5512 && ! TREE_READONLY (exp)));
5513 }
5514
5515 /* If we reach here, it is safe. */
5516 return 1;
5517 }
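/* As a concrete illustration (a sketch, not taken from any front end):
   when expanding the C assignment

       a = b + c;

   the expander may want to compute the sum directly into a's rtx X.
   That is only valid when safe_from_p (X, <tree for b + c>, 1) returns
   nonzero, i.e. neither `b' nor `c' can refer to X; otherwise the sum
   must go into a separate temporary that is copied into X afterwards.  */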
5518
5519 /* Subroutine of expand_expr: return nonzero iff EXP is an
5520 expression whose type is statically determinable. */
5521
5522 static int
5523 fixed_type_p (exp)
5524 tree exp;
5525 {
5526 if (TREE_CODE (exp) == PARM_DECL
5527 || TREE_CODE (exp) == VAR_DECL
5528 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5529 || TREE_CODE (exp) == COMPONENT_REF
5530 || TREE_CODE (exp) == ARRAY_REF)
5531 return 1;
5532 return 0;
5533 }
5534
5535 /* Subroutine of expand_expr: return rtx if EXP is a
5536 variable or parameter; else return 0. */
5537
5538 static rtx
5539 var_rtx (exp)
5540 tree exp;
5541 {
5542 STRIP_NOPS (exp);
5543 switch (TREE_CODE (exp))
5544 {
5545 case PARM_DECL:
5546 case VAR_DECL:
5547 return DECL_RTL (exp);
5548 default:
5549 return 0;
5550 }
5551 }
5552
5553 #ifdef MAX_INTEGER_COMPUTATION_MODE
5554 void
5555 check_max_integer_computation_mode (exp)
5556 tree exp;
5557 {
5558 enum tree_code code;
5559 enum machine_mode mode;
5560
5561 /* Strip any NOPs that don't change the mode. */
5562 STRIP_NOPS (exp);
5563 code = TREE_CODE (exp);
5564
5565 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5566 if (code == NOP_EXPR
5567 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5568 return;
5569
5570 /* First check the type of the overall operation. We need only look at
5571 unary, binary and relational operations. */
5572 if (TREE_CODE_CLASS (code) == '1'
5573 || TREE_CODE_CLASS (code) == '2'
5574 || TREE_CODE_CLASS (code) == '<')
5575 {
5576 mode = TYPE_MODE (TREE_TYPE (exp));
5577 if (GET_MODE_CLASS (mode) == MODE_INT
5578 && mode > MAX_INTEGER_COMPUTATION_MODE)
5579 fatal ("unsupported wide integer operation");
5580 }
5581
5582 /* Check operand of a unary op. */
5583 if (TREE_CODE_CLASS (code) == '1')
5584 {
5585 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5586 if (GET_MODE_CLASS (mode) == MODE_INT
5587 && mode > MAX_INTEGER_COMPUTATION_MODE)
5588 fatal ("unsupported wide integer operation");
5589 }
5590
5591 /* Check operands of a binary/comparison op. */
5592 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5593 {
5594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5595 if (GET_MODE_CLASS (mode) == MODE_INT
5596 && mode > MAX_INTEGER_COMPUTATION_MODE)
5597 fatal ("unsupported wide integer operation");
5598
5599 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5600 if (GET_MODE_CLASS (mode) == MODE_INT
5601 && mode > MAX_INTEGER_COMPUTATION_MODE)
5602 fatal ("unsupported wide integer operation");
5603 }
5604 }
5605 #endif
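/* For instance (illustrative only): on a target that defines
   MAX_INTEGER_COMPUTATION_MODE as SImode and maps `long long' to DImode,
   the DImode addition in

       long long f (long long a, long long b) { return a + b; }

   is rejected by the checks above with "unsupported wide integer
   operation".  */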
5606
5607 \f
5608 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5609 has any readonly fields. If any of the fields have types that
5610 contain readonly fields, return true as well. */
5611
5612 static int
5613 readonly_fields_p (type)
5614 tree type;
5615 {
5616 tree field;
5617
5618 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5619 if (TREE_CODE (field) == FIELD_DECL
5620 && (TREE_READONLY (field)
5621 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5622 && readonly_fields_p (TREE_TYPE (field)))))
5623 return 1;
5624
5625 return 0;
5626 }
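/* For example (illustrative types only), given

       struct inner { const int key; };
       struct outer { struct inner in; int data; };

   readonly_fields_p returns 1 for both record types: `inner' has a
   readonly FIELD_DECL directly, and `outer' has a field whose record
   type contains one.  */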
5627 \f
5628 /* expand_expr: generate code for computing expression EXP.
5629 An rtx for the computed value is returned. The value is never null.
5630 In the case of a void EXP, const0_rtx is returned.
5631
5632 The value may be stored in TARGET if TARGET is nonzero.
5633 TARGET is just a suggestion; callers must assume that
5634 the rtx returned may not be the same as TARGET.
5635
5636 If TARGET is CONST0_RTX, it means that the value will be ignored.
5637
5638 If TMODE is not VOIDmode, it suggests generating the
5639 result in mode TMODE. But this is done only when convenient.
5640 Otherwise, TMODE is ignored and the value generated in its natural mode.
5641 TMODE is just a suggestion; callers must assume that
5642 the rtx returned may not have mode TMODE.
5643
5644 Note that TARGET may have neither TMODE nor MODE. In that case, it
5645 probably will not be used.
5646
5647 If MODIFIER is EXPAND_SUM then when EXP is an addition
5648 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5649 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5650 products as above, or REG or MEM, or constant.
5651 Ordinarily in such cases we would output mul or add instructions
5652 and then return a pseudo reg containing the sum.
5653
5654 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5655 it also marks a label as absolutely required (it can't be dead).
5656 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5657 This is used for outputting expressions used in initializers.
5658
5659 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5660 with a constant address even if that address is not normally legitimate.
5661 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
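/* A typical call, shown only as a sketch of the interface described above
   (the particular arguments are hypothetical):

       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
                          VOIDmode, EXPAND_NORMAL);

   The caller must cope with the result differing from SUBTARGET and from
   the requested mode, since TARGET and TMODE are only suggestions.  */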
5662
5663 rtx
5664 expand_expr (exp, target, tmode, modifier)
5665 register tree exp;
5666 rtx target;
5667 enum machine_mode tmode;
5668 enum expand_modifier modifier;
5669 {
5670 register rtx op0, op1, temp;
5671 tree type = TREE_TYPE (exp);
5672 int unsignedp = TREE_UNSIGNED (type);
5673 register enum machine_mode mode;
5674 register enum tree_code code = TREE_CODE (exp);
5675 optab this_optab;
5676 rtx subtarget, original_target;
5677 int ignore;
5678 tree context;
5679 /* Used by check-memory-usage to make modifier read only. */
5680 enum expand_modifier ro_modifier;
5681
5682 /* Handle ERROR_MARK before anybody tries to access its type. */
5683 if (TREE_CODE (exp) == ERROR_MARK)
5684 {
5685 op0 = CONST0_RTX (tmode);
5686 if (op0 != 0)
5687 return op0;
5688 return const0_rtx;
5689 }
5690
5691 mode = TYPE_MODE (type);
5692 /* Use subtarget as the target for operand 0 of a binary operation. */
5693 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5694 original_target = target;
5695 ignore = (target == const0_rtx
5696 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5697 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5698 || code == COND_EXPR)
5699 && TREE_CODE (type) == VOID_TYPE));
5700
5701 /* Make a read-only version of the modifier. */
5702 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5703 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5704 ro_modifier = modifier;
5705 else
5706 ro_modifier = EXPAND_NORMAL;
5707
5708 /* Don't use hard regs as subtargets, because the combiner
5709 can only handle pseudo regs. */
5710 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5711 subtarget = 0;
5712 /* Avoid subtargets inside loops,
5713 since they hide some invariant expressions. */
5714 if (preserve_subexpressions_p ())
5715 subtarget = 0;
5716
5717 /* If we are going to ignore this result, we need only do something
5718 if there is a side-effect somewhere in the expression. If there
5719 is, short-circuit the most common cases here. Note that we must
5720 not call expand_expr with anything but const0_rtx in case this
5721 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5722
5723 if (ignore)
5724 {
5725 if (! TREE_SIDE_EFFECTS (exp))
5726 return const0_rtx;
5727
5728 /* Ensure we reference a volatile object even if value is ignored, but
5729 don't do this if all we are doing is taking its address. */
5730 if (TREE_THIS_VOLATILE (exp)
5731 && TREE_CODE (exp) != FUNCTION_DECL
5732 && mode != VOIDmode && mode != BLKmode
5733 && modifier != EXPAND_CONST_ADDRESS)
5734 {
5735 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5736 if (GET_CODE (temp) == MEM)
5737 temp = copy_to_reg (temp);
5738 return const0_rtx;
5739 }
5740
5741 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5742 || code == INDIRECT_REF || code == BUFFER_REF)
5743 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5744 VOIDmode, ro_modifier);
5745 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5746 || code == ARRAY_REF)
5747 {
5748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5749 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5750 return const0_rtx;
5751 }
5752 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5753 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5754 /* If the second operand has no side effects, just evaluate
5755 the first. */
5756 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5757 VOIDmode, ro_modifier);
5758 else if (code == BIT_FIELD_REF)
5759 {
5760 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5761 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5762 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5763 return const0_rtx;
5764 }
5765 ;
5766 target = 0;
5767 }
5768
5769 #ifdef MAX_INTEGER_COMPUTATION_MODE
5770 /* Only check stuff here if the mode we want is different from the mode
5771 of the expression; if it's the same, check_max_integer_computation_mode
5772 will handle it. Do we really need to check this stuff at all? */
5773
5774 if (target
5775 && GET_MODE (target) != mode
5776 && TREE_CODE (exp) != INTEGER_CST
5777 && TREE_CODE (exp) != PARM_DECL
5778 && TREE_CODE (exp) != ARRAY_REF
5779 && TREE_CODE (exp) != COMPONENT_REF
5780 && TREE_CODE (exp) != BIT_FIELD_REF
5781 && TREE_CODE (exp) != INDIRECT_REF
5782 && TREE_CODE (exp) != CALL_EXPR
5783 && TREE_CODE (exp) != VAR_DECL
5784 && TREE_CODE (exp) != RTL_EXPR)
5785 {
5786 enum machine_mode mode = GET_MODE (target);
5787
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 fatal ("unsupported wide integer operation");
5791 }
5792
5793 if (tmode != mode
5794 && TREE_CODE (exp) != INTEGER_CST
5795 && TREE_CODE (exp) != PARM_DECL
5796 && TREE_CODE (exp) != ARRAY_REF
5797 && TREE_CODE (exp) != COMPONENT_REF
5798 && TREE_CODE (exp) != BIT_FIELD_REF
5799 && TREE_CODE (exp) != INDIRECT_REF
5800 && TREE_CODE (exp) != VAR_DECL
5801 && TREE_CODE (exp) != CALL_EXPR
5802 && TREE_CODE (exp) != RTL_EXPR
5803 && GET_MODE_CLASS (tmode) == MODE_INT
5804 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5805 fatal ("unsupported wide integer operation");
5806
5807 check_max_integer_computation_mode (exp);
5808 #endif
5809
5810 /* If will do cse, generate all results into pseudo registers
5811 since 1) that allows cse to find more things
5812 and 2) otherwise cse could produce an insn the machine
5813 cannot support. */
5814
5815 if (! cse_not_expected && mode != BLKmode && target
5816 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5817 target = subtarget;
5818
5819 switch (code)
5820 {
5821 case LABEL_DECL:
5822 {
5823 tree function = decl_function_context (exp);
5824 /* Handle using a label in a containing function. */
5825 if (function != current_function_decl
5826 && function != inline_function_decl && function != 0)
5827 {
5828 struct function *p = find_function_data (function);
5829 /* Allocate in the memory associated with the function
5830 that the label is in. */
5831 push_obstacks (p->function_obstack,
5832 p->function_maybepermanent_obstack);
5833
5834 p->expr->x_forced_labels
5835 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5836 p->expr->x_forced_labels);
5837 pop_obstacks ();
5838 }
5839 else
5840 {
5841 if (modifier == EXPAND_INITIALIZER)
5842 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5843 label_rtx (exp),
5844 forced_labels);
5845 }
5846
5847 temp = gen_rtx_MEM (FUNCTION_MODE,
5848 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5849 if (function != current_function_decl
5850 && function != inline_function_decl && function != 0)
5851 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5852 return temp;
5853 }
5854
5855 case PARM_DECL:
5856 if (DECL_RTL (exp) == 0)
5857 {
5858 error_with_decl (exp, "prior parameter's size depends on `%s'");
5859 return CONST0_RTX (mode);
5860 }
5861
5862 /* ... fall through ... */
5863
5864 case VAR_DECL:
5865 /* If a static var's type was incomplete when the decl was written,
5866 but the type is complete now, lay out the decl now. */
5867 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5868 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5869 {
5870 push_obstacks_nochange ();
5871 end_temporary_allocation ();
5872 layout_decl (exp, 0);
5873 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5874 pop_obstacks ();
5875 }
5876
5877 /* Although static-storage variables start off initialized, according to
5878 ANSI C, a memcpy could overwrite them with uninitialized values. So
5879 we check them too. This also lets us check for read-only variables
5880 accessed via a non-const declaration, in case it won't be detected
5881 any other way (e.g., in an embedded system or OS kernel without
5882 memory protection).
5883
5884 Aggregates are not checked here; they're handled elsewhere. */
5885 if (cfun && current_function_check_memory_usage
5886 && code == VAR_DECL
5887 && GET_CODE (DECL_RTL (exp)) == MEM
5888 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5889 {
5890 enum memory_use_mode memory_usage;
5891 memory_usage = get_memory_usage_from_modifier (modifier);
5892
5893 if (memory_usage != MEMORY_USE_DONT)
5894 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5895 XEXP (DECL_RTL (exp), 0), Pmode,
5896 GEN_INT (int_size_in_bytes (type)),
5897 TYPE_MODE (sizetype),
5898 GEN_INT (memory_usage),
5899 TYPE_MODE (integer_type_node));
5900 }
5901
5902 /* ... fall through ... */
5903
5904 case FUNCTION_DECL:
5905 case RESULT_DECL:
5906 if (DECL_RTL (exp) == 0)
5907 abort ();
5908
5909 /* Ensure the variable is marked as used even if it doesn't go through
5910 a parser. If it hasn't been used yet, write out an external
5911 definition. */
5912 if (! TREE_USED (exp))
5913 {
5914 assemble_external (exp);
5915 TREE_USED (exp) = 1;
5916 }
5917
5918 /* Show we haven't gotten RTL for this yet. */
5919 temp = 0;
5920
5921 /* Handle variables inherited from containing functions. */
5922 context = decl_function_context (exp);
5923
5924 /* We treat inline_function_decl as an alias for the current function
5925 because that is the inline function whose vars, types, etc.
5926 are being merged into the current function.
5927 See expand_inline_function. */
5928
5929 if (context != 0 && context != current_function_decl
5930 && context != inline_function_decl
5931 /* If var is static, we don't need a static chain to access it. */
5932 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5933 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5934 {
5935 rtx addr;
5936
5937 /* Mark as non-local and addressable. */
5938 DECL_NONLOCAL (exp) = 1;
5939 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5940 abort ();
5941 mark_addressable (exp);
5942 if (GET_CODE (DECL_RTL (exp)) != MEM)
5943 abort ();
5944 addr = XEXP (DECL_RTL (exp), 0);
5945 if (GET_CODE (addr) == MEM)
5946 addr = gen_rtx_MEM (Pmode,
5947 fix_lexical_addr (XEXP (addr, 0), exp));
5948 else
5949 addr = fix_lexical_addr (addr, exp);
5950 temp = change_address (DECL_RTL (exp), mode, addr);
5951 }
5952
5953 /* This is the case of an array whose size is to be determined
5954 from its initializer, while the initializer is still being parsed.
5955 See expand_decl. */
5956
5957 else if (GET_CODE (DECL_RTL (exp)) == MEM
5958 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5959 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5960 XEXP (DECL_RTL (exp), 0));
5961
5962 /* If DECL_RTL is memory, we are in the normal case; if the address
5963 is not valid, or if it is not a register and -fforce-addr
5964 is specified, get the address into a register. */
5965
5966 else if (GET_CODE (DECL_RTL (exp)) == MEM
5967 && modifier != EXPAND_CONST_ADDRESS
5968 && modifier != EXPAND_SUM
5969 && modifier != EXPAND_INITIALIZER
5970 && (! memory_address_p (DECL_MODE (exp),
5971 XEXP (DECL_RTL (exp), 0))
5972 || (flag_force_addr
5973 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5974 temp = change_address (DECL_RTL (exp), VOIDmode,
5975 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5976
5977 /* If we got something, return it. But first, set the alignment
5978 if the address is a register. */
5979 if (temp != 0)
5980 {
5981 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5982 mark_reg_pointer (XEXP (temp, 0),
5983 DECL_ALIGN (exp) / BITS_PER_UNIT);
5984
5985 return temp;
5986 }
5987
5988 /* If the mode of DECL_RTL does not match that of the decl, it
5989 must be a promoted value. We return a SUBREG of the wanted mode,
5990 but mark it so that we know that it was already extended. */
5991
5992 if (GET_CODE (DECL_RTL (exp)) == REG
5993 && GET_MODE (DECL_RTL (exp)) != mode)
5994 {
5995 /* Get the signedness used for this variable. Ensure we get the
5996 same mode we got when the variable was declared. */
5997 if (GET_MODE (DECL_RTL (exp))
5998 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5999 abort ();
6000
6001 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6002 SUBREG_PROMOTED_VAR_P (temp) = 1;
6003 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6004 return temp;
6005 }
6006
6007 return DECL_RTL (exp);
6008
6009 case INTEGER_CST:
6010 return immed_double_const (TREE_INT_CST_LOW (exp),
6011 TREE_INT_CST_HIGH (exp),
6012 mode);
6013
6014 case CONST_DECL:
6015 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6016 EXPAND_MEMORY_USE_BAD);
6017
6018 case REAL_CST:
6019 /* If optimized, generate immediate CONST_DOUBLE
6020 which will be turned into memory by reload if necessary.
6021
6022 We used to force a register so that loop.c could see it. But
6023 this does not allow gen_* patterns to perform optimizations with
6024 the constants. It also produces two insns in cases like "x = 1.0;".
6025 On most machines, floating-point constants are not permitted in
6026 many insns, so we'd end up copying it to a register in any case.
6027
6028 Now, we do the copying in expand_binop, if appropriate. */
6029 return immed_real_const (exp);
6030
6031 case COMPLEX_CST:
6032 case STRING_CST:
6033 if (! TREE_CST_RTL (exp))
6034 output_constant_def (exp);
6035
6036 /* TREE_CST_RTL probably contains a constant address.
6037 On RISC machines where a constant address isn't valid,
6038 make some insns to get that address into a register. */
6039 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6040 && modifier != EXPAND_CONST_ADDRESS
6041 && modifier != EXPAND_INITIALIZER
6042 && modifier != EXPAND_SUM
6043 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6044 || (flag_force_addr
6045 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6046 return change_address (TREE_CST_RTL (exp), VOIDmode,
6047 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6048 return TREE_CST_RTL (exp);
6049
6050 case EXPR_WITH_FILE_LOCATION:
6051 {
6052 rtx to_return;
6053 char *saved_input_filename = input_filename;
6054 int saved_lineno = lineno;
6055 input_filename = EXPR_WFL_FILENAME (exp);
6056 lineno = EXPR_WFL_LINENO (exp);
6057 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6058 emit_line_note (input_filename, lineno);
6059 /* Possibly avoid switching back and forth here. */
6060 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6061 input_filename = saved_input_filename;
6062 lineno = saved_lineno;
6063 return to_return;
6064 }
6065
6066 case SAVE_EXPR:
6067 context = decl_function_context (exp);
6068
6069 /* If this SAVE_EXPR was at global context, assume we are an
6070 initialization function and move it into our context. */
6071 if (context == 0)
6072 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6073
6074 /* We treat inline_function_decl as an alias for the current function
6075 because that is the inline function whose vars, types, etc.
6076 are being merged into the current function.
6077 See expand_inline_function. */
6078 if (context == current_function_decl || context == inline_function_decl)
6079 context = 0;
6080
6081 /* If this is non-local, handle it. */
6082 if (context)
6083 {
6084 /* The following call just exists to abort if the context is
6085 not of a containing function. */
6086 find_function_data (context);
6087
6088 temp = SAVE_EXPR_RTL (exp);
6089 if (temp && GET_CODE (temp) == REG)
6090 {
6091 put_var_into_stack (exp);
6092 temp = SAVE_EXPR_RTL (exp);
6093 }
6094 if (temp == 0 || GET_CODE (temp) != MEM)
6095 abort ();
6096 return change_address (temp, mode,
6097 fix_lexical_addr (XEXP (temp, 0), exp));
6098 }
6099 if (SAVE_EXPR_RTL (exp) == 0)
6100 {
6101 if (mode == VOIDmode)
6102 temp = const0_rtx;
6103 else
6104 temp = assign_temp (type, 3, 0, 0);
6105
6106 SAVE_EXPR_RTL (exp) = temp;
6107 if (!optimize && GET_CODE (temp) == REG)
6108 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6109 save_expr_regs);
6110
6111 /* If the mode of TEMP does not match that of the expression, it
6112 must be a promoted value. We pass store_expr a SUBREG of the
6113 wanted mode but mark it so that we know that it was already
6114 extended. Note that `unsignedp' was modified above in
6115 this case. */
6116
6117 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6118 {
6119 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6120 SUBREG_PROMOTED_VAR_P (temp) = 1;
6121 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6122 }
6123
6124 if (temp == const0_rtx)
6125 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6126 EXPAND_MEMORY_USE_BAD);
6127 else
6128 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6129
6130 TREE_USED (exp) = 1;
6131 }
6132
6133 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6134 must be a promoted value. We return a SUBREG of the wanted mode,
6135 but mark it so that we know that it was already extended. */
6136
6137 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6138 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6139 {
6140 /* Compute the signedness and make the proper SUBREG. */
6141 promote_mode (type, mode, &unsignedp, 0);
6142 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6143 SUBREG_PROMOTED_VAR_P (temp) = 1;
6144 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6145 return temp;
6146 }
6147
6148 return SAVE_EXPR_RTL (exp);
6149
6150 case UNSAVE_EXPR:
6151 {
6152 rtx temp;
6153 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6154 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6155 return temp;
6156 }
6157
6158 case PLACEHOLDER_EXPR:
6159 {
6160 tree placeholder_expr;
6161
6162 /* If there is an object on the head of the placeholder list,
6163 see if some object in it is of type TYPE or a pointer to it. For
6164 further information, see tree.def. */
6165 for (placeholder_expr = placeholder_list;
6166 placeholder_expr != 0;
6167 placeholder_expr = TREE_CHAIN (placeholder_expr))
6168 {
6169 tree need_type = TYPE_MAIN_VARIANT (type);
6170 tree object = 0;
6171 tree old_list = placeholder_list;
6172 tree elt;
6173
6174 /* Find the outermost reference that is of the type we want.
6175 If none, see if any object has a type that is a pointer to
6176 the type we want. */
6177 for (elt = TREE_PURPOSE (placeholder_expr);
6178 elt != 0 && object == 0;
6179 elt
6180 = ((TREE_CODE (elt) == COMPOUND_EXPR
6181 || TREE_CODE (elt) == COND_EXPR)
6182 ? TREE_OPERAND (elt, 1)
6183 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6184 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6186 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6187 ? TREE_OPERAND (elt, 0) : 0))
6188 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6189 object = elt;
6190
6191 for (elt = TREE_PURPOSE (placeholder_expr);
6192 elt != 0 && object == 0;
6193 elt
6194 = ((TREE_CODE (elt) == COMPOUND_EXPR
6195 || TREE_CODE (elt) == COND_EXPR)
6196 ? TREE_OPERAND (elt, 1)
6197 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6198 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6200 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6201 ? TREE_OPERAND (elt, 0) : 0))
6202 if (POINTER_TYPE_P (TREE_TYPE (elt))
6203 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6204 == need_type))
6205 object = build1 (INDIRECT_REF, need_type, elt);
6206
6207 if (object != 0)
6208 {
6209 /* Expand this object skipping the list entries before
6210 it was found in case it is also a PLACEHOLDER_EXPR.
6211 In that case, we want to translate it using subsequent
6212 entries. */
6213 placeholder_list = TREE_CHAIN (placeholder_expr);
6214 temp = expand_expr (object, original_target, tmode,
6215 ro_modifier);
6216 placeholder_list = old_list;
6217 return temp;
6218 }
6219 }
6220 }
6221
6222 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6223 abort ();
6224
6225 case WITH_RECORD_EXPR:
6226 /* Put the object on the placeholder list, expand our first operand,
6227 and pop the list. */
6228 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6229 placeholder_list);
6230 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6231 tmode, ro_modifier);
6232 placeholder_list = TREE_CHAIN (placeholder_list);
6233 return target;
6234
6235 case GOTO_EXPR:
6236 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6237 expand_goto (TREE_OPERAND (exp, 0));
6238 else
6239 expand_computed_goto (TREE_OPERAND (exp, 0));
6240 return const0_rtx;
6241
6242 case EXIT_EXPR:
6243 expand_exit_loop_if_false (NULL_PTR,
6244 invert_truthvalue (TREE_OPERAND (exp, 0)));
6245 return const0_rtx;
6246
6247 case LABELED_BLOCK_EXPR:
6248 if (LABELED_BLOCK_BODY (exp))
6249 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6250 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6251 return const0_rtx;
6252
6253 case EXIT_BLOCK_EXPR:
6254 if (EXIT_BLOCK_RETURN (exp))
6255 sorry ("returned value in block_exit_expr");
6256 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6257 return const0_rtx;
6258
6259 case LOOP_EXPR:
6260 push_temp_slots ();
6261 expand_start_loop (1);
6262 expand_expr_stmt (TREE_OPERAND (exp, 0));
6263 expand_end_loop ();
6264 pop_temp_slots ();
6265
6266 return const0_rtx;
6267
6268 case BIND_EXPR:
6269 {
6270 tree vars = TREE_OPERAND (exp, 0);
6271 int vars_need_expansion = 0;
6272
6273 /* Need to open a binding contour here because
6274 if there are any cleanups they must be contained here. */
6275 expand_start_bindings (2);
6276
6277 /* Mark the corresponding BLOCK for output in its proper place. */
6278 if (TREE_OPERAND (exp, 2) != 0
6279 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6280 insert_block (TREE_OPERAND (exp, 2));
6281
6282 /* If VARS have not yet been expanded, expand them now. */
6283 while (vars)
6284 {
6285 if (DECL_RTL (vars) == 0)
6286 {
6287 vars_need_expansion = 1;
6288 expand_decl (vars);
6289 }
6290 expand_decl_init (vars);
6291 vars = TREE_CHAIN (vars);
6292 }
6293
6294 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6295
6296 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6297
6298 return temp;
6299 }
6300
6301 case RTL_EXPR:
6302 if (RTL_EXPR_SEQUENCE (exp))
6303 {
6304 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6305 abort ();
6306 emit_insns (RTL_EXPR_SEQUENCE (exp));
6307 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6308 }
6309 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6310 free_temps_for_rtl_expr (exp);
6311 return RTL_EXPR_RTL (exp);
6312
6313 case CONSTRUCTOR:
6314 /* If we don't need the result, just ensure we evaluate any
6315 subexpressions. */
6316 if (ignore)
6317 {
6318 tree elt;
6319 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6320 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6321 EXPAND_MEMORY_USE_BAD);
6322 return const0_rtx;
6323 }
6324
6325 /* All elts simple constants => refer to a constant in memory. But
6326 if this is a non-BLKmode mode, let it store a field at a time
6327 since that should make a CONST_INT or CONST_DOUBLE when we
6328 fold. Likewise, if we have a target we can use, it is best to
6329 store directly into the target unless the type is large enough
6330 that memcpy will be used. If we are making an initializer and
6331 all operands are constant, put it in memory as well. */
6332 else if ((TREE_STATIC (exp)
6333 && ((mode == BLKmode
6334 && ! (target != 0 && safe_from_p (target, exp, 1)))
6335 || TREE_ADDRESSABLE (exp)
6336 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6337 && (!MOVE_BY_PIECES_P
6338 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6339 TYPE_ALIGN (type) / BITS_PER_UNIT))
6340 && ! mostly_zeros_p (exp))))
6341 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6342 {
6343 rtx constructor = output_constant_def (exp);
6344 if (modifier != EXPAND_CONST_ADDRESS
6345 && modifier != EXPAND_INITIALIZER
6346 && modifier != EXPAND_SUM
6347 && (! memory_address_p (GET_MODE (constructor),
6348 XEXP (constructor, 0))
6349 || (flag_force_addr
6350 && GET_CODE (XEXP (constructor, 0)) != REG)))
6351 constructor = change_address (constructor, VOIDmode,
6352 XEXP (constructor, 0));
6353 return constructor;
6354 }
6355
6356 else
6357 {
6358 /* Handle calls that pass values in multiple non-contiguous
6359 locations. The Irix 6 ABI has examples of this. */
6360 if (target == 0 || ! safe_from_p (target, exp, 1)
6361 || GET_CODE (target) == PARALLEL)
6362 {
6363 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6364 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6365 else
6366 target = assign_temp (type, 0, 1, 1);
6367 }
6368
6369 if (TREE_READONLY (exp))
6370 {
6371 if (GET_CODE (target) == MEM)
6372 target = copy_rtx (target);
6373
6374 RTX_UNCHANGING_P (target) = 1;
6375 }
6376
6377 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6378 int_size_in_bytes (TREE_TYPE (exp)));
6379 return target;
6380 }
6381
6382 case INDIRECT_REF:
6383 {
6384 tree exp1 = TREE_OPERAND (exp, 0);
6385 tree exp2;
6386 tree index;
6387 tree string = string_constant (exp1, &index);
6388 int i;
6389
6390 /* Try to optimize reads from const strings. */
6391 if (string
6392 && TREE_CODE (string) == STRING_CST
6393 && TREE_CODE (index) == INTEGER_CST
6394 && !TREE_INT_CST_HIGH (index)
6395 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6396 && GET_MODE_CLASS (mode) == MODE_INT
6397 && GET_MODE_SIZE (mode) == 1
6398 && modifier != EXPAND_MEMORY_USE_WO)
6399 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6400
6401 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6402 op0 = memory_address (mode, op0);
6403
6404 if (cfun && current_function_check_memory_usage
6405 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6406 {
6407 enum memory_use_mode memory_usage;
6408 memory_usage = get_memory_usage_from_modifier (modifier);
6409
6410 if (memory_usage != MEMORY_USE_DONT)
6411 {
6412 in_check_memory_usage = 1;
6413 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6414 op0, Pmode,
6415 GEN_INT (int_size_in_bytes (type)),
6416 TYPE_MODE (sizetype),
6417 GEN_INT (memory_usage),
6418 TYPE_MODE (integer_type_node));
6419 in_check_memory_usage = 0;
6420 }
6421 }
6422
6423 temp = gen_rtx_MEM (mode, op0);
6424 /* If address was computed by addition,
6425 mark this as an element of an aggregate. */
6426 if (TREE_CODE (exp1) == PLUS_EXPR
6427 || (TREE_CODE (exp1) == SAVE_EXPR
6428 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6429 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6430 || (TREE_CODE (exp1) == ADDR_EXPR
6431 && (exp2 = TREE_OPERAND (exp1, 0))
6432 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6433 MEM_SET_IN_STRUCT_P (temp, 1);
6434
6435 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6436 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6437
6438 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6439 here, because, in C and C++, the fact that a location is accessed
6440 through a pointer to const does not mean that the value there can
6441 never change. Languages where it can never change should
6442 also set TREE_STATIC. */
6443 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6444
6445 /* If we are writing to this object and its type is a record with
6446 readonly fields, we must mark it as readonly so it will
6447 conflict with readonly references to those fields. */
6448 if (modifier == EXPAND_MEMORY_USE_WO
6449 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6450 RTX_UNCHANGING_P (temp) = 1;
6451
6452 return temp;
6453 }
6454
6455 case ARRAY_REF:
6456 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6457 abort ();
6458
6459 {
6460 tree array = TREE_OPERAND (exp, 0);
6461 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6462 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6463 tree index = TREE_OPERAND (exp, 1);
6464 tree index_type = TREE_TYPE (index);
6465 HOST_WIDE_INT i;
6466
6467 /* Optimize the special-case of a zero lower bound.
6468
6469 We convert the low_bound to sizetype to avoid some problems
6470 with constant folding. (E.g. suppose the lower bound is 1,
6471 and its mode is QI. Without the conversion, (ARRAY
6472 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6473 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6474
6475 But sizetype isn't quite right either (especially if
6476 the low bound is negative). FIXME */
6477
6478 if (! integer_zerop (low_bound))
6479 index = fold (build (MINUS_EXPR, index_type, index,
6480 convert (sizetype, low_bound)));
6481
6482 /* Fold an expression like: "foo"[2].
6483 This is not done in fold so it won't happen inside &.
6484 Don't fold if this is for wide characters since it's too
6485 difficult to do correctly and this is a very rare case. */
6486
6487 if (TREE_CODE (array) == STRING_CST
6488 && TREE_CODE (index) == INTEGER_CST
6489 && !TREE_INT_CST_HIGH (index)
6490 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6491 && GET_MODE_CLASS (mode) == MODE_INT
6492 && GET_MODE_SIZE (mode) == 1)
6493 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6494
6495 /* If this is a constant index into a constant array,
6496 just get the value from the array. Handle both the cases when
6497 we have an explicit constructor and when our operand is a variable
6498 that was declared const. */
6499
6500 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6501 {
6502 if (TREE_CODE (index) == INTEGER_CST
6503 && TREE_INT_CST_HIGH (index) == 0)
6504 {
6505 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6506
6507 i = TREE_INT_CST_LOW (index);
6508 while (elem && i--)
6509 elem = TREE_CHAIN (elem);
6510 if (elem)
6511 return expand_expr (fold (TREE_VALUE (elem)), target,
6512 tmode, ro_modifier);
6513 }
6514 }
6515
6516 else if (optimize >= 1
6517 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6518 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6519 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6520 {
6521 if (TREE_CODE (index) == INTEGER_CST)
6522 {
6523 tree init = DECL_INITIAL (array);
6524
6525 i = TREE_INT_CST_LOW (index);
6526 if (TREE_CODE (init) == CONSTRUCTOR)
6527 {
6528 tree elem = CONSTRUCTOR_ELTS (init);
6529
6530 while (elem
6531 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6532 elem = TREE_CHAIN (elem);
6533 if (elem)
6534 return expand_expr (fold (TREE_VALUE (elem)), target,
6535 tmode, ro_modifier);
6536 }
6537 else if (TREE_CODE (init) == STRING_CST
6538 && TREE_INT_CST_HIGH (index) == 0
6539 && (TREE_INT_CST_LOW (index)
6540 < TREE_STRING_LENGTH (init)))
6541 return (GEN_INT
6542 (TREE_STRING_POINTER
6543 (init)[TREE_INT_CST_LOW (index)]));
6544 }
6545 }
6546 }
6547
6548 /* ... fall through ... */
6549
6550 case COMPONENT_REF:
6551 case BIT_FIELD_REF:
6552 /* If the operand is a CONSTRUCTOR, we can just extract the
6553 appropriate field if it is present. Don't do this if we have
6554 already written the data since we want to refer to that copy
6555 and varasm.c assumes that's what we'll do. */
6556 if (code != ARRAY_REF
6557 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6558 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6559 {
6560 tree elt;
6561
6562 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6563 elt = TREE_CHAIN (elt))
6564 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6565 /* We can normally use the value of the field in the
6566 CONSTRUCTOR. However, if this is a bitfield in
6567 an integral mode that we can fit in a HOST_WIDE_INT,
6568 we must mask only the number of bits in the bitfield,
6569 since this is done implicitly by the constructor. If
6570 the bitfield does not meet either of those conditions,
6571 we can't do this optimization. */
6572 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6573 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6574 == MODE_INT)
6575 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6576 <= HOST_BITS_PER_WIDE_INT))))
6577 {
6578 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6579 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6580 {
6581 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6582
6583 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6584 {
6585 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6586 op0 = expand_and (op0, op1, target);
6587 }
6588 else
6589 {
6590 enum machine_mode imode
6591 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6592 tree count
6593 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6594 0);
6595
6596 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6597 target, 0);
6598 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6599 target, 0);
6600 }
6601 }
6602
6603 return op0;
6604 }
6605 }
6606
6607 {
6608 enum machine_mode mode1;
6609 int bitsize;
6610 int bitpos;
6611 tree offset;
6612 int volatilep = 0;
6613 int alignment;
6614 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6615 &mode1, &unsignedp, &volatilep,
6616 &alignment);
6617
6618 /* If we got back the original object, something is wrong. Perhaps
6619 we are evaluating an expression too early. In any event, don't
6620 infinitely recurse. */
6621 if (tem == exp)
6622 abort ();
6623
6624 /* If TEM's type is a union of variable size, pass TARGET to the inner
6625 computation, since it will need a temporary and TARGET is known
6626 to be able to hold it. This occurs in unchecked conversion in Ada. */
6627
6628 op0 = expand_expr (tem,
6629 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6630 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6631 != INTEGER_CST)
6632 ? target : NULL_RTX),
6633 VOIDmode,
6634 (modifier == EXPAND_INITIALIZER
6635 || modifier == EXPAND_CONST_ADDRESS)
6636 ? modifier : EXPAND_NORMAL);
6637
6638 /* If this is a constant, put it into a register if it is a
6639 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6640 if (CONSTANT_P (op0))
6641 {
6642 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6643 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6644 && offset == 0)
6645 op0 = force_reg (mode, op0);
6646 else
6647 op0 = validize_mem (force_const_mem (mode, op0));
6648 }
6649
6650 if (offset != 0)
6651 {
6652 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6653
6654 /* If this object is in memory, put it into a register.
6655 This case can't occur in C, but can in Ada if we have
6656 unchecked conversion of an expression from a scalar type to
6657 an array or record type. */
6658 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6659 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6660 {
6661 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6662
6663 mark_temp_addr_taken (memloc);
6664 emit_move_insn (memloc, op0);
6665 op0 = memloc;
6666 }
6667
6668 if (GET_CODE (op0) != MEM)
6669 abort ();
6670
6671 if (GET_MODE (offset_rtx) != ptr_mode)
6672 {
6673 #ifdef POINTERS_EXTEND_UNSIGNED
6674 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6675 #else
6676 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6677 #endif
6678 }
6679
6680 /* A constant address in OP0 can have VOIDmode; we must not try
6681 to call force_reg in that case, so avoid it. */
6682 if (GET_CODE (op0) == MEM
6683 && GET_MODE (op0) == BLKmode
6684 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6685 && bitsize != 0
6686 && (bitpos % bitsize) == 0
6687 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6688 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6689 {
6690 rtx temp = change_address (op0, mode1,
6691 plus_constant (XEXP (op0, 0),
6692 (bitpos /
6693 BITS_PER_UNIT)));
6694 if (GET_CODE (XEXP (temp, 0)) == REG)
6695 op0 = temp;
6696 else
6697 op0 = change_address (op0, mode1,
6698 force_reg (GET_MODE (XEXP (temp, 0)),
6699 XEXP (temp, 0)));
6700 bitpos = 0;
6701 }
6702
6703
6704 op0 = change_address (op0, VOIDmode,
6705 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6706 force_reg (ptr_mode,
6707 offset_rtx)));
6708 }
6709
6710 /* Don't forget about volatility even if this is a bitfield. */
6711 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6712 {
6713 op0 = copy_rtx (op0);
6714 MEM_VOLATILE_P (op0) = 1;
6715 }
6716
6717 /* Check the access. */
6718 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6719 {
6720 enum memory_use_mode memory_usage;
6721 memory_usage = get_memory_usage_from_modifier (modifier);
6722
6723 if (memory_usage != MEMORY_USE_DONT)
6724 {
6725 rtx to;
6726 int size;
6727
6728 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6729 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6730
6731 /* Check the access right of the pointer. */
6732 if (size > BITS_PER_UNIT)
6733 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6734 to, Pmode,
6735 GEN_INT (size / BITS_PER_UNIT),
6736 TYPE_MODE (sizetype),
6737 GEN_INT (memory_usage),
6738 TYPE_MODE (integer_type_node));
6739 }
6740 }
6741
6742 /* In cases where an aligned union has an unaligned object
6743 as a field, we might be extracting a BLKmode value from
6744 an integer-mode (e.g., SImode) object. Handle this case
6745 by doing the extract into an object as wide as the field
6746 (which we know to be the width of a basic mode), then
6747 storing into memory, and changing the mode to BLKmode.
6748 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6749 EXPAND_INITIALIZER), then we must not copy to a temporary. */
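	  /* One plausible instance (purely illustrative): a union such as

		 union u { int i; char c[3]; };

	     may be given SImode as a whole while the member `c' is BLKmode,
	     so a read of `c' takes the path described above: extract a
	     word-sized value, store it into a stack temporary, and return
	     that temporary with its mode changed to BLKmode.  */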
6750 if (mode1 == VOIDmode
6751 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6752 || (modifier != EXPAND_CONST_ADDRESS
6753 && modifier != EXPAND_INITIALIZER
6754 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6755 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6756 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6757 /* If the field isn't aligned enough to fetch as a memref,
6758 fetch it as a bit field. */
6759 || (mode1 != BLKmode
6760 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6761 && ((TYPE_ALIGN (TREE_TYPE (tem))
6762 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6763 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6764 || (modifier != EXPAND_CONST_ADDRESS
6765 && modifier != EXPAND_INITIALIZER
6766 && mode == BLKmode
6767 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6768 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6769 || bitpos % TYPE_ALIGN (type) != 0)))
6770 {
6771 enum machine_mode ext_mode = mode;
6772
6773 if (ext_mode == BLKmode
6774 && ! (target != 0 && GET_CODE (op0) == MEM
6775 && GET_CODE (target) == MEM
6776 && bitpos % BITS_PER_UNIT == 0))
6777 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6778
6779 if (ext_mode == BLKmode)
6780 {
6781 /* In this case, BITPOS must start at a byte boundary and
6782 TARGET, if specified, must be a MEM. */
6783 if (GET_CODE (op0) != MEM
6784 || (target != 0 && GET_CODE (target) != MEM)
6785 || bitpos % BITS_PER_UNIT != 0)
6786 abort ();
6787
6788 op0 = change_address (op0, VOIDmode,
6789 plus_constant (XEXP (op0, 0),
6790 bitpos / BITS_PER_UNIT));
6791 if (target == 0)
6792 target = assign_temp (type, 0, 1, 1);
6793
6794 emit_block_move (target, op0,
6795 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6796 / BITS_PER_UNIT),
6797 1);
6798
6799 return target;
6800 }
6801
6802 op0 = validize_mem (op0);
6803
6804 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6805 mark_reg_pointer (XEXP (op0, 0), alignment);
6806
6807 op0 = extract_bit_field (op0, bitsize, bitpos,
6808 unsignedp, target, ext_mode, ext_mode,
6809 alignment,
6810 int_size_in_bytes (TREE_TYPE (tem)));
6811
6812 /* If the result is a record type and BITSIZE is narrower than
6813 the mode of OP0, an integral mode, and this is a big endian
6814 machine, we must put the field into the high-order bits. */
6815 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6816 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6817 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6818 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6819 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6820 - bitsize),
6821 op0, 1);
6822
6823 if (mode == BLKmode)
6824 {
6825 rtx new = assign_stack_temp (ext_mode,
6826 bitsize / BITS_PER_UNIT, 0);
6827
6828 emit_move_insn (new, op0);
6829 op0 = copy_rtx (new);
6830 PUT_MODE (op0, BLKmode);
6831 MEM_SET_IN_STRUCT_P (op0, 1);
6832 }
6833
6834 return op0;
6835 }
6836
6837 /* If the result is BLKmode, use that to access the object
6838 now as well. */
6839 if (mode == BLKmode)
6840 mode1 = BLKmode;
6841
6842 /* Get a reference to just this component. */
6843 if (modifier == EXPAND_CONST_ADDRESS
6844 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6845 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6846 (bitpos / BITS_PER_UNIT)));
6847 else
6848 op0 = change_address (op0, mode1,
6849 plus_constant (XEXP (op0, 0),
6850 (bitpos / BITS_PER_UNIT)));
6851
6852 if (GET_CODE (op0) == MEM)
6853 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6854
6855 if (GET_CODE (XEXP (op0, 0)) == REG)
6856 mark_reg_pointer (XEXP (op0, 0), alignment);
6857
6858 MEM_SET_IN_STRUCT_P (op0, 1);
6859 MEM_VOLATILE_P (op0) |= volatilep;
6860 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6861 || modifier == EXPAND_CONST_ADDRESS
6862 || modifier == EXPAND_INITIALIZER)
6863 return op0;
6864 else if (target == 0)
6865 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6866
6867 convert_move (target, op0, unsignedp);
6868 return target;
6869 }
6870
6871 /* Intended for a reference to a buffer of a file-object in Pascal.
6872 But it's not certain that a special tree code will really be
6873 necessary for these. INDIRECT_REF might work for them. */
6874 case BUFFER_REF:
6875 abort ();
6876
6877 case IN_EXPR:
6878 {
6879 /* Pascal set IN expression.
6880
6881 Algorithm:
6882 rlo = set_low - (set_low%bits_per_word);
6883 the_word = set [ (index - rlo)/bits_per_word ];
6884 bit_index = index % bits_per_word;
6885 bitmask = 1 << bit_index;
6886 return !!(the_word & bitmask); */
6887
6888 tree set = TREE_OPERAND (exp, 0);
6889 tree index = TREE_OPERAND (exp, 1);
6890 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6891 tree set_type = TREE_TYPE (set);
6892 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6893 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6894 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6895 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6896 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6897 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6898 rtx setaddr = XEXP (setval, 0);
6899 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6900 rtx rlow;
6901 rtx diff, quo, rem, addr, bit, result;
6902
6903 preexpand_calls (exp);
6904
6905 /* If domain is empty, answer is no. Likewise if index is constant
6906 and out of bounds. */
6907 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6908 && TREE_CODE (set_low_bound) == INTEGER_CST
6909 && tree_int_cst_lt (set_high_bound, set_low_bound))
6910 || (TREE_CODE (index) == INTEGER_CST
6911 && TREE_CODE (set_low_bound) == INTEGER_CST
6912 && tree_int_cst_lt (index, set_low_bound))
6913 || (TREE_CODE (set_high_bound) == INTEGER_CST
6914 && TREE_CODE (index) == INTEGER_CST
6915 && tree_int_cst_lt (set_high_bound, index))))
6916 return const0_rtx;
6917
6918 if (target == 0)
6919 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6920
6921 /* If we get here, we have to generate the code for both cases
6922 (in range and out of range). */
6923
6924 op0 = gen_label_rtx ();
6925 op1 = gen_label_rtx ();
6926
6927 if (! (GET_CODE (index_val) == CONST_INT
6928 && GET_CODE (lo_r) == CONST_INT))
6929 {
6930 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6931 GET_MODE (index_val), iunsignedp, 0, op1);
6932 }
6933
6934 if (! (GET_CODE (index_val) == CONST_INT
6935 && GET_CODE (hi_r) == CONST_INT))
6936 {
6937 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6938 GET_MODE (index_val), iunsignedp, 0, op1);
6939 }
6940
6941 /* Calculate the element number of bit zero in the first word
6942 of the set. */
6943 if (GET_CODE (lo_r) == CONST_INT)
6944 rlow = GEN_INT (INTVAL (lo_r)
6945 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6946 else
6947 rlow = expand_binop (index_mode, and_optab, lo_r,
6948 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6949 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6950
6951 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6952 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6953
6954 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6955 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6956 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6957 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6958
6959 addr = memory_address (byte_mode,
6960 expand_binop (index_mode, add_optab, diff,
6961 setaddr, NULL_RTX, iunsignedp,
6962 OPTAB_LIB_WIDEN));
6963
6964 /* Extract the bit we want to examine */
6965 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6966 gen_rtx_MEM (byte_mode, addr),
6967 make_tree (TREE_TYPE (index), rem),
6968 NULL_RTX, 1);
6969 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6970 GET_MODE (target) == byte_mode ? target : 0,
6971 1, OPTAB_LIB_WIDEN);
6972
6973 if (result != target)
6974 convert_move (target, result, 1);
6975
6976 /* Output the code to handle the out-of-range case. */
6977 emit_jump (op0);
6978 emit_label (op1);
6979 emit_move_insn (target, const0_rtx);
6980 emit_label (op0);
6981 return target;
6982 }
6983
6984 case WITH_CLEANUP_EXPR:
6985 if (RTL_EXPR_RTL (exp) == 0)
6986 {
6987 RTL_EXPR_RTL (exp)
6988 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6989 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6990
6991 /* That's it for this cleanup. */
6992 TREE_OPERAND (exp, 2) = 0;
6993 }
6994 return RTL_EXPR_RTL (exp);
6995
6996 case CLEANUP_POINT_EXPR:
6997 {
6998 /* Start a new binding layer that will keep track of all cleanup
6999 actions to be performed. */
7000 expand_start_bindings (2);
7001
7002 target_temp_slot_level = temp_slot_level;
7003
7004 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7005 /* If we're going to use this value, load it up now. */
7006 if (! ignore)
7007 op0 = force_not_mem (op0);
7008 preserve_temp_slots (op0);
7009 expand_end_bindings (NULL_TREE, 0, 0);
7010 }
7011 return op0;
7012
7013 case CALL_EXPR:
7014 /* Check for a built-in function. */
7015 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7016 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7017 == FUNCTION_DECL)
7018 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7019 return expand_builtin (exp, target, subtarget, tmode, ignore);
7020
7021 /* If this call was expanded already by preexpand_calls,
7022 just return the result we got. */
7023 if (CALL_EXPR_RTL (exp) != 0)
7024 return CALL_EXPR_RTL (exp);
7025
7026 return expand_call (exp, target, ignore);
7027
7028 case NON_LVALUE_EXPR:
7029 case NOP_EXPR:
7030 case CONVERT_EXPR:
7031 case REFERENCE_EXPR:
7032 if (TREE_CODE (type) == UNION_TYPE)
7033 {
7034 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7035
7036 /* If both input and output are BLKmode, this conversion
7037 isn't actually doing anything unless we need to make the
7038 alignment stricter. */
7039 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7040 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7041 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7042 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7043 modifier);
7044
7045 if (target == 0)
7046 {
7047 if (mode != BLKmode)
7048 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7049 else
7050 target = assign_temp (type, 0, 1, 1);
7051 }
7052
7053 if (GET_CODE (target) == MEM)
7054 /* Store data into beginning of memory target. */
7055 store_expr (TREE_OPERAND (exp, 0),
7056 change_address (target, TYPE_MODE (valtype), 0), 0);
7057
7058 else if (GET_CODE (target) == REG)
7059 /* Store this field into a union of the proper type. */
7060 store_field (target,
7061 MIN ((int_size_in_bytes (TREE_TYPE
7062 (TREE_OPERAND (exp, 0)))
7063 * BITS_PER_UNIT),
7064 GET_MODE_BITSIZE (mode)),
7065 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7066 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7067 else
7068 abort ();
7069
7070 /* Return the entire union. */
7071 return target;
7072 }
7073
7074 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7075 {
7076 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7077 ro_modifier);
7078
7079 /* If the signedness of the conversion differs and OP0 is
7080 a promoted SUBREG, clear that indication since we now
7081 have to do the proper extension. */
7082 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7083 && GET_CODE (op0) == SUBREG)
7084 SUBREG_PROMOTED_VAR_P (op0) = 0;
7085
7086 return op0;
7087 }
7088
7089 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7090 if (GET_MODE (op0) == mode)
7091 return op0;
7092
7093 /* If OP0 is a constant, just convert it into the proper mode. */
7094 if (CONSTANT_P (op0))
7095 return
7096 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7097 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7098
7099 if (modifier == EXPAND_INITIALIZER)
7100 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7101
7102 if (target == 0)
7103 return
7104 convert_to_mode (mode, op0,
7105 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7106 else
7107 convert_move (target, op0,
7108 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7109 return target;
7110
7111 case PLUS_EXPR:
7112 /* We come here from MINUS_EXPR when the second operand is a
7113 constant. */
7114 plus_expr:
7115 this_optab = add_optab;
7116
7117 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7118 something else, make sure we add the register to the constant and
7119 then to the other thing. This case can occur during strength
7120 reduction and doing it this way will produce better code if the
7121 frame pointer or argument pointer is eliminated.
7122
7123 fold-const.c will ensure that the constant is always in the inner
7124 PLUS_EXPR, so the only case we need to do anything about is if
7125 sp, ap, or fp is our second argument, in which case we must swap
7126 the innermost first argument and our second argument. */
7127
7128 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7129 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7130 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7131 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7132 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7133 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7134 {
7135 tree t = TREE_OPERAND (exp, 1);
7136
7137 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7138 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7139 }
7140
7141 /* If the result is to be ptr_mode and we are adding an integer to
7142 something, we might be forming a constant. So try to use
7143 plus_constant. If it produces a sum and we can't accept it,
7144 use force_operand. This allows P = &ARR[const] to generate
7145 efficient code on machines where a SYMBOL_REF is not a valid
7146 address.
7147
7148 If this is an EXPAND_SUM call, always return the sum. */
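/* Illustrative example, not taken from the sources: for a static
   initializer such as

     static int arr[10];
     int *p = &arr[3];

   the address reaches this point as roughly (PLUS_EXPR (ADDR_EXPR arr) 12).
   Folding the constant with plus_constant lets the initializer be emitted
   as the single address (plus (symbol_ref "arr") (const_int 12)) instead of
   a run-time add; the tree and RTL shown are approximate.  */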
7149 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7150 || mode == ptr_mode)
7151 {
7152 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7153 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7154 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7155 {
7156 rtx constant_part;
7157
7158 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7159 EXPAND_SUM);
7160 /* Use immed_double_const to ensure that the constant is
7161 truncated according to the mode of OP1, then sign extended
7162 to a HOST_WIDE_INT. Using the constant directly can result
7163 in non-canonical RTL in a 64x32 cross compile. */
7164 constant_part
7165 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7166 (HOST_WIDE_INT) 0,
7167 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7168 op1 = plus_constant (op1, INTVAL (constant_part));
7169 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7170 op1 = force_operand (op1, target);
7171 return op1;
7172 }
7173
7174 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7175 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7176 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7177 {
7178 rtx constant_part;
7179
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7181 EXPAND_SUM);
7182 if (! CONSTANT_P (op0))
7183 {
7184 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7185 VOIDmode, modifier);
7186 /* Don't go to both_summands if modifier
7187 says it's not right to return a PLUS. */
7188 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7189 goto binop2;
7190 goto both_summands;
7191 }
7192 /* Use immed_double_const to ensure that the constant is
7193 truncated according to the mode of OP0, then sign extended
7194 to a HOST_WIDE_INT. Using the constant directly can result
7195 in non-canonical RTL in a 64x32 cross compile. */
7196 constant_part
7197 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7198 (HOST_WIDE_INT) 0,
7199 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7200 op0 = plus_constant (op0, INTVAL (constant_part));
7201 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7202 op0 = force_operand (op0, target);
7203 return op0;
7204 }
7205 }
7206
7207 /* No sense saving up arithmetic to be done
7208 if it's all in the wrong mode to form part of an address.
7209 And force_operand won't know whether to sign-extend or
7210 zero-extend. */
7211 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7212 || mode != ptr_mode)
7213 goto binop;
7214
7215 preexpand_calls (exp);
7216 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7217 subtarget = 0;
7218
7219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7221
7222 both_summands:
7223 /* Make sure any term that's a sum with a constant comes last. */
7224 if (GET_CODE (op0) == PLUS
7225 && CONSTANT_P (XEXP (op0, 1)))
7226 {
7227 temp = op0;
7228 op0 = op1;
7229 op1 = temp;
7230 }
7231 /* If adding to a sum including a constant,
7232 associate it to put the constant outside. */
7233 if (GET_CODE (op1) == PLUS
7234 && CONSTANT_P (XEXP (op1, 1)))
7235 {
7236 rtx constant_term = const0_rtx;
7237
7238 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7239 if (temp != 0)
7240 op0 = temp;
7241 /* Ensure that MULT comes first if there is one. */
7242 else if (GET_CODE (op0) == MULT)
7243 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7244 else
7245 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7246
7247 /* Let's also eliminate constants from op0 if possible. */
7248 op0 = eliminate_constant_term (op0, &constant_term);
7249
7250 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7251 their sum should be a constant. Form it into OP1, since the
7252 result we want will then be OP0 + OP1. */
7253
7254 temp = simplify_binary_operation (PLUS, mode, constant_term,
7255 XEXP (op1, 1));
7256 if (temp != 0)
7257 op1 = temp;
7258 else
7259 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7260 }
7261
7262 /* Put a constant term last and put a multiplication first. */
7263 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7264 temp = op1, op1 = op0, op0 = temp;
7265
7266 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7267 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7268
7269 case MINUS_EXPR:
7270 /* For initializers, we are allowed to return a MINUS of two
7271 symbolic constants. Here we handle all cases when both operands
7272 are constant. */
7273 /* Handle difference of two symbolic constants,
7274 for the sake of an initializer. */
7275 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7276 && really_constant_p (TREE_OPERAND (exp, 0))
7277 && really_constant_p (TREE_OPERAND (exp, 1)))
7278 {
7279 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7280 VOIDmode, ro_modifier);
7281 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7282 VOIDmode, ro_modifier);
7283
7284 /* If the last operand is a CONST_INT, use plus_constant of
7285 the negated constant. Else make the MINUS. */
7286 if (GET_CODE (op1) == CONST_INT)
7287 return plus_constant (op0, - INTVAL (op1));
7288 else
7289 return gen_rtx_MINUS (mode, op0, op1);
7290 }
7291 /* Convert A - const to A + (-const). */
7292 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7293 {
7294 tree negated = fold (build1 (NEGATE_EXPR, type,
7295 TREE_OPERAND (exp, 1)));
7296
7297 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7298 /* If we can't negate the constant in TYPE, leave it alone and
7299 expand_binop will negate it for us. We used to try to do it
7300 here in the signed version of TYPE, but that doesn't work
7301 on POINTER_TYPEs. */;
7302 else
7303 {
7304 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7305 goto plus_expr;
7306 }
7307 }
7308 this_optab = sub_optab;
7309 goto binop;
7310
7311 case MULT_EXPR:
7312 preexpand_calls (exp);
7313 /* If first operand is constant, swap them.
7314 Thus the following special case checks need only
7315 check the second operand. */
7316 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7317 {
7318 register tree t1 = TREE_OPERAND (exp, 0);
7319 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7320 TREE_OPERAND (exp, 1) = t1;
7321 }
7322
7323 /* Attempt to return something suitable for generating an
7324 indexed address, for machines that support that. */
7325
7326 if (modifier == EXPAND_SUM && mode == ptr_mode
7327 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7329 {
7330 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7331 EXPAND_SUM);
7332
7333 /* Apply distributive law if OP0 is x+c. */
7334 if (GET_CODE (op0) == PLUS
7335 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7336 return
7337 gen_rtx_PLUS
7338 (mode,
7339 gen_rtx_MULT
7340 (mode, XEXP (op0, 0),
7341 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7342 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7343 * INTVAL (XEXP (op0, 1))));
7344
7345 if (GET_CODE (op0) != REG)
7346 op0 = force_operand (op0, NULL_RTX);
7347 if (GET_CODE (op0) != REG)
7348 op0 = copy_to_mode_reg (mode, op0);
7349
7350 return
7351 gen_rtx_MULT (mode, op0,
7352 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7353 }
7354
7355 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7356 subtarget = 0;
7357
7358 /* Check for multiplying things that have been extended
7359 from a narrower type. If this machine supports multiplying
7360 in that narrower type with a result in the desired type,
7361 do it that way, and avoid the explicit type-conversion. */
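/* Illustrative example, not taken from the sources: with 16-bit short and
   32-bit int, a multiplication written as

     short a, b;
     int prod = (int) a * (int) b;

   has both operands as NOP_EXPRs widening from the narrower type, so when
   the target provides a widening-multiply pattern (e.g. mulhisi3, reached
   through smul_widen_optab) the product is computed directly as
   short x short -> int with no explicit extensions.  Pattern and optab
   names are mentioned only for orientation.  */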
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7363 && TREE_CODE (type) == INTEGER_TYPE
7364 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7365 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7366 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7367 && int_fits_type_p (TREE_OPERAND (exp, 1),
7368 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7369 /* Don't use a widening multiply if a shift will do. */
7370 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7371 > HOST_BITS_PER_WIDE_INT)
7372 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7373 ||
7374 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7375 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7376 ==
7377 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7378 /* If both operands are extended, they must either both
7379 be zero-extended or both be sign-extended. */
7380 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7381 ==
7382 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7383 {
7384 enum machine_mode innermode
7385 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7386 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7387 ? smul_widen_optab : umul_widen_optab);
7388 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7389 ? umul_widen_optab : smul_widen_optab);
7390 if (mode == GET_MODE_WIDER_MODE (innermode))
7391 {
7392 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7393 {
7394 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7395 NULL_RTX, VOIDmode, 0);
7396 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7397 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7398 VOIDmode, 0);
7399 else
7400 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7401 NULL_RTX, VOIDmode, 0);
7402 goto binop2;
7403 }
7404 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7405 && innermode == word_mode)
7406 {
7407 rtx htem;
7408 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7409 NULL_RTX, VOIDmode, 0);
7410 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7411 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7412 VOIDmode, 0);
7413 else
7414 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7415 NULL_RTX, VOIDmode, 0);
7416 temp = expand_binop (mode, other_optab, op0, op1, target,
7417 unsignedp, OPTAB_LIB_WIDEN);
7418 htem = expand_mult_highpart_adjust (innermode,
7419 gen_highpart (innermode, temp),
7420 op0, op1,
7421 gen_highpart (innermode, temp),
7422 unsignedp);
7423 emit_move_insn (gen_highpart (innermode, temp), htem);
7424 return temp;
7425 }
7426 }
7427 }
7428 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7429 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7430 return expand_mult (mode, op0, op1, target, unsignedp);
7431
7432 case TRUNC_DIV_EXPR:
7433 case FLOOR_DIV_EXPR:
7434 case CEIL_DIV_EXPR:
7435 case ROUND_DIV_EXPR:
7436 case EXACT_DIV_EXPR:
7437 preexpand_calls (exp);
7438 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7439 subtarget = 0;
7440 /* Possible optimization: compute the dividend with EXPAND_SUM
7441 then if the divisor is constant can optimize the case
7442 where some terms of the dividend have coeffs divisible by it. */
7443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7444 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7445 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7446
7447 case RDIV_EXPR:
7448 this_optab = flodiv_optab;
7449 goto binop;
7450
7451 case TRUNC_MOD_EXPR:
7452 case FLOOR_MOD_EXPR:
7453 case CEIL_MOD_EXPR:
7454 case ROUND_MOD_EXPR:
7455 preexpand_calls (exp);
7456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7457 subtarget = 0;
7458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7460 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7461
7462 case FIX_ROUND_EXPR:
7463 case FIX_FLOOR_EXPR:
7464 case FIX_CEIL_EXPR:
7465 abort (); /* Not used for C. */
7466
7467 case FIX_TRUNC_EXPR:
7468 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7469 if (target == 0)
7470 target = gen_reg_rtx (mode);
7471 expand_fix (target, op0, unsignedp);
7472 return target;
7473
7474 case FLOAT_EXPR:
7475 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7476 if (target == 0)
7477 target = gen_reg_rtx (mode);
7478 /* expand_float can't figure out what to do if FROM has VOIDmode.
7479 So give it the correct mode. With -O, cse will optimize this. */
7480 if (GET_MODE (op0) == VOIDmode)
7481 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7482 op0);
7483 expand_float (target, op0,
7484 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7485 return target;
7486
7487 case NEGATE_EXPR:
7488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7489 temp = expand_unop (mode, neg_optab, op0, target, 0);
7490 if (temp == 0)
7491 abort ();
7492 return temp;
7493
7494 case ABS_EXPR:
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7496
7497 /* Handle complex values specially. */
7498 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7499 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7500 return expand_complex_abs (mode, op0, target, unsignedp);
7501
7502 /* Unsigned abs is simply the operand. Testing here means we don't
7503 risk generating incorrect code below. */
7504 if (TREE_UNSIGNED (type))
7505 return op0;
7506
7507 return expand_abs (mode, op0, target,
7508 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7509
7510 case MAX_EXPR:
7511 case MIN_EXPR:
7512 target = original_target;
7513 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7514 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7515 || GET_MODE (target) != mode
7516 || (GET_CODE (target) == REG
7517 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7518 target = gen_reg_rtx (mode);
7519 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7520 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7521
7522 /* First try to do it with a special MIN or MAX instruction.
7523 If that does not win, use a conditional jump to select the proper
7524 value. */
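/* Illustrative example, not taken from the sources: when no min/max
   instruction is available, MAX_EXPR for r = a > b ? a : b falls back to
   the compare-and-conditionally-overwrite sequence emitted below:

     r = a;
     if (r >= b)
       goto done;
     r = b;
   done: ;
*/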
7525 this_optab = (TREE_UNSIGNED (type)
7526 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7527 : (code == MIN_EXPR ? smin_optab : smax_optab));
7528
7529 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7530 OPTAB_WIDEN);
7531 if (temp != 0)
7532 return temp;
7533
7534 /* At this point, a MEM target is no longer useful; we will get better
7535 code without it. */
7536
7537 if (GET_CODE (target) == MEM)
7538 target = gen_reg_rtx (mode);
7539
7540 if (target != op0)
7541 emit_move_insn (target, op0);
7542
7543 op0 = gen_label_rtx ();
7544
7545 /* If this mode is an integer too wide to compare properly,
7546 compare word by word. Rely on cse to optimize constant cases. */
7547 if (GET_MODE_CLASS (mode) == MODE_INT
7548 && ! can_compare_p (GE, mode, ccp_jump))
7549 {
7550 if (code == MAX_EXPR)
7551 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7552 target, op1, NULL_RTX, op0);
7553 else
7554 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7555 op1, target, NULL_RTX, op0);
7556 }
7557 else
7558 {
7559 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7560 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7561 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7562 op0);
7563 }
7564 emit_move_insn (target, op1);
7565 emit_label (op0);
7566 return target;
7567
7568 case BIT_NOT_EXPR:
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7570 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7571 if (temp == 0)
7572 abort ();
7573 return temp;
7574
7575 case FFS_EXPR:
7576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7577 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7578 if (temp == 0)
7579 abort ();
7580 return temp;
7581
7582 /* ??? Can optimize bitwise operations with one arg constant.
7583 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7584 and (a bitwise1 b) bitwise2 b (etc)
7585 but that is probably not worthwhile. */
7586
7587 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7588 boolean values when we want in all cases to compute both of them. In
7589 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7590 as actual zero-or-1 values and then bitwise anding. In cases where
7591 there cannot be any side effects, better code would be made by
7592 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7593 how to recognize those cases. */
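/* Illustrative example, not taken from the sources: for int a, b with no
   side effects,

     r1 = a && b;                  (TRUTH_ANDIF_EXPR: b is skipped if a == 0)
     r2 = (a != 0) & (b != 0);     (same value, but both operands evaluated)

   TRUTH_AND_EXPR behaves like the second form: each operand is reduced to
   0 or 1 and the results are bitwise anded, which is why treating it like
   TRUTH_ANDIF_EXPR would be valid whenever neither operand has side
   effects, as noted above.  */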
7594
7595 case TRUTH_AND_EXPR:
7596 case BIT_AND_EXPR:
7597 this_optab = and_optab;
7598 goto binop;
7599
7600 case TRUTH_OR_EXPR:
7601 case BIT_IOR_EXPR:
7602 this_optab = ior_optab;
7603 goto binop;
7604
7605 case TRUTH_XOR_EXPR:
7606 case BIT_XOR_EXPR:
7607 this_optab = xor_optab;
7608 goto binop;
7609
7610 case LSHIFT_EXPR:
7611 case RSHIFT_EXPR:
7612 case LROTATE_EXPR:
7613 case RROTATE_EXPR:
7614 preexpand_calls (exp);
7615 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7616 subtarget = 0;
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7618 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7619 unsignedp);
7620
7621 /* Could determine the answer when only additive constants differ. Also,
7622 the addition of one can be handled by changing the condition. */
7623 case LT_EXPR:
7624 case LE_EXPR:
7625 case GT_EXPR:
7626 case GE_EXPR:
7627 case EQ_EXPR:
7628 case NE_EXPR:
7629 case UNORDERED_EXPR:
7630 case ORDERED_EXPR:
7631 case UNLT_EXPR:
7632 case UNLE_EXPR:
7633 case UNGT_EXPR:
7634 case UNGE_EXPR:
7635 case UNEQ_EXPR:
7636 preexpand_calls (exp);
7637 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7638 if (temp != 0)
7639 return temp;
7640
7641 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7642 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7643 && original_target
7644 && GET_CODE (original_target) == REG
7645 && (GET_MODE (original_target)
7646 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7647 {
7648 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7649 VOIDmode, 0);
7650
7651 if (temp != original_target)
7652 temp = copy_to_reg (temp);
7653
7654 op1 = gen_label_rtx ();
7655 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7656 GET_MODE (temp), unsignedp, 0, op1);
7657 emit_move_insn (temp, const1_rtx);
7658 emit_label (op1);
7659 return temp;
7660 }
7661
7662 /* If no set-flag instruction, must generate a conditional
7663 store into a temporary variable. Drop through
7664 and handle this like && and ||. */
7665
7666 case TRUTH_ANDIF_EXPR:
7667 case TRUTH_ORIF_EXPR:
7668 if (! ignore
7669 && (target == 0 || ! safe_from_p (target, exp, 1)
7670 /* Make sure we don't have a hard reg (such as function's return
7671 value) live across basic blocks, if not optimizing. */
7672 || (!optimize && GET_CODE (target) == REG
7673 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7674 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7675
7676 if (target)
7677 emit_clr_insn (target);
7678
7679 op1 = gen_label_rtx ();
7680 jumpifnot (exp, op1);
7681
7682 if (target)
7683 emit_0_to_1_insn (target);
7684
7685 emit_label (op1);
7686 return ignore ? const0_rtx : target;
7687
7688 case TRUTH_NOT_EXPR:
7689 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7690 /* The parser is careful to generate TRUTH_NOT_EXPR
7691 only with operands that are always zero or one. */
7692 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7693 target, 1, OPTAB_LIB_WIDEN);
7694 if (temp == 0)
7695 abort ();
7696 return temp;
7697
7698 case COMPOUND_EXPR:
7699 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7700 emit_queue ();
7701 return expand_expr (TREE_OPERAND (exp, 1),
7702 (ignore ? const0_rtx : target),
7703 VOIDmode, 0);
7704
7705 case COND_EXPR:
7706 /* If we would have a "singleton" (see below) were it not for a
7707 conversion in each arm, bring that conversion back out. */
7708 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7709 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7710 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7711 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7712 {
7713 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7714 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7715
7716 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7717 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7718 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7719 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7720 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7721 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7722 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7723 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7724 return expand_expr (build1 (NOP_EXPR, type,
7725 build (COND_EXPR, TREE_TYPE (true),
7726 TREE_OPERAND (exp, 0),
7727 true, false)),
7728 target, tmode, modifier);
7729 }
7730
7731 {
7732 /* Note that COND_EXPRs whose type is a structure or union
7733 are required to be constructed to contain assignments of
7734 a temporary variable, so that we can evaluate them here
7735 for side effect only. If type is void, we must do likewise. */
7736
7737 /* If an arm of the branch requires a cleanup,
7738 only that cleanup is performed. */
7739
7740 tree singleton = 0;
7741 tree binary_op = 0, unary_op = 0;
7742
7743 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7744 convert it to our mode, if necessary. */
7745 if (integer_onep (TREE_OPERAND (exp, 1))
7746 && integer_zerop (TREE_OPERAND (exp, 2))
7747 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7748 {
7749 if (ignore)
7750 {
7751 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7752 ro_modifier);
7753 return const0_rtx;
7754 }
7755
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7757 if (GET_MODE (op0) == mode)
7758 return op0;
7759
7760 if (target == 0)
7761 target = gen_reg_rtx (mode);
7762 convert_move (target, op0, unsignedp);
7763 return target;
7764 }
7765
7766 /* Check for X ? A + B : A. If we have this, we can copy A to the
7767 output and conditionally add B. Similarly for unary operations.
7768 Don't do this if X has side-effects because those side effects
7769 might affect A or B and the "?" operation is a sequence point in
7770 ANSI. (operand_equal_p tests for side effects.) */
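/* Illustrative example, not taken from the sources: for

     r = cond ? a + b : a;

   the singleton is a, so the result is set to a first and b is added only
   on the taken branch:

     r = a;
     if (cond)
       r += b;

   This is skipped when cond has side effects, because those side effects
   might affect a or b and the "?" operator is a sequence point in ANSI C.  */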
7771
7772 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7773 && operand_equal_p (TREE_OPERAND (exp, 2),
7774 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7775 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7776 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7777 && operand_equal_p (TREE_OPERAND (exp, 1),
7778 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7779 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7780 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7781 && operand_equal_p (TREE_OPERAND (exp, 2),
7782 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7783 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7784 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7785 && operand_equal_p (TREE_OPERAND (exp, 1),
7786 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7787 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7788
7789 /* If we are not to produce a result, we have no target. Otherwise,
7790 if a target was specified use it; it will not be used as an
7791 intermediate target unless it is safe. If no target, use a
7792 temporary. */
7793
7794 if (ignore)
7795 temp = 0;
7796 else if (original_target
7797 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7798 || (singleton && GET_CODE (original_target) == REG
7799 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7800 && original_target == var_rtx (singleton)))
7801 && GET_MODE (original_target) == mode
7802 #ifdef HAVE_conditional_move
7803 && (! can_conditionally_move_p (mode)
7804 || GET_CODE (original_target) == REG
7805 || TREE_ADDRESSABLE (type))
7806 #endif
7807 && ! (GET_CODE (original_target) == MEM
7808 && MEM_VOLATILE_P (original_target)))
7809 temp = original_target;
7810 else if (TREE_ADDRESSABLE (type))
7811 abort ();
7812 else
7813 temp = assign_temp (type, 0, 0, 1);
7814
7815 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7816 do the test of X as a store-flag operation, do this as
7817 A + ((X != 0) << log C). Similarly for other simple binary
7818 operators. Only do for C == 1 if BRANCH_COST is low. */
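/* Illustrative worked example, not taken from the sources: for

     r = (i < n) ? a + 4 : a;

   the singleton is a and the binary operand is the constant 4, a power of
   two, so the whole conditional can be emitted branch-free as

     r = a + ((i < n) << 2);

   with do_store_flag producing the 0/1 value of the comparison.  */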
7819 if (temp && singleton && binary_op
7820 && (TREE_CODE (binary_op) == PLUS_EXPR
7821 || TREE_CODE (binary_op) == MINUS_EXPR
7822 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7823 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7824 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7825 : integer_onep (TREE_OPERAND (binary_op, 1)))
7826 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7827 {
7828 rtx result;
7829 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7830 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7831 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7832 : xor_optab);
7833
7834 /* If we had X ? A : A + 1, do this as A + (X == 0).
7835
7836 We have to invert the truth value here and then put it
7837 back later if do_store_flag fails. We cannot simply copy
7838 TREE_OPERAND (exp, 0) to another variable and modify that
7839 because invert_truthvalue can modify the tree pointed to
7840 by its argument. */
7841 if (singleton == TREE_OPERAND (exp, 1))
7842 TREE_OPERAND (exp, 0)
7843 = invert_truthvalue (TREE_OPERAND (exp, 0));
7844
7845 result = do_store_flag (TREE_OPERAND (exp, 0),
7846 (safe_from_p (temp, singleton, 1)
7847 ? temp : NULL_RTX),
7848 mode, BRANCH_COST <= 1);
7849
7850 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7851 result = expand_shift (LSHIFT_EXPR, mode, result,
7852 build_int_2 (tree_log2
7853 (TREE_OPERAND
7854 (binary_op, 1)),
7855 0),
7856 (safe_from_p (temp, singleton, 1)
7857 ? temp : NULL_RTX), 0);
7858
7859 if (result)
7860 {
7861 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7862 return expand_binop (mode, boptab, op1, result, temp,
7863 unsignedp, OPTAB_LIB_WIDEN);
7864 }
7865 else if (singleton == TREE_OPERAND (exp, 1))
7866 TREE_OPERAND (exp, 0)
7867 = invert_truthvalue (TREE_OPERAND (exp, 0));
7868 }
7869
7870 do_pending_stack_adjust ();
7871 NO_DEFER_POP;
7872 op0 = gen_label_rtx ();
7873
7874 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7875 {
7876 if (temp != 0)
7877 {
7878 /* If the target conflicts with the other operand of the
7879 binary op, we can't use it. Also, we can't use the target
7880 if it is a hard register, because evaluating the condition
7881 might clobber it. */
7882 if ((binary_op
7883 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7884 || (GET_CODE (temp) == REG
7885 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7886 temp = gen_reg_rtx (mode);
7887 store_expr (singleton, temp, 0);
7888 }
7889 else
7890 expand_expr (singleton,
7891 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7892 if (singleton == TREE_OPERAND (exp, 1))
7893 jumpif (TREE_OPERAND (exp, 0), op0);
7894 else
7895 jumpifnot (TREE_OPERAND (exp, 0), op0);
7896
7897 start_cleanup_deferral ();
7898 if (binary_op && temp == 0)
7899 /* Just touch the other operand. */
7900 expand_expr (TREE_OPERAND (binary_op, 1),
7901 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7902 else if (binary_op)
7903 store_expr (build (TREE_CODE (binary_op), type,
7904 make_tree (type, temp),
7905 TREE_OPERAND (binary_op, 1)),
7906 temp, 0);
7907 else
7908 store_expr (build1 (TREE_CODE (unary_op), type,
7909 make_tree (type, temp)),
7910 temp, 0);
7911 op1 = op0;
7912 }
7913 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7914 comparison operator. If we have one of these cases, set the
7915 output to A, branch on A (cse will merge these two references),
7916 then set the output to FOO. */
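/* Illustrative example, not taken from the sources: for

     r = (a != 0) ? a : b;

   the output is first set to a, the same a is then tested (cse merges the
   two reads), and b is stored only when the test fails:

     r = a;
     if (a != 0)
       goto done;
     r = b;
   done: ;
*/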
7917 else if (temp
7918 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7919 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7920 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7921 TREE_OPERAND (exp, 1), 0)
7922 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7923 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7924 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7925 {
7926 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7927 temp = gen_reg_rtx (mode);
7928 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7929 jumpif (TREE_OPERAND (exp, 0), op0);
7930
7931 start_cleanup_deferral ();
7932 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7933 op1 = op0;
7934 }
7935 else if (temp
7936 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7937 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7938 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7939 TREE_OPERAND (exp, 2), 0)
7940 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7941 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7942 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7943 {
7944 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7945 temp = gen_reg_rtx (mode);
7946 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7947 jumpifnot (TREE_OPERAND (exp, 0), op0);
7948
7949 start_cleanup_deferral ();
7950 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7951 op1 = op0;
7952 }
7953 else
7954 {
7955 op1 = gen_label_rtx ();
7956 jumpifnot (TREE_OPERAND (exp, 0), op0);
7957
7958 start_cleanup_deferral ();
7959
7960 /* One branch of the cond can be void if it never returns. For
7961 example A ? throw : E. */
7962 if (temp != 0
7963 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7964 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7965 else
7966 expand_expr (TREE_OPERAND (exp, 1),
7967 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7968 end_cleanup_deferral ();
7969 emit_queue ();
7970 emit_jump_insn (gen_jump (op1));
7971 emit_barrier ();
7972 emit_label (op0);
7973 start_cleanup_deferral ();
7974 if (temp != 0
7975 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7976 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7977 else
7978 expand_expr (TREE_OPERAND (exp, 2),
7979 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7980 }
7981
7982 end_cleanup_deferral ();
7983
7984 emit_queue ();
7985 emit_label (op1);
7986 OK_DEFER_POP;
7987
7988 return temp;
7989 }
7990
7991 case TARGET_EXPR:
7992 {
7993 /* Something needs to be initialized, but we didn't know
7994 where that thing was when building the tree. For example,
7995 it could be the return value of a function, or a parameter
7996 to a function which is laid out on the stack, or a temporary
7997 variable which must be passed by reference.
7998
7999 We guarantee that the expression will either be constructed
8000 or copied into our original target. */
8001
8002 tree slot = TREE_OPERAND (exp, 0);
8003 tree cleanups = NULL_TREE;
8004 tree exp1;
8005
8006 if (TREE_CODE (slot) != VAR_DECL)
8007 abort ();
8008
8009 if (! ignore)
8010 target = original_target;
8011
8012 /* Set this here so that if we get a target that refers to a
8013 register variable that's already been used, put_reg_into_stack
8014 knows that it should fix up those uses. */
8015 TREE_USED (slot) = 1;
8016
8017 if (target == 0)
8018 {
8019 if (DECL_RTL (slot) != 0)
8020 {
8021 target = DECL_RTL (slot);
8022 /* If we have already expanded the slot, don't do
8023 it again. (mrs) */
8024 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8025 return target;
8026 }
8027 else
8028 {
8029 target = assign_temp (type, 2, 0, 1);
8030 /* All temp slots at this level must not conflict. */
8031 preserve_temp_slots (target);
8032 DECL_RTL (slot) = target;
8033 if (TREE_ADDRESSABLE (slot))
8034 {
8035 TREE_ADDRESSABLE (slot) = 0;
8036 mark_addressable (slot);
8037 }
8038
8039 /* Since SLOT is not known to the called function
8040 to belong to its stack frame, we must build an explicit
8041 cleanup. This case occurs when we must build up a reference
8042 to pass the reference as an argument. In this case,
8043 it is very likely that such a reference need not be
8044 built here. */
8045
8046 if (TREE_OPERAND (exp, 2) == 0)
8047 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8048 cleanups = TREE_OPERAND (exp, 2);
8049 }
8050 }
8051 else
8052 {
8053 /* This case does occur when expanding a parameter which
8054 needs to be constructed on the stack. The target
8055 is the actual stack address that we want to initialize.
8056 The function we call will perform the cleanup in this case. */
8057
8058 /* If we have already assigned it space, use that space,
8059 not the target that we were passed in, as our target
8060 parameter is only a hint. */
8061 if (DECL_RTL (slot) != 0)
8062 {
8063 target = DECL_RTL (slot);
8064 /* If we have already expanded the slot, don't do
8065 it again. (mrs) */
8066 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8067 return target;
8068 }
8069 else
8070 {
8071 DECL_RTL (slot) = target;
8072 /* If we must have an addressable slot, then make sure that
8073 the RTL that we just stored in slot is OK. */
8074 if (TREE_ADDRESSABLE (slot))
8075 {
8076 TREE_ADDRESSABLE (slot) = 0;
8077 mark_addressable (slot);
8078 }
8079 }
8080 }
8081
8082 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8083 /* Mark it as expanded. */
8084 TREE_OPERAND (exp, 1) = NULL_TREE;
8085
8086 store_expr (exp1, target, 0);
8087
8088 expand_decl_cleanup (NULL_TREE, cleanups);
8089
8090 return target;
8091 }
8092
8093 case INIT_EXPR:
8094 {
8095 tree lhs = TREE_OPERAND (exp, 0);
8096 tree rhs = TREE_OPERAND (exp, 1);
8097 tree noncopied_parts = 0;
8098 tree lhs_type = TREE_TYPE (lhs);
8099
8100 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8101 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8102 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8103 TYPE_NONCOPIED_PARTS (lhs_type));
8104 while (noncopied_parts != 0)
8105 {
8106 expand_assignment (TREE_VALUE (noncopied_parts),
8107 TREE_PURPOSE (noncopied_parts), 0, 0);
8108 noncopied_parts = TREE_CHAIN (noncopied_parts);
8109 }
8110 return temp;
8111 }
8112
8113 case MODIFY_EXPR:
8114 {
8115 /* If lhs is complex, expand calls in rhs before computing it.
8116 That's so we don't compute a pointer and save it over a call.
8117 If lhs is simple, compute it first so we can give it as a
8118 target if the rhs is just a call. This avoids an extra temp and copy
8119 and that prevents a partial-subsumption which makes bad code.
8120 Actually we could treat component_ref's of vars like vars. */
8121
8122 tree lhs = TREE_OPERAND (exp, 0);
8123 tree rhs = TREE_OPERAND (exp, 1);
8124 tree noncopied_parts = 0;
8125 tree lhs_type = TREE_TYPE (lhs);
8126
8127 temp = 0;
8128
8129 if (TREE_CODE (lhs) != VAR_DECL
8130 && TREE_CODE (lhs) != RESULT_DECL
8131 && TREE_CODE (lhs) != PARM_DECL
8132 && ! (TREE_CODE (lhs) == INDIRECT_REF
8133 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8134 preexpand_calls (exp);
8135
8136 /* Check for |= or &= of a bitfield of size one into another bitfield
8137 of size 1. In this case, (unless we need the result of the
8138 assignment) we can do this more efficiently with a
8139 test followed by an assignment, if necessary.
8140
8141 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8142 things change so we do, this code should be enhanced to
8143 support it. */
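/* Illustrative example, not taken from the sources: with

     struct s { unsigned a : 1, b : 1; } x;

   and the value of the assignment unused,

     x.a |= x.b;   is emitted as   if (x.b) x.a = 1;
     x.a &= x.b;   is emitted as   if (!x.b) x.a = 0;

   avoiding a read-modify-write of the destination bitfield.  */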
8144 if (ignore
8145 && TREE_CODE (lhs) == COMPONENT_REF
8146 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8147 || TREE_CODE (rhs) == BIT_AND_EXPR)
8148 && TREE_OPERAND (rhs, 0) == lhs
8149 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8150 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8151 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8152 {
8153 rtx label = gen_label_rtx ();
8154
8155 do_jump (TREE_OPERAND (rhs, 1),
8156 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8157 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8158 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8159 (TREE_CODE (rhs) == BIT_IOR_EXPR
8160 ? integer_one_node
8161 : integer_zero_node)),
8162 0, 0);
8163 do_pending_stack_adjust ();
8164 emit_label (label);
8165 return const0_rtx;
8166 }
8167
8168 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8169 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8170 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8171 TYPE_NONCOPIED_PARTS (lhs_type));
8172
8173 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8174 while (noncopied_parts != 0)
8175 {
8176 expand_assignment (TREE_PURPOSE (noncopied_parts),
8177 TREE_VALUE (noncopied_parts), 0, 0);
8178 noncopied_parts = TREE_CHAIN (noncopied_parts);
8179 }
8180 return temp;
8181 }
8182
8183 case RETURN_EXPR:
8184 if (!TREE_OPERAND (exp, 0))
8185 expand_null_return ();
8186 else
8187 expand_return (TREE_OPERAND (exp, 0));
8188 return const0_rtx;
8189
8190 case PREINCREMENT_EXPR:
8191 case PREDECREMENT_EXPR:
8192 return expand_increment (exp, 0, ignore);
8193
8194 case POSTINCREMENT_EXPR:
8195 case POSTDECREMENT_EXPR:
8196 /* Faster to treat as pre-increment if result is not used. */
8197 return expand_increment (exp, ! ignore, ignore);
8198
8199 case ADDR_EXPR:
8200 /* If nonzero, TEMP will be set to the address of something that might
8201 be a MEM corresponding to a stack slot. */
8202 temp = 0;
8203
8204 /* Are we taking the address of a nested function? */
8205 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8206 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8207 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8208 && ! TREE_STATIC (exp))
8209 {
8210 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8211 op0 = force_operand (op0, target);
8212 }
8213 /* If we are taking the address of something erroneous, just
8214 return a zero. */
8215 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8216 return const0_rtx;
8217 else
8218 {
8219 /* We make sure to pass const0_rtx down if we came in with
8220 ignore set, to avoid doing the cleanups twice for the same thing. */
8221 op0 = expand_expr (TREE_OPERAND (exp, 0),
8222 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8223 (modifier == EXPAND_INITIALIZER
8224 ? modifier : EXPAND_CONST_ADDRESS));
8225
8226 /* If we are going to ignore the result, OP0 will have been set
8227 to const0_rtx, so just return it. Don't get confused and
8228 think we are taking the address of the constant. */
8229 if (ignore)
8230 return op0;
8231
8232 op0 = protect_from_queue (op0, 0);
8233
8234 /* We would like the object in memory. If it is a constant, we can
8235 have it be statically allocated into memory. For a non-constant,
8236 we need to allocate some memory and store the value into it. */
8237
8238 if (CONSTANT_P (op0))
8239 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8240 op0);
8241 else if (GET_CODE (op0) == MEM)
8242 {
8243 mark_temp_addr_taken (op0);
8244 temp = XEXP (op0, 0);
8245 }
8246
8247 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8248 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8249 {
8250 /* If this object is in a register, it must not
8251 be BLKmode. */
8252 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8253 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8254
8255 mark_temp_addr_taken (memloc);
8256 emit_move_insn (memloc, op0);
8257 op0 = memloc;
8258 }
8259
8260 if (GET_CODE (op0) != MEM)
8261 abort ();
8262
8263 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8264 {
8265 temp = XEXP (op0, 0);
8266 #ifdef POINTERS_EXTEND_UNSIGNED
8267 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8268 && mode == ptr_mode)
8269 temp = convert_memory_address (ptr_mode, temp);
8270 #endif
8271 return temp;
8272 }
8273
8274 op0 = force_operand (XEXP (op0, 0), target);
8275 }
8276
8277 if (flag_force_addr && GET_CODE (op0) != REG)
8278 op0 = force_reg (Pmode, op0);
8279
8280 if (GET_CODE (op0) == REG
8281 && ! REG_USERVAR_P (op0))
8282 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8283
8284 /* If we might have had a temp slot, add an equivalent address
8285 for it. */
8286 if (temp != 0)
8287 update_temp_slot_address (temp, op0);
8288
8289 #ifdef POINTERS_EXTEND_UNSIGNED
8290 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8291 && mode == ptr_mode)
8292 op0 = convert_memory_address (ptr_mode, op0);
8293 #endif
8294
8295 return op0;
8296
8297 case ENTRY_VALUE_EXPR:
8298 abort ();
8299
8300 /* COMPLEX type for Extended Pascal & Fortran */
8301 case COMPLEX_EXPR:
8302 {
8303 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8304 rtx insns;
8305
8306 /* Get the rtx code of the operands. */
8307 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8308 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8309
8310 if (! target)
8311 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8312
8313 start_sequence ();
8314
8315 /* Move the real (op0) and imaginary (op1) parts to their location. */
8316 emit_move_insn (gen_realpart (mode, target), op0);
8317 emit_move_insn (gen_imagpart (mode, target), op1);
8318
8319 insns = get_insns ();
8320 end_sequence ();
8321
8322 /* Complex construction should appear as a single unit. */
8323 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8324 each with a separate pseudo as destination.
8325 It's not correct for flow to treat them as a unit. */
8326 if (GET_CODE (target) != CONCAT)
8327 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8328 else
8329 emit_insns (insns);
8330
8331 return target;
8332 }
8333
8334 case REALPART_EXPR:
8335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8336 return gen_realpart (mode, op0);
8337
8338 case IMAGPART_EXPR:
8339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8340 return gen_imagpart (mode, op0);
8341
8342 case CONJ_EXPR:
8343 {
8344 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8345 rtx imag_t;
8346 rtx insns;
8347
8348 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8349
8350 if (! target)
8351 target = gen_reg_rtx (mode);
8352
8353 start_sequence ();
8354
8355 /* Store the realpart and the negated imagpart to target. */
8356 emit_move_insn (gen_realpart (partmode, target),
8357 gen_realpart (partmode, op0));
8358
8359 imag_t = gen_imagpart (partmode, target);
8360 temp = expand_unop (partmode, neg_optab,
8361 gen_imagpart (partmode, op0), imag_t, 0);
8362 if (temp != imag_t)
8363 emit_move_insn (imag_t, temp);
8364
8365 insns = get_insns ();
8366 end_sequence ();
8367
8368 /* Conjugate should appear as a single unit.
8369 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8370 each with a separate pseudo as destination.
8371 It's not correct for flow to treat them as a unit. */
8372 if (GET_CODE (target) != CONCAT)
8373 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8374 else
8375 emit_insns (insns);
8376
8377 return target;
8378 }
8379
8380 case TRY_CATCH_EXPR:
8381 {
8382 tree handler = TREE_OPERAND (exp, 1);
8383
8384 expand_eh_region_start ();
8385
8386 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8387
8388 expand_eh_region_end (handler);
8389
8390 return op0;
8391 }
8392
8393 case TRY_FINALLY_EXPR:
8394 {
8395 tree try_block = TREE_OPERAND (exp, 0);
8396 tree finally_block = TREE_OPERAND (exp, 1);
8397 rtx finally_label = gen_label_rtx ();
8398 rtx done_label = gen_label_rtx ();
8399 rtx return_link = gen_reg_rtx (Pmode);
8400 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8401 (tree) finally_label, (tree) return_link);
8402 TREE_SIDE_EFFECTS (cleanup) = 1;
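/* Illustrative note, not taken from the sources: the code below lays out
   the finally block as a small subroutine.  Schematically, using the GNU
   computed-goto notation and invented labels:

     <try_block>
     return_link = &&resume; goto finally; resume:
     goto done;
   finally:
     <finally_block>
     goto *return_link;
   done:

   The cleanup registered below invokes the same subroutine on every other
   path that leaves the binding contour.  */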
8403
8404 /* Start a new binding layer that will keep track of all cleanup
8405 actions to be performed. */
8406 expand_start_bindings (2);
8407
8408 target_temp_slot_level = temp_slot_level;
8409
8410 expand_decl_cleanup (NULL_TREE, cleanup);
8411 op0 = expand_expr (try_block, target, tmode, modifier);
8412
8413 preserve_temp_slots (op0);
8414 expand_end_bindings (NULL_TREE, 0, 0);
8415 emit_jump (done_label);
8416 emit_label (finally_label);
8417 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8418 emit_indirect_jump (return_link);
8419 emit_label (done_label);
8420 return op0;
8421 }
8422
8423 case GOTO_SUBROUTINE_EXPR:
8424 {
8425 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8426 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8427 rtx return_address = gen_label_rtx ();
8428 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8429 emit_jump (subr);
8430 emit_label (return_address);
8431 return const0_rtx;
8432 }
8433
8434 case POPDCC_EXPR:
8435 {
8436 rtx dcc = get_dynamic_cleanup_chain ();
8437 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8438 return const0_rtx;
8439 }
8440
8441 case POPDHC_EXPR:
8442 {
8443 rtx dhc = get_dynamic_handler_chain ();
8444 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8445 return const0_rtx;
8446 }
8447
8448 case VA_ARG_EXPR:
8449 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8450
8451 default:
8452 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8453 }
8454
8455 /* Here to do an ordinary binary operator, generating an instruction
8456 from the optab already placed in `this_optab'. */
8457 binop:
8458 preexpand_calls (exp);
8459 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8460 subtarget = 0;
8461 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8462 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8463 binop2:
8464 temp = expand_binop (mode, this_optab, op0, op1, target,
8465 unsignedp, OPTAB_LIB_WIDEN);
8466 if (temp == 0)
8467 abort ();
8468 return temp;
8469 }
8470 \f
8471 /* Similar to expand_expr, except that we don't specify a target, target
8472 mode, or modifier and we return the alignment of the inner type. This is
8473 used in cases where it is not necessary to align the result to the
8474 alignment of its type as long as we know the alignment of the result, for
8475 example for comparisons of BLKmode values. */
8476
8477 static rtx
8478 expand_expr_unaligned (exp, palign)
8479 register tree exp;
8480 int *palign;
8481 {
8482 register rtx op0;
8483 tree type = TREE_TYPE (exp);
8484 register enum machine_mode mode = TYPE_MODE (type);
8485
8486 /* Default the alignment we return to that of the type. */
8487 *palign = TYPE_ALIGN (type);
8488
8489 /* The only case in which we do anything special is when the resulting mode
8490 is BLKmode. */
8491 if (mode != BLKmode)
8492 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8493
8494 switch (TREE_CODE (exp))
8495 {
8496 case CONVERT_EXPR:
8497 case NOP_EXPR:
8498 case NON_LVALUE_EXPR:
8499 /* Conversions between BLKmode values don't change the underlying
8500 alignment or value. */
8501 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8502 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8503 break;
8504
8505 case ARRAY_REF:
8506 /* Much of the code for this case is copied directly from expand_expr.
8507 We need to duplicate it here because we will do something different
8508 in the fall-through case, so we need to handle the same exceptions
8509 it does. */
8510 {
8511 tree array = TREE_OPERAND (exp, 0);
8512 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8513 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8514 tree index = TREE_OPERAND (exp, 1);
8515 tree index_type = TREE_TYPE (index);
8516 HOST_WIDE_INT i;
8517
8518 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8519 abort ();
8520
8521 /* Optimize the special-case of a zero lower bound.
8522
8523 We convert the low_bound to sizetype to avoid some problems
8524 with constant folding. (E.g. suppose the lower bound is 1,
8525 and its mode is QI. Without the conversion, (ARRAY
8526 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8527 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8528
8529 But sizetype isn't quite right either (especially if
8530 the lowbound is negative). FIXME */
8531
8532 if (! integer_zerop (low_bound))
8533 index = fold (build (MINUS_EXPR, index_type, index,
8534 convert (sizetype, low_bound)));
8535
8536 /* If this is a constant index into a constant array,
8537 just get the value from the array. Handle both the cases when
8538 we have an explicit constructor and when our operand is a variable
8539 that was declared const. */
8540
8541 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8542 {
8543 if (TREE_CODE (index) == INTEGER_CST
8544 && TREE_INT_CST_HIGH (index) == 0)
8545 {
8546 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8547
8548 i = TREE_INT_CST_LOW (index);
8549 while (elem && i--)
8550 elem = TREE_CHAIN (elem);
8551 if (elem)
8552 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8553 palign);
8554 }
8555 }
8556
8557 else if (optimize >= 1
8558 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8559 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8560 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8561 {
8562 if (TREE_CODE (index) == INTEGER_CST)
8563 {
8564 tree init = DECL_INITIAL (array);
8565
8566 i = TREE_INT_CST_LOW (index);
8567 if (TREE_CODE (init) == CONSTRUCTOR)
8568 {
8569 tree elem = CONSTRUCTOR_ELTS (init);
8570
8571 while (elem
8572 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8573 elem = TREE_CHAIN (elem);
8574 if (elem)
8575 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8576 palign);
8577 }
8578 }
8579 }
8580 }
8581
8582 /* ... fall through ... */
8583
8584 case COMPONENT_REF:
8585 case BIT_FIELD_REF:
8586 /* If the operand is a CONSTRUCTOR, we can just extract the
8587 appropriate field if it is present. Don't do this if we have
8588 already written the data since we want to refer to that copy
8589 and varasm.c assumes that's what we'll do. */
8590 if (TREE_CODE (exp) != ARRAY_REF
8591 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8592 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8593 {
8594 tree elt;
8595
8596 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8597 elt = TREE_CHAIN (elt))
8598 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8599 /* Note that unlike the case in expand_expr, we know this is
8600 BLKmode and hence not an integer. */
8601 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8602 }
8603
8604 {
8605 enum machine_mode mode1;
8606 int bitsize;
8607 int bitpos;
8608 tree offset;
8609 int volatilep = 0;
8610 int alignment;
8611 int unsignedp;
8612 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8613 &mode1, &unsignedp, &volatilep,
8614 &alignment);
8615
8616 /* If we got back the original object, something is wrong. Perhaps
8617 we are evaluating an expression too early. In any event, don't
8618 infinitely recurse. */
8619 if (tem == exp)
8620 abort ();
8621
8622 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8623
8624 /* If this is a constant, put it into a register if it is a
8625 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8626 if (CONSTANT_P (op0))
8627 {
8628 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8629
8630 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8631 && offset == 0)
8632 op0 = force_reg (inner_mode, op0);
8633 else
8634 op0 = validize_mem (force_const_mem (inner_mode, op0));
8635 }
8636
8637 if (offset != 0)
8638 {
8639 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8640
8641 /* If this object is in a register, put it into memory.
8642 This case can't occur in C, but can in Ada if we have
8643 unchecked conversion of an expression from a scalar type to
8644 an array or record type. */
8645 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8646 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8647 {
8648 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8649
8650 mark_temp_addr_taken (memloc);
8651 emit_move_insn (memloc, op0);
8652 op0 = memloc;
8653 }
8654
8655 if (GET_CODE (op0) != MEM)
8656 abort ();
8657
8658 if (GET_MODE (offset_rtx) != ptr_mode)
8659 {
8660 #ifdef POINTERS_EXTEND_UNSIGNED
8661 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8662 #else
8663 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8664 #endif
8665 }
8666
8667 op0 = change_address (op0, VOIDmode,
8668 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8669 force_reg (ptr_mode,
8670 offset_rtx)));
8671 }
8672
8673 /* Don't forget about volatility even if this is a bitfield. */
8674 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8675 {
8676 op0 = copy_rtx (op0);
8677 MEM_VOLATILE_P (op0) = 1;
8678 }
8679
8680 /* Check the access. */
8681 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8682 {
8683 rtx to;
8684 int size;
8685
8686 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8687 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
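/* E.g. with BITS_PER_UNIT == 8, a 3-bit field starting at bit 6 of its byte
   gives SIZE / BITS_PER_UNIT == (6 + 3 + 7) / 8 == 2, the number of bytes the
   field touches. */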
8688
8689 /* Check the access right of the pointer. */
8690 if (size > BITS_PER_UNIT)
8691 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8692 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8693 TYPE_MODE (sizetype),
8694 GEN_INT (MEMORY_USE_RO),
8695 TYPE_MODE (integer_type_node));
8696 }
8697
8698 /* In cases where an aligned union has an unaligned object
8699 as a field, we might be extracting a BLKmode value from
8700 an integer-mode (e.g., SImode) object. Handle this case
8701 by doing the extract into an object as wide as the field
8702 (which we know to be the width of a basic mode), then
8703 storing into memory, and changing the mode to BLKmode.
8704 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8705 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8706 if (mode1 == VOIDmode
8707 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8708 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8709 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8710 || bitpos % TYPE_ALIGN (type) != 0)))
8711 {
8712 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8713
8714 if (ext_mode == BLKmode)
8715 {
8716 /* In this case, BITPOS must start at a byte boundary. */
8717 if (GET_CODE (op0) != MEM
8718 || bitpos % BITS_PER_UNIT != 0)
8719 abort ();
8720
8721 op0 = change_address (op0, VOIDmode,
8722 plus_constant (XEXP (op0, 0),
8723 bitpos / BITS_PER_UNIT));
8724 }
8725 else
8726 {
8727 rtx new = assign_stack_temp (ext_mode,
8728 bitsize / BITS_PER_UNIT, 0);
8729
8730 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8731 unsignedp, NULL_RTX, ext_mode,
8732 ext_mode, alignment,
8733 int_size_in_bytes (TREE_TYPE (tem)));
8734
8735 /* If the result is a record type and BITSIZE is narrower than
8736 the mode of OP0, an integral mode, and this is a big endian
8737 machine, we must put the field into the high-order bits. */
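/* E.g., assuming 8-bit bytes and 32-bit SImode, a 24-bit field extracted into
   an SImode register lands in the low-order 24 bits; shifting it left by 8
   moves it into the bytes that come first in memory on a big-endian machine,
   so the BLKmode reference created below starts with the field. */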
8738 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8739 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8740 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8741 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8742 size_int (GET_MODE_BITSIZE
8743 (GET_MODE (op0))
8744 - bitsize),
8745 op0, 1);
8746
8747
8748 emit_move_insn (new, op0);
8749 op0 = copy_rtx (new);
8750 PUT_MODE (op0, BLKmode);
8751 }
8752 }
8753 else
8754 /* Get a reference to just this component. */
8755 op0 = change_address (op0, mode1,
8756 plus_constant (XEXP (op0, 0),
8757 (bitpos / BITS_PER_UNIT)));
8758
8759 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8760
8761 /* Adjust the alignment in case the bit position is not
8762 a multiple of the alignment of the inner object. */
8763 while (bitpos % alignment != 0)
8764 alignment >>= 1;
8765
8766 if (GET_CODE (XEXP (op0, 0)) == REG)
8767 mark_reg_pointer (XEXP (op0, 0), alignment);
8768
8769 MEM_IN_STRUCT_P (op0) = 1;
8770 MEM_VOLATILE_P (op0) |= volatilep;
8771
8772 *palign = alignment;
8773 return op0;
8774 }
8775
8776 default:
8777 break;
8778
8779 }
8780
8781 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8782 }
8783 \f
8784 /* Return the tree node and offset if a given argument corresponds to
8785 a string constant. */
8786
8787 tree
8788 string_constant (arg, ptr_offset)
8789 tree arg;
8790 tree *ptr_offset;
8791 {
8792 STRIP_NOPS (arg);
8793
8794 if (TREE_CODE (arg) == ADDR_EXPR
8795 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8796 {
8797 *ptr_offset = integer_zero_node;
8798 return TREE_OPERAND (arg, 0);
8799 }
8800 else if (TREE_CODE (arg) == PLUS_EXPR)
8801 {
8802 tree arg0 = TREE_OPERAND (arg, 0);
8803 tree arg1 = TREE_OPERAND (arg, 1);
8804
8805 STRIP_NOPS (arg0);
8806 STRIP_NOPS (arg1);
8807
8808 if (TREE_CODE (arg0) == ADDR_EXPR
8809 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8810 {
8811 *ptr_offset = arg1;
8812 return TREE_OPERAND (arg0, 0);
8813 }
8814 else if (TREE_CODE (arg1) == ADDR_EXPR
8815 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8816 {
8817 *ptr_offset = arg0;
8818 return TREE_OPERAND (arg1, 0);
8819 }
8820 }
8821
8822 return 0;
8823 }
8824 \f
8825 /* Expand code for a post- or pre- increment or decrement
8826 and return the RTX for the result.
8827 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8828
8829 static rtx
8830 expand_increment (exp, post, ignore)
8831 register tree exp;
8832 int post, ignore;
8833 {
8834 register rtx op0, op1;
8835 register rtx temp, value;
8836 register tree incremented = TREE_OPERAND (exp, 0);
8837 optab this_optab = add_optab;
8838 int icode;
8839 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8840 int op0_is_copy = 0;
8841 int single_insn = 0;
8842 /* 1 means we can't store into OP0 directly,
8843 because it is a subreg narrower than a word,
8844 and we don't dare clobber the rest of the word. */
8845 int bad_subreg = 0;
8846
8847 /* Stabilize any component ref that might need to be
8848 evaluated more than once below. */
8849 if (!post
8850 || TREE_CODE (incremented) == BIT_FIELD_REF
8851 || (TREE_CODE (incremented) == COMPONENT_REF
8852 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8853 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8854 incremented = stabilize_reference (incremented);
8855 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8856 ones into save exprs so that they don't accidentally get evaluated
8857 more than once by the code below. */
8858 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8859 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8860 incremented = save_expr (incremented);
8861
8862 /* Compute the operands as RTX.
8863 Note whether OP0 is the actual lvalue or a copy of it:
8864 I believe it is a copy iff it is a register or subreg
8865 and insns were generated in computing it. */
8866
8867 temp = get_last_insn ();
8868 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8869
8870 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8871 in place but instead must do sign- or zero-extension during assignment,
8872 so we copy it into a new register and let the code below use it as
8873 a copy.
8874
8875 Note that we can safely modify this SUBREG since it is known not to be
8876 shared (it was made by the expand_expr call above). */
8877
8878 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8879 {
8880 if (post)
8881 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8882 else
8883 bad_subreg = 1;
8884 }
8885 else if (GET_CODE (op0) == SUBREG
8886 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8887 {
8888 /* We cannot increment this SUBREG in place. If we are
8889 post-incrementing, get a copy of the old value. Otherwise,
8890 just mark that we cannot increment in place. */
8891 if (post)
8892 op0 = copy_to_reg (op0);
8893 else
8894 bad_subreg = 1;
8895 }
8896
8897 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8898 && temp != get_last_insn ());
8899 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8900 EXPAND_MEMORY_USE_BAD);
8901
8902 /* Decide whether incrementing or decrementing. */
8903 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8904 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8905 this_optab = sub_optab;
8906
8907 /* Convert decrement by a constant into a negative increment. */
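/* E.g. `x -= 4' becomes `x += -4' here, so only the add_optab patterns need
   be checked below. */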
8908 if (this_optab == sub_optab
8909 && GET_CODE (op1) == CONST_INT)
8910 {
8911 op1 = GEN_INT (- INTVAL (op1));
8912 this_optab = add_optab;
8913 }
8914
8915 /* For a preincrement, see if we can do this with a single instruction. */
8916 if (!post)
8917 {
8918 icode = (int) this_optab->handlers[(int) mode].insn_code;
8919 if (icode != (int) CODE_FOR_nothing
8920 /* Make sure that OP0 is valid for operands 0 and 1
8921 of the insn we want to queue. */
8922 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8923 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8924 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8925 single_insn = 1;
8926 }
8927
8928 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8929 then we cannot just increment OP0. We must therefore contrive to
8930 increment the original value. Then, for postincrement, we can return
8931 OP0 since it is a copy of the old value. For preincrement, expand here
8932 unless we can do it with a single insn.
8933
8934 Likewise if storing directly into OP0 would clobber high bits
8935 we need to preserve (bad_subreg). */
8936 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8937 {
8938 /* This is the easiest way to increment the value wherever it is.
8939 Problems with multiple evaluation of INCREMENTED are prevented
8940 because either (1) it is a component_ref or preincrement,
8941 in which case it was stabilized above, or (2) it is an array_ref
8942 with constant index in an array in a register, which is
8943 safe to reevaluate. */
8944 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8945 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8946 ? MINUS_EXPR : PLUS_EXPR),
8947 TREE_TYPE (exp),
8948 incremented,
8949 TREE_OPERAND (exp, 1));
8950
8951 while (TREE_CODE (incremented) == NOP_EXPR
8952 || TREE_CODE (incremented) == CONVERT_EXPR)
8953 {
8954 newexp = convert (TREE_TYPE (incremented), newexp);
8955 incremented = TREE_OPERAND (incremented, 0);
8956 }
8957
8958 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8959 return post ? op0 : temp;
8960 }
8961
8962 if (post)
8963 {
8964 /* We have a true reference to the value in OP0.
8965 If there is an insn to add or subtract in this mode, queue it.
8966 Queueing the increment insn avoids the register shuffling
8967 that often results if we must increment now and first save
8968 the old value for subsequent use. */
8969
8970 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8971 op0 = stabilize (op0);
8972 #endif
8973
8974 icode = (int) this_optab->handlers[(int) mode].insn_code;
8975 if (icode != (int) CODE_FOR_nothing
8976 /* Make sure that OP0 is valid for operands 0 and 1
8977 of the insn we want to queue. */
8978 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8979 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8980 {
8981 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8982 op1 = force_reg (mode, op1);
8983
8984 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8985 }
8986 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8987 {
8988 rtx addr = (general_operand (XEXP (op0, 0), mode)
8989 ? force_reg (Pmode, XEXP (op0, 0))
8990 : copy_to_reg (XEXP (op0, 0)));
8991 rtx temp, result;
8992
8993 op0 = change_address (op0, VOIDmode, addr);
8994 temp = force_reg (GET_MODE (op0), op0);
8995 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8996 op1 = force_reg (mode, op1);
8997
8998 /* The increment queue is LIFO, thus we have to `queue'
8999 the instructions in reverse order. */
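/* That is: the addition into TEMP, enqueued second, is emitted first, and the
   store of TEMP back into OP0, enqueued first, is emitted second. */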
9000 enqueue_insn (op0, gen_move_insn (op0, temp));
9001 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9002 return result;
9003 }
9004 }
9005
9006 /* Preincrement, or we can't increment with one simple insn. */
9007 if (post)
9008 /* Save a copy of the value before inc or dec, to return it later. */
9009 temp = value = copy_to_reg (op0);
9010 else
9011 /* Arrange to return the incremented value. */
9012 /* Copy the rtx because expand_binop will protect from the queue,
9013 and the results of that would be invalid for us to return
9014 if our caller does emit_queue before using our result. */
9015 temp = copy_rtx (value = op0);
9016
9017 /* Increment however we can. */
9018 op1 = expand_binop (mode, this_optab, value, op1,
9019 current_function_check_memory_usage ? NULL_RTX : op0,
9020 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9021 /* Make sure the value is stored into OP0. */
9022 if (op1 != op0)
9023 emit_move_insn (op0, op1);
9024
9025 return temp;
9026 }
9027 \f
9028 /* Expand all function calls contained within EXP, innermost ones first.
9029 But don't look within expressions that have sequence points.
9030 For each CALL_EXPR, record the rtx for its value
9031 in the CALL_EXPR_RTL field. */
9032
9033 static void
9034 preexpand_calls (exp)
9035 tree exp;
9036 {
9037 register int nops, i;
9038 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9039
9040 if (! do_preexpand_calls)
9041 return;
9042
9043 /* Only expressions and references can contain calls. */
9044
9045 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9046 return;
9047
9048 switch (TREE_CODE (exp))
9049 {
9050 case CALL_EXPR:
9051 /* Do nothing if already expanded. */
9052 if (CALL_EXPR_RTL (exp) != 0
9053 /* Do nothing if the call returns a variable-sized object. */
9054 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9055 /* Do nothing to built-in functions. */
9056 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9057 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9058 == FUNCTION_DECL)
9059 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9060 return;
9061
9062 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9063 return;
9064
9065 case COMPOUND_EXPR:
9066 case COND_EXPR:
9067 case TRUTH_ANDIF_EXPR:
9068 case TRUTH_ORIF_EXPR:
9069 /* If we find one of these, then we can be sure
9070 the adjust will be done for it (since it makes jumps).
9071 Do it now, so that if this is inside an argument
9072 of a function, we don't get the stack adjustment
9073 after some other args have already been pushed. */
9074 do_pending_stack_adjust ();
9075 return;
9076
9077 case BLOCK:
9078 case RTL_EXPR:
9079 case WITH_CLEANUP_EXPR:
9080 case CLEANUP_POINT_EXPR:
9081 case TRY_CATCH_EXPR:
9082 return;
9083
9084 case SAVE_EXPR:
9085 if (SAVE_EXPR_RTL (exp) != 0)
9086 return;
9087
9088 default:
9089 break;
9090 }
9091
9092 nops = tree_code_length[(int) TREE_CODE (exp)];
9093 for (i = 0; i < nops; i++)
9094 if (TREE_OPERAND (exp, i) != 0)
9095 {
9096 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9097 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9098 It doesn't happen before the call is made. */
9099 ;
9100 else
9101 {
9102 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9103 if (type == 'e' || type == '<' || type == '1' || type == '2'
9104 || type == 'r')
9105 preexpand_calls (TREE_OPERAND (exp, i));
9106 }
9107 }
9108 }
9109 \f
9110 /* At the start of a function, record that we have no previously-pushed
9111 arguments waiting to be popped. */
9112
9113 void
9114 init_pending_stack_adjust ()
9115 {
9116 pending_stack_adjust = 0;
9117 }
9118
9119 /* When exiting from function, if safe, clear out any pending stack adjust
9120 so the adjustment won't get done.
9121
9122 Note, if the current function calls alloca, then it must have a
9123 frame pointer regardless of the value of flag_omit_frame_pointer. */
9124
9125 void
9126 clear_pending_stack_adjust ()
9127 {
9128 #ifdef EXIT_IGNORE_STACK
9129 if (optimize > 0
9130 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9131 && EXIT_IGNORE_STACK
9132 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9133 && ! flag_inline_functions)
9134 pending_stack_adjust = 0;
9135 #endif
9136 }
9137
9138 /* Pop any previously-pushed arguments that have not been popped yet. */
9139
9140 void
9141 do_pending_stack_adjust ()
9142 {
9143 if (inhibit_defer_pop == 0)
9144 {
9145 if (pending_stack_adjust != 0)
9146 adjust_stack (GEN_INT (pending_stack_adjust));
9147 pending_stack_adjust = 0;
9148 }
9149 }
9150 \f
9151 /* Expand conditional expressions. */
9152
9153 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9154 LABEL is an rtx of code CODE_LABEL, in this function and all the
9155 functions here. */
9156
9157 void
9158 jumpifnot (exp, label)
9159 tree exp;
9160 rtx label;
9161 {
9162 do_jump (exp, label, NULL_RTX);
9163 }
9164
9165 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9166
9167 void
9168 jumpif (exp, label)
9169 tree exp;
9170 rtx label;
9171 {
9172 do_jump (exp, NULL_RTX, label);
9173 }
9174
9175 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9176 the result is zero, or IF_TRUE_LABEL if the result is one.
9177 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9178 meaning fall through in that case.
9179
9180 do_jump always does any pending stack adjust except when it does not
9181 actually perform a jump. An example where there is no jump
9182 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9183
9184 This function is responsible for optimizing cases such as
9185 &&, || and comparison operators in EXP. */
9186
9187 void
9188 do_jump (exp, if_false_label, if_true_label)
9189 tree exp;
9190 rtx if_false_label, if_true_label;
9191 {
9192 register enum tree_code code = TREE_CODE (exp);
9193 /* Some cases need to create a label to jump to
9194 in order to properly fall through.
9195 These cases set DROP_THROUGH_LABEL nonzero. */
9196 rtx drop_through_label = 0;
9197 rtx temp;
9198 int i;
9199 tree type;
9200 enum machine_mode mode;
9201
9202 #ifdef MAX_INTEGER_COMPUTATION_MODE
9203 check_max_integer_computation_mode (exp);
9204 #endif
9205
9206 emit_queue ();
9207
9208 switch (code)
9209 {
9210 case ERROR_MARK:
9211 break;
9212
9213 case INTEGER_CST:
9214 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9215 if (temp)
9216 emit_jump (temp);
9217 break;
9218
9219 #if 0
9220 /* This is not true with #pragma weak. */
9221 case ADDR_EXPR:
9222 /* The address of something can never be zero. */
9223 if (if_true_label)
9224 emit_jump (if_true_label);
9225 break;
9226 #endif
9227
9228 case NOP_EXPR:
9229 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9230 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9231 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9232 goto normal;
9233 case CONVERT_EXPR:
9234 /* If we are narrowing the operand, we have to do the compare in the
9235 narrower mode. */
9236 if ((TYPE_PRECISION (TREE_TYPE (exp))
9237 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9238 goto normal;
9239 case NON_LVALUE_EXPR:
9240 case REFERENCE_EXPR:
9241 case ABS_EXPR:
9242 case NEGATE_EXPR:
9243 case LROTATE_EXPR:
9244 case RROTATE_EXPR:
9245 /* These cannot change zero->non-zero or vice versa. */
9246 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9247 break;
9248
9249 case WITH_RECORD_EXPR:
9250 /* Put the object on the placeholder list, recurse through our first
9251 operand, and pop the list. */
9252 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9253 placeholder_list);
9254 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9255 placeholder_list = TREE_CHAIN (placeholder_list);
9256 break;
9257
9258 #if 0
9259 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9260 a test, and can take more if the test is eliminated. */
9261 case PLUS_EXPR:
9262 /* Reduce to minus. */
9263 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9264 TREE_OPERAND (exp, 0),
9265 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9266 TREE_OPERAND (exp, 1))));
9267 /* Process as MINUS. */
9268 #endif
9269
9270 case MINUS_EXPR:
9271 /* Non-zero iff operands of minus differ. */
9272 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9273 TREE_OPERAND (exp, 0),
9274 TREE_OPERAND (exp, 1)),
9275 NE, NE, if_false_label, if_true_label);
9276 break;
9277
9278 case BIT_AND_EXPR:
9279 /* If we are AND'ing with a small constant, do this comparison in the
9280 smallest type that fits. If the machine doesn't have comparisons
9281 that small, it will be converted back to the wider comparison.
9282 This helps if we are testing the sign bit of a narrower object.
9283 combine can't do this for us because it can't know whether a
9284 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9285
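/* E.g. testing `(x & 0x20) != 0' on a full-word X only needs the low-order
   byte, so on most machines it can be done as a QImode comparison. */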
9286 if (! SLOW_BYTE_ACCESS
9287 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9288 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9289 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9290 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9291 && (type = type_for_mode (mode, 1)) != 0
9292 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9293 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9294 != CODE_FOR_nothing))
9295 {
9296 do_jump (convert (type, exp), if_false_label, if_true_label);
9297 break;
9298 }
9299 goto normal;
9300
9301 case TRUTH_NOT_EXPR:
9302 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9303 break;
9304
9305 case TRUTH_ANDIF_EXPR:
9306 if (if_false_label == 0)
9307 if_false_label = drop_through_label = gen_label_rtx ();
9308 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9309 start_cleanup_deferral ();
9310 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9311 end_cleanup_deferral ();
9312 break;
9313
9314 case TRUTH_ORIF_EXPR:
9315 if (if_true_label == 0)
9316 if_true_label = drop_through_label = gen_label_rtx ();
9317 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9318 start_cleanup_deferral ();
9319 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9320 end_cleanup_deferral ();
9321 break;
9322
9323 case COMPOUND_EXPR:
9324 push_temp_slots ();
9325 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9326 preserve_temp_slots (NULL_RTX);
9327 free_temp_slots ();
9328 pop_temp_slots ();
9329 emit_queue ();
9330 do_pending_stack_adjust ();
9331 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9332 break;
9333
9334 case COMPONENT_REF:
9335 case BIT_FIELD_REF:
9336 case ARRAY_REF:
9337 {
9338 int bitsize, bitpos, unsignedp;
9339 enum machine_mode mode;
9340 tree type;
9341 tree offset;
9342 int volatilep = 0;
9343 int alignment;
9344
9345 /* Get description of this reference. We don't actually care
9346 about the underlying object here. */
9347 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9348 &mode, &unsignedp, &volatilep,
9349 &alignment);
9350
9351 type = type_for_size (bitsize, unsignedp);
9352 if (! SLOW_BYTE_ACCESS
9353 && type != 0 && bitsize >= 0
9354 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9355 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9356 != CODE_FOR_nothing))
9357 {
9358 do_jump (convert (type, exp), if_false_label, if_true_label);
9359 break;
9360 }
9361 goto normal;
9362 }
9363
9364 case COND_EXPR:
9365 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9366 if (integer_onep (TREE_OPERAND (exp, 1))
9367 && integer_zerop (TREE_OPERAND (exp, 2)))
9368 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9369
9370 else if (integer_zerop (TREE_OPERAND (exp, 1))
9371 && integer_onep (TREE_OPERAND (exp, 2)))
9372 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9373
9374 else
9375 {
9376 register rtx label1 = gen_label_rtx ();
9377 drop_through_label = gen_label_rtx ();
9378
9379 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9380
9381 start_cleanup_deferral ();
9382 /* Now the THEN-expression. */
9383 do_jump (TREE_OPERAND (exp, 1),
9384 if_false_label ? if_false_label : drop_through_label,
9385 if_true_label ? if_true_label : drop_through_label);
9386 /* In case the do_jump just above never jumps. */
9387 do_pending_stack_adjust ();
9388 emit_label (label1);
9389
9390 /* Now the ELSE-expression. */
9391 do_jump (TREE_OPERAND (exp, 2),
9392 if_false_label ? if_false_label : drop_through_label,
9393 if_true_label ? if_true_label : drop_through_label);
9394 end_cleanup_deferral ();
9395 }
9396 break;
9397
9398 case EQ_EXPR:
9399 {
9400 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9401
9402 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9403 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9404 {
9405 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9406 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
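/* A complex equality holds iff both the real parts and the imaginary parts
   are equal; the save_exprs above ensure each operand is expanded only once
   even though it is referenced twice below. */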
9407 do_jump
9408 (fold
9409 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9410 fold (build (EQ_EXPR, TREE_TYPE (exp),
9411 fold (build1 (REALPART_EXPR,
9412 TREE_TYPE (inner_type),
9413 exp0)),
9414 fold (build1 (REALPART_EXPR,
9415 TREE_TYPE (inner_type),
9416 exp1)))),
9417 fold (build (EQ_EXPR, TREE_TYPE (exp),
9418 fold (build1 (IMAGPART_EXPR,
9419 TREE_TYPE (inner_type),
9420 exp0)),
9421 fold (build1 (IMAGPART_EXPR,
9422 TREE_TYPE (inner_type),
9423 exp1)))))),
9424 if_false_label, if_true_label);
9425 }
9426
9427 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9428 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9429
9430 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9431 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9432 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9433 else
9434 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9435 break;
9436 }
9437
9438 case NE_EXPR:
9439 {
9440 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9441
9442 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9443 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9444 {
9445 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9446 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9447 do_jump
9448 (fold
9449 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9450 fold (build (NE_EXPR, TREE_TYPE (exp),
9451 fold (build1 (REALPART_EXPR,
9452 TREE_TYPE (inner_type),
9453 exp0)),
9454 fold (build1 (REALPART_EXPR,
9455 TREE_TYPE (inner_type),
9456 exp1)))),
9457 fold (build (NE_EXPR, TREE_TYPE (exp),
9458 fold (build1 (IMAGPART_EXPR,
9459 TREE_TYPE (inner_type),
9460 exp0)),
9461 fold (build1 (IMAGPART_EXPR,
9462 TREE_TYPE (inner_type),
9463 exp1)))))),
9464 if_false_label, if_true_label);
9465 }
9466
9467 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9468 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9469
9470 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9471 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9472 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9473 else
9474 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9475 break;
9476 }
9477
9478 case LT_EXPR:
9479 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9480 if (GET_MODE_CLASS (mode) == MODE_INT
9481 && ! can_compare_p (LT, mode, ccp_jump))
9482 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9483 else
9484 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9485 break;
9486
9487 case LE_EXPR:
9488 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9489 if (GET_MODE_CLASS (mode) == MODE_INT
9490 && ! can_compare_p (LE, mode, ccp_jump))
9491 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9492 else
9493 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9494 break;
9495
9496 case GT_EXPR:
9497 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9498 if (GET_MODE_CLASS (mode) == MODE_INT
9499 && ! can_compare_p (GT, mode, ccp_jump))
9500 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9501 else
9502 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9503 break;
9504
9505 case GE_EXPR:
9506 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9507 if (GET_MODE_CLASS (mode) == MODE_INT
9508 && ! can_compare_p (GE, mode, ccp_jump))
9509 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9510 else
9511 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9512 break;
9513
9514 case UNORDERED_EXPR:
9515 case ORDERED_EXPR:
9516 {
9517 enum rtx_code cmp, rcmp;
9518 int do_rev;
9519
9520 if (code == UNORDERED_EXPR)
9521 cmp = UNORDERED, rcmp = ORDERED;
9522 else
9523 cmp = ORDERED, rcmp = UNORDERED;
9524 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9525
9526 do_rev = 0;
9527 if (! can_compare_p (cmp, mode, ccp_jump)
9528 && (can_compare_p (rcmp, mode, ccp_jump)
9529 /* If the target doesn't provide either UNORDERED or ORDERED
9530 comparisons, canonicalize on UNORDERED for the library. */
9531 || rcmp == UNORDERED))
9532 do_rev = 1;
9533
9534 if (! do_rev)
9535 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9536 else
9537 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9538 }
9539 break;
9540
9541 {
9542 enum rtx_code rcode1;
9543 enum tree_code tcode2;
9544
9545 case UNLT_EXPR:
9546 rcode1 = UNLT;
9547 tcode2 = LT_EXPR;
9548 goto unordered_bcc;
9549 case UNLE_EXPR:
9550 rcode1 = UNLE;
9551 tcode2 = LE_EXPR;
9552 goto unordered_bcc;
9553 case UNGT_EXPR:
9554 rcode1 = UNGT;
9555 tcode2 = GT_EXPR;
9556 goto unordered_bcc;
9557 case UNGE_EXPR:
9558 rcode1 = UNGE;
9559 tcode2 = GE_EXPR;
9560 goto unordered_bcc;
9561 case UNEQ_EXPR:
9562 rcode1 = UNEQ;
9563 tcode2 = EQ_EXPR;
9564 goto unordered_bcc;
9565
9566 unordered_bcc:
9567 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9568 if (can_compare_p (rcode1, mode, ccp_jump))
9569 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9570 if_true_label);
9571 else
9572 {
9573 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9574 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9575 tree cmp0, cmp1;
9576
9577 /* If the target doesn't support combined unordered
9578 compares, decompose into UNORDERED + comparison. */
9579 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9580 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9581 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9582 do_jump (exp, if_false_label, if_true_label);
9583 }
9584 }
9585 break;
9586
9587 default:
9588 normal:
9589 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9590 #if 0
9591 /* This is not needed any more and causes poor code since it causes
9592 comparisons and tests from non-SI objects to have different code
9593 sequences. */
9594 /* Copy to register to avoid generating bad insns by cse
9595 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9596 if (!cse_not_expected && GET_CODE (temp) == MEM)
9597 temp = copy_to_reg (temp);
9598 #endif
9599 do_pending_stack_adjust ();
9600 /* Do any postincrements in the expression that was tested. */
9601 emit_queue ();
9602
9603 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9604 {
9605 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9606 if (target)
9607 emit_jump (target);
9608 }
9609 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9610 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9611 /* Note swapping the labels gives us not-equal. */
9612 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9613 else if (GET_MODE (temp) != VOIDmode)
9614 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9615 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9616 GET_MODE (temp), NULL_RTX, 0,
9617 if_false_label, if_true_label);
9618 else
9619 abort ();
9620 }
9621
9622 if (drop_through_label)
9623 {
9624 /* If do_jump produces code that might be jumped around,
9625 do any stack adjusts from that code, before the place
9626 where control merges in. */
9627 do_pending_stack_adjust ();
9628 emit_label (drop_through_label);
9629 }
9630 }
9631 \f
9632 /* Given a comparison expression EXP for values too wide to be compared
9633 with one insn, test the comparison and jump to the appropriate label.
9634 The code of EXP is ignored; we always test GT if SWAP is 0,
9635 and LT if SWAP is 1. */
9636
9637 static void
9638 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9639 tree exp;
9640 int swap;
9641 rtx if_false_label, if_true_label;
9642 {
9643 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9644 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9645 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9646 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9647
9648 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9649 }
9650
9651 /* Compare OP0 with OP1, word at a time, in mode MODE.
9652 UNSIGNEDP says to do unsigned comparison.
9653 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9654
9655 void
9656 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9657 enum machine_mode mode;
9658 int unsignedp;
9659 rtx op0, op1;
9660 rtx if_false_label, if_true_label;
9661 {
9662 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9663 rtx drop_through_label = 0;
9664 int i;
9665
9666 if (! if_true_label || ! if_false_label)
9667 drop_through_label = gen_label_rtx ();
9668 if (! if_true_label)
9669 if_true_label = drop_through_label;
9670 if (! if_false_label)
9671 if_false_label = drop_through_label;
9672
9673 /* Compare a word at a time, high order first. */
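/* E.g. for a two-word comparison: if the high-order words compare greater,
   jump to IF_TRUE_LABEL; if they differ at all (OP0 must then be less), jump
   to IF_FALSE_LABEL; only when they are equal are the low-order words
   examined, and those are always compared unsigned. */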
9674 for (i = 0; i < nwords; i++)
9675 {
9676 rtx op0_word, op1_word;
9677
9678 if (WORDS_BIG_ENDIAN)
9679 {
9680 op0_word = operand_subword_force (op0, i, mode);
9681 op1_word = operand_subword_force (op1, i, mode);
9682 }
9683 else
9684 {
9685 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9686 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9687 }
9688
9689 /* All but the high-order word must be compared as unsigned. */
9690 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9691 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9692 NULL_RTX, if_true_label);
9693
9694 /* Consider lower words only if these are equal. */
9695 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9696 NULL_RTX, 0, NULL_RTX, if_false_label);
9697 }
9698
9699 if (if_false_label)
9700 emit_jump (if_false_label);
9701 if (drop_through_label)
9702 emit_label (drop_through_label);
9703 }
9704
9705 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9706 with one insn, test the comparison and jump to the appropriate label. */
9707
9708 static void
9709 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9710 tree exp;
9711 rtx if_false_label, if_true_label;
9712 {
9713 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9714 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9715 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9716 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9717 int i;
9718 rtx drop_through_label = 0;
9719
9720 if (! if_false_label)
9721 drop_through_label = if_false_label = gen_label_rtx ();
9722
9723 for (i = 0; i < nwords; i++)
9724 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9725 operand_subword_force (op1, i, mode),
9726 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9727 word_mode, NULL_RTX, 0, if_false_label,
9728 NULL_RTX);
9729
9730 if (if_true_label)
9731 emit_jump (if_true_label);
9732 if (drop_through_label)
9733 emit_label (drop_through_label);
9734 }
9735 \f
9736 /* Jump according to whether OP0 is 0.
9737 We assume that OP0 has an integer mode that is too wide
9738 for the available compare insns. */
9739
9740 void
9741 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9742 rtx op0;
9743 rtx if_false_label, if_true_label;
9744 {
9745 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9746 rtx part;
9747 int i;
9748 rtx drop_through_label = 0;
9749
9750 /* The fastest way of doing this comparison on almost any machine is to
9751 "or" all the words and compare the result. If all have to be loaded
9752 from memory and this is a very wide item, it's possible this may
9753 be slower, but that's highly unlikely. */
9754
9755 part = gen_reg_rtx (word_mode);
9756 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9757 for (i = 1; i < nwords && part != 0; i++)
9758 part = expand_binop (word_mode, ior_optab, part,
9759 operand_subword_force (op0, i, GET_MODE (op0)),
9760 part, 1, OPTAB_WIDEN);
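/* If the widening IORs all succeeded, PART now holds the inclusive OR of
   every word of OP0, so a single comparison of PART against zero decides the
   whole test. */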
9761
9762 if (part != 0)
9763 {
9764 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9765 NULL_RTX, 0, if_false_label, if_true_label);
9766
9767 return;
9768 }
9769
9770 /* If we couldn't do the "or" simply, do this with a series of compares. */
9771 if (! if_false_label)
9772 drop_through_label = if_false_label = gen_label_rtx ();
9773
9774 for (i = 0; i < nwords; i++)
9775 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9776 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9777 if_false_label, NULL_RTX);
9778
9779 if (if_true_label)
9780 emit_jump (if_true_label);
9781
9782 if (drop_through_label)
9783 emit_label (drop_through_label);
9784 }
9785 \f
9786 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9787 (including code to compute the values to be compared)
9788 and set (CC0) according to the result.
9789 The decision as to signed or unsigned comparison must be made by the caller.
9790
9791 We force a stack adjustment unless there are currently
9792 things pushed on the stack that aren't yet used.
9793
9794 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9795 compared.
9796
9797 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9798 size of MODE should be used. */
9799
9800 rtx
9801 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9802 register rtx op0, op1;
9803 enum rtx_code code;
9804 int unsignedp;
9805 enum machine_mode mode;
9806 rtx size;
9807 int align;
9808 {
9809 rtx tem;
9810
9811 /* If one operand is constant, make it the second one. Only do this
9812 if the other operand is not constant as well. */
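/* E.g. `3 < X' is rewritten below as `X > 3'; note that swap_condition maps
   LT to GT (operands swapped), not to GE. */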
9813
9814 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9815 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9816 {
9817 tem = op0;
9818 op0 = op1;
9819 op1 = tem;
9820 code = swap_condition (code);
9821 }
9822
9823 if (flag_force_mem)
9824 {
9825 op0 = force_not_mem (op0);
9826 op1 = force_not_mem (op1);
9827 }
9828
9829 do_pending_stack_adjust ();
9830
9831 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9832 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9833 return tem;
9834
9835 #if 0
9836 /* There's no need to do this now that combine.c can eliminate lots of
9837 sign extensions. This can be less efficient in certain cases on other
9838 machines. */
9839
9840 /* If this is a signed equality comparison, we can do it as an
9841 unsigned comparison since zero-extension is cheaper than sign
9842 extension and comparisons with zero are done as unsigned. This is
9843 the case even on machines that can do fast sign extension, since
9844 zero-extension is easier to combine with other operations than
9845 sign-extension is. If we are comparing against a constant, we must
9846 convert it to what it would look like unsigned. */
9847 if ((code == EQ || code == NE) && ! unsignedp
9848 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9849 {
9850 if (GET_CODE (op1) == CONST_INT
9851 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9852 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9853 unsignedp = 1;
9854 }
9855 #endif
9856
9857 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9858
9859 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9860 }
9861
9862 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9863 The decision as to signed or unsigned comparison must be made by the caller.
9864
9865 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9866 compared.
9867
9868 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9869 size of MODE should be used. */
9870
9871 void
9872 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9873 if_false_label, if_true_label)
9874 register rtx op0, op1;
9875 enum rtx_code code;
9876 int unsignedp;
9877 enum machine_mode mode;
9878 rtx size;
9879 int align;
9880 rtx if_false_label, if_true_label;
9881 {
9882 rtx tem;
9883 int dummy_true_label = 0;
9884
9885 /* Reverse the comparison if that is safe and we want to jump if it is
9886 false. */
9887 if (! if_true_label && ! FLOAT_MODE_P (mode))
9888 {
9889 if_true_label = if_false_label;
9890 if_false_label = 0;
9891 code = reverse_condition (code);
9892 }
9893
9894 /* If one operand is constant, make it the second one. Only do this
9895 if the other operand is not constant as well. */
9896
9897 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9898 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9899 {
9900 tem = op0;
9901 op0 = op1;
9902 op1 = tem;
9903 code = swap_condition (code);
9904 }
9905
9906 if (flag_force_mem)
9907 {
9908 op0 = force_not_mem (op0);
9909 op1 = force_not_mem (op1);
9910 }
9911
9912 do_pending_stack_adjust ();
9913
9914 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9915 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9916 {
9917 if (tem == const_true_rtx)
9918 {
9919 if (if_true_label)
9920 emit_jump (if_true_label);
9921 }
9922 else
9923 {
9924 if (if_false_label)
9925 emit_jump (if_false_label);
9926 }
9927 return;
9928 }
9929
9930 #if 0
9931 /* There's no need to do this now that combine.c can eliminate lots of
9932 sign extensions. This can be less efficient in certain cases on other
9933 machines. */
9934
9935 /* If this is a signed equality comparison, we can do it as an
9936 unsigned comparison since zero-extension is cheaper than sign
9937 extension and comparisons with zero are done as unsigned. This is
9938 the case even on machines that can do fast sign extension, since
9939 zero-extension is easier to combine with other operations than
9940 sign-extension is. If we are comparing against a constant, we must
9941 convert it to what it would look like unsigned. */
9942 if ((code == EQ || code == NE) && ! unsignedp
9943 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9944 {
9945 if (GET_CODE (op1) == CONST_INT
9946 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9947 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9948 unsignedp = 1;
9949 }
9950 #endif
9951
9952 if (! if_true_label)
9953 {
9954 dummy_true_label = 1;
9955 if_true_label = gen_label_rtx ();
9956 }
9957
9958 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9959 if_true_label);
9960
9961 if (if_false_label)
9962 emit_jump (if_false_label);
9963 if (dummy_true_label)
9964 emit_label (if_true_label);
9965 }
9966
9967 /* Generate code for a comparison expression EXP (including code to compute
9968 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9969 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9970 generated code will drop through.
9971 SIGNED_CODE should be the rtx operation for this comparison for
9972 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9973
9974 We force a stack adjustment unless there are currently
9975 things pushed on the stack that aren't yet used. */
9976
9977 static void
9978 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9979 if_true_label)
9980 register tree exp;
9981 enum rtx_code signed_code, unsigned_code;
9982 rtx if_false_label, if_true_label;
9983 {
9984 int align0, align1;
9985 register rtx op0, op1;
9986 register tree type;
9987 register enum machine_mode mode;
9988 int unsignedp;
9989 enum rtx_code code;
9990
9991 /* Don't crash if the comparison was erroneous. */
9992 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9993 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9994 return;
9995
9996 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9997 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9998 mode = TYPE_MODE (type);
9999 unsignedp = TREE_UNSIGNED (type);
10000 code = unsignedp ? unsigned_code : signed_code;
10001
10002 #ifdef HAVE_canonicalize_funcptr_for_compare
10003 /* If function pointers need to be "canonicalized" before they can
10004 be reliably compared, then canonicalize them. */
10005 if (HAVE_canonicalize_funcptr_for_compare
10006 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10007 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10008 == FUNCTION_TYPE))
10009 {
10010 rtx new_op0 = gen_reg_rtx (mode);
10011
10012 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10013 op0 = new_op0;
10014 }
10015
10016 if (HAVE_canonicalize_funcptr_for_compare
10017 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10018 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10019 == FUNCTION_TYPE))
10020 {
10021 rtx new_op1 = gen_reg_rtx (mode);
10022
10023 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10024 op1 = new_op1;
10025 }
10026 #endif
10027
10028 /* Do any postincrements in the expression that was tested. */
10029 emit_queue ();
10030
10031 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10032 ((mode == BLKmode)
10033 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10034 MIN (align0, align1) / BITS_PER_UNIT,
10035 if_false_label, if_true_label);
10036 }
10037 \f
10038 /* Generate code to calculate EXP using a store-flag instruction
10039 and return an rtx for the result. EXP is either a comparison
10040 or a TRUTH_NOT_EXPR whose operand is a comparison.
10041
10042 If TARGET is nonzero, store the result there if convenient.
10043
10044 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10045 cheap.
10046
10047 Return zero if there is no suitable set-flag instruction
10048 available on this machine.
10049
10050 Once expand_expr has been called on the arguments of the comparison,
10051 we are committed to doing the store flag, since it is not safe to
10052 re-evaluate the expression. We emit the store-flag insn by calling
10053 emit_store_flag, but only expand the arguments if we have a reason
10054 to believe that emit_store_flag will be successful. If we think that
10055 it will be, but it isn't, we have to simulate the store-flag with a
10056 set/jump/set sequence. */
10057
10058 static rtx
10059 do_store_flag (exp, target, mode, only_cheap)
10060 tree exp;
10061 rtx target;
10062 enum machine_mode mode;
10063 int only_cheap;
10064 {
10065 enum rtx_code code;
10066 tree arg0, arg1, type;
10067 tree tem;
10068 enum machine_mode operand_mode;
10069 int invert = 0;
10070 int unsignedp;
10071 rtx op0, op1;
10072 enum insn_code icode;
10073 rtx subtarget = target;
10074 rtx result, label;
10075
10076 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10077 result at the end. We can't simply invert the test since it would
10078 have already been inverted if it were valid. This case occurs for
10079 some floating-point comparisons. */
10080
10081 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10082 invert = 1, exp = TREE_OPERAND (exp, 0);
10083
10084 arg0 = TREE_OPERAND (exp, 0);
10085 arg1 = TREE_OPERAND (exp, 1);
10086 type = TREE_TYPE (arg0);
10087 operand_mode = TYPE_MODE (type);
10088 unsignedp = TREE_UNSIGNED (type);
10089
10090 /* We won't bother with BLKmode store-flag operations because it would mean
10091 passing a lot of information to emit_store_flag. */
10092 if (operand_mode == BLKmode)
10093 return 0;
10094
10095 /* We won't bother with store-flag operations involving function pointers
10096 when function pointers must be canonicalized before comparisons. */
10097 #ifdef HAVE_canonicalize_funcptr_for_compare
10098 if (HAVE_canonicalize_funcptr_for_compare
10099 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10100 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10101 == FUNCTION_TYPE))
10102 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10103 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10104 == FUNCTION_TYPE))))
10105 return 0;
10106 #endif
10107
10108 STRIP_NOPS (arg0);
10109 STRIP_NOPS (arg1);
10110
10111 /* Get the rtx comparison code to use. We know that EXP is a comparison
10112 operation of some type. Some comparisons against 1 and -1 can be
10113 converted to comparisons with zero. Do so here so that the tests
10114 below will be aware that we have a comparison with zero. These
10115 tests will not catch constants in the first operand, but constants
10116 are rarely passed as the first operand. */
10117
10118 switch (TREE_CODE (exp))
10119 {
10120 case EQ_EXPR:
10121 code = EQ;
10122 break;
10123 case NE_EXPR:
10124 code = NE;
10125 break;
10126 case LT_EXPR:
10127 if (integer_onep (arg1))
10128 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10129 else
10130 code = unsignedp ? LTU : LT;
10131 break;
10132 case LE_EXPR:
10133 if (! unsignedp && integer_all_onesp (arg1))
10134 arg1 = integer_zero_node, code = LT;
10135 else
10136 code = unsignedp ? LEU : LE;
10137 break;
10138 case GT_EXPR:
10139 if (! unsignedp && integer_all_onesp (arg1))
10140 arg1 = integer_zero_node, code = GE;
10141 else
10142 code = unsignedp ? GTU : GT;
10143 break;
10144 case GE_EXPR:
10145 if (integer_onep (arg1))
10146 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10147 else
10148 code = unsignedp ? GEU : GE;
10149 break;
10150
10151 case UNORDERED_EXPR:
10152 code = UNORDERED;
10153 break;
10154 case ORDERED_EXPR:
10155 code = ORDERED;
10156 break;
10157 case UNLT_EXPR:
10158 code = UNLT;
10159 break;
10160 case UNLE_EXPR:
10161 code = UNLE;
10162 break;
10163 case UNGT_EXPR:
10164 code = UNGT;
10165 break;
10166 case UNGE_EXPR:
10167 code = UNGE;
10168 break;
10169 case UNEQ_EXPR:
10170 code = UNEQ;
10171 break;
10172
10173 default:
10174 abort ();
10175 }
10176
10177 /* Put a constant second. */
10178 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10179 {
10180 tem = arg0; arg0 = arg1; arg1 = tem;
10181 code = swap_condition (code);
10182 }
10183
10184 /* If this is an equality or inequality test of a single bit, we can
10185 do this by shifting the bit being tested to the low-order bit and
10186 masking the result with the constant 1. If the condition was EQ,
10187 we xor it with 1. This does not require an scc insn and is faster
10188 than an scc insn even if we have it. */
10189
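/* E.g. for `(x & 8) != 0', BITNUM is 3: X is shifted right by 3, XORed with 1
   only in the EQ case, and finally masked with 1 (the AND is left for last so
   it can combine with later operations). */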
10190 if ((code == NE || code == EQ)
10191 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10192 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10193 {
10194 tree inner = TREE_OPERAND (arg0, 0);
10195 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10196 int ops_unsignedp;
10197
10198 /* If INNER is a right shift of a constant and it plus BITNUM does
10199 not overflow, adjust BITNUM and INNER. */
10200
10201 if (TREE_CODE (inner) == RSHIFT_EXPR
10202 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10203 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10204 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10205 < TYPE_PRECISION (type)))
10206 {
10207 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10208 inner = TREE_OPERAND (inner, 0);
10209 }
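/* E.g. `((x >> 2) & 4) != 0' starts with BITNUM == 2; the adjustment above
   makes BITNUM 4 and INNER just X, so we test bit 4 of X directly and never
   expand the shift. */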
10210
10211 /* If we are going to be able to omit the AND below, we must do our
10212 operations as unsigned. If we must use the AND, we have a choice.
10213 Normally unsigned is faster, but for some machines signed is. */
10214 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10215 #ifdef LOAD_EXTEND_OP
10216 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10217 #else
10218 : 1
10219 #endif
10220 );
10221
10222 if (subtarget == 0 || GET_CODE (subtarget) != REG
10223 || GET_MODE (subtarget) != operand_mode
10224 || ! safe_from_p (subtarget, inner, 1))
10225 subtarget = 0;
10226
10227 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10228
10229 if (bitnum != 0)
10230 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10231 size_int (bitnum), subtarget, ops_unsignedp);
10232
10233 if (GET_MODE (op0) != mode)
10234 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10235
10236 if ((code == EQ && ! invert) || (code == NE && invert))
10237 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10238 ops_unsignedp, OPTAB_LIB_WIDEN);
10239
10240 /* Put the AND last so it can combine with more things. */
10241 if (bitnum != TYPE_PRECISION (type) - 1)
10242 op0 = expand_and (op0, const1_rtx, subtarget);
10243
10244 return op0;
10245 }
10246
10247 /* Now see if we are likely to be able to do this. Return if not. */
10248 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10249 return 0;
10250
10251 icode = setcc_gen_code[(int) code];
10252 if (icode == CODE_FOR_nothing
10253 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10254 {
10255 /* We can only do this if it is one of the special cases that
10256 can be handled without an scc insn. */
10257 if ((code == LT && integer_zerop (arg1))
10258 || (! only_cheap && code == GE && integer_zerop (arg1)))
10259 ;
10260 else if (BRANCH_COST >= 0
10261 && ! only_cheap && (code == NE || code == EQ)
10262 && TREE_CODE (type) != REAL_TYPE
10263 && ((abs_optab->handlers[(int) operand_mode].insn_code
10264 != CODE_FOR_nothing)
10265 || (ffs_optab->handlers[(int) operand_mode].insn_code
10266 != CODE_FOR_nothing)))
10267 ;
10268 else
10269 return 0;
10270 }
10271
10272 preexpand_calls (exp);
10273 if (subtarget == 0 || GET_CODE (subtarget) != REG
10274 || GET_MODE (subtarget) != operand_mode
10275 || ! safe_from_p (subtarget, arg1, 1))
10276 subtarget = 0;
10277
10278 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10279 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10280
10281 if (target == 0)
10282 target = gen_reg_rtx (mode);
10283
10284 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10285 because, if emit_store_flag does anything, it will succeed and
10286 OP0 and OP1 will not be used subsequently. */
10287
10288 result = emit_store_flag (target, code,
10289 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10290 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10291 operand_mode, unsignedp, 1);
10292
10293 if (result)
10294 {
10295 if (invert)
10296 result = expand_binop (mode, xor_optab, result, const1_rtx,
10297 result, 0, OPTAB_LIB_WIDEN);
10298 return result;
10299 }
10300
10301 /* If this failed, we have to do this with set/compare/jump/set code. */
10302 if (GET_CODE (target) != REG
10303 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10304 target = gen_reg_rtx (GET_MODE (target));
10305
10306 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10307 result = compare_from_rtx (op0, op1, code, unsignedp,
10308 operand_mode, NULL_RTX, 0);
10309 if (GET_CODE (result) == CONST_INT)
10310 return (((result == const0_rtx && ! invert)
10311 || (result != const0_rtx && invert))
10312 ? const0_rtx : const1_rtx);
10313
10314 label = gen_label_rtx ();
10315 if (bcc_gen_fctn[(int) code] == 0)
10316 abort ();
10317
10318 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10319 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10320 emit_label (label);
10321
10322 return target;
10323 }
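/* Editorial sketch (illustration only): when emit_store_flag cannot
   handle the comparison, the set/compare/jump/set fallback above emits
   code of roughly this shape (shown for INVERT == 0):

       target = 1;
       if (op0 <code> op1) goto label;
       target = 0;
     label:

   With INVERT set, the two constants are swapped; and when
   compare_from_rtx folds to a constant, the branch is skipped entirely
   and const0_rtx or const1_rtx is returned directly.  */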
10324 \f
10325 /* Generate a tablejump instruction (used for switch statements). */
10326
10327 #ifdef HAVE_tablejump
10328
10329 /* INDEX is the value being switched on, with the lowest value
10330 in the table already subtracted.
10331 MODE is its expected mode (needed if INDEX is constant).
10332 RANGE is the length of the jump table.
10333 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10334
10335 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10336 index value is out of range. */
10337
10338 void
10339 do_tablejump (index, mode, range, table_label, default_label)
10340 rtx index, range, table_label, default_label;
10341 enum machine_mode mode;
10342 {
10343 register rtx temp, vector;
10344
10345 /* Do an unsigned comparison (in the proper mode) between the index
10346 expression and the value which represents the length of the range.
10347 Since we just finished subtracting the lower bound of the range
10348 from the index expression, this comparison allows us to simultaneously
10349 check that the original index expression value is both greater than
10350 or equal to the minimum value of the range and less than or equal to
10351 the maximum value of the range. */
10352
10353 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10354 0, default_label);
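/* Editorial worked example (values are illustrative): because the lower
   bound was already subtracted, a single unsigned GTU test performs both
   bounds checks.  Suppose the case labels span 5 .. 12, so RANGE is 7 and
   INDEX holds orig_index - 5 (32-bit arithmetic assumed):

       orig_index  3  ->  INDEX = 0xfffffffe  >u 7   ->  default_label
       orig_index  5  ->  INDEX = 0           <=u 7  ->  table
       orig_index 12  ->  INDEX = 7           <=u 7  ->  table
       orig_index 15  ->  INDEX = 10          >u 7   ->  default_label

   Values below the lower bound wrap around to large unsigned numbers and
   are rejected by the same comparison that rejects values above the
   upper bound.  */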
10355
10356 /* If index is in range, it must fit in Pmode.
10357 Convert to Pmode so we can index with it. */
10358 if (mode != Pmode)
10359 index = convert_to_mode (Pmode, index, 1);
10360
10361 /* Don't let a MEM slip through, because then the INDEX that comes
10362 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10363 and break_out_memory_refs will go to work on it and mess it up. */
10364 #ifdef PIC_CASE_VECTOR_ADDRESS
10365 if (flag_pic && GET_CODE (index) != REG)
10366 index = copy_to_mode_reg (Pmode, index);
10367 #endif
10368
10369 /* If flag_force_addr were to affect this address
10370 it could interfere with the tricky assumptions made
10371 about addresses that contain label-refs,
10372 which may be valid only very near the tablejump itself. */
10373 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10374 GET_MODE_SIZE, because that gives the size of each dispatch-table
10375 entry. The other uses should all be Pmode, because they are addresses.
10376 This code could fail if addresses and table entries are not the same size. */
10377 index = gen_rtx_PLUS (Pmode,
10378 gen_rtx_MULT (Pmode, index,
10379 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10380 gen_rtx_LABEL_REF (Pmode, table_label));
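/* Editorial example (CASE_VECTOR_MODE == SImode is an assumption made
   only for illustration): the address built above is

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   so with 4-byte table entries, index 3 selects the entry at
   table_label + 12, the fourth slot of the dispatch table.  */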
10381 #ifdef PIC_CASE_VECTOR_ADDRESS
10382 if (flag_pic)
10383 index = PIC_CASE_VECTOR_ADDRESS (index);
10384 else
10385 #endif
10386 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10387 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10388 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10389 RTX_UNCHANGING_P (vector) = 1;
10390 convert_move (temp, vector, 0);
10391
10392 emit_jump_insn (gen_tablejump (temp, table_label));
10393
10394 /* If we are generating PIC code or if the table is PC-relative, the
10395 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10396 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10397 emit_barrier ();
10398 }
10399
10400 #endif /* HAVE_tablejump */