expr.c (store_constructor): Do not emit USE.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "tm_p.h"
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Don't check memory usage, since code is being emitted to check memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
91
92 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
93 static tree placeholder_list = 0;
94
95 /* This structure is used by move_by_pieces to describe the move to
96 be performed. */
97 struct move_by_pieces
98 {
99 rtx to;
100 rtx to_addr;
101 int autinc_to;
102 int explicit_inc_to;
103 int to_struct;
104 int to_readonly;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 int from_struct;
110 int from_readonly;
111 int len;
112 int offset;
113 int reverse;
114 };
115
116 /* This structure is used by clear_by_pieces to describe the clear to
117 be performed. */
118
119 struct clear_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 int to_struct;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 extern struct obstack permanent_obstack;
132
133 static rtx get_push_address PARAMS ((int));
134
135 static rtx enqueue_insn PARAMS ((rtx, rtx));
136 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
140 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
141 enum machine_mode,
142 struct clear_by_pieces *));
143 static int is_zeros_p PARAMS ((tree));
144 static int mostly_zeros_p PARAMS ((tree));
145 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
146 tree, tree, unsigned int, int));
147 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
148 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
149 tree, enum machine_mode, int,
150 unsigned int, int, int));
151 static enum memory_use_mode
152 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
153 static tree save_noncopied_parts PARAMS ((tree, tree));
154 static tree init_noncopied_parts PARAMS ((tree, tree));
155 static int safe_from_p PARAMS ((rtx, tree, int));
156 static int fixed_type_p PARAMS ((tree));
157 static rtx var_rtx PARAMS ((tree));
158 static int readonly_fields_p PARAMS ((tree));
159 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
160 static rtx expand_increment PARAMS ((tree, int, int));
161 static void preexpand_calls PARAMS ((tree));
162 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
163 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
164 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
165 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
166
167 /* Record for each mode whether we can move a register directly to or
168 from an object of that mode in memory. If we can't, we won't try
169 to use that mode directly when accessing a field of that mode. */
170
171 static char direct_load[NUM_MACHINE_MODES];
172 static char direct_store[NUM_MACHINE_MODES];
173
174 /* If a memory-to-memory move would take MOVE_RATIO or more simple
175 move-instruction sequences, we will do a movstr or libcall instead. */
176
177 #ifndef MOVE_RATIO
178 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
179 #define MOVE_RATIO 2
180 #else
181 /* If we are optimizing for space (-Os), cut down the default move ratio. */
182 #define MOVE_RATIO (optimize_size ? 3 : 15)
183 #endif
184 #endif
185
186 /* This macro is used to determine whether move_by_pieces should be called
187 to perform a structure copy. */
188 #ifndef MOVE_BY_PIECES_P
189 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
190 (SIZE, ALIGN) < MOVE_RATIO)
191 #endif
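/* Illustrative worked example, not part of the original source: with the
   default MOVE_RATIO of 15 (not optimizing for size) on a hypothetical
   32-bit target, a 16-byte copy at 4-byte alignment costs 16/4 = 4 SImode
   moves, 4 < 15 holds, and MOVE_BY_PIECES_P says to expand the copy inline
   with move_by_pieces.  A 256-byte copy would cost 64 moves, 64 < 15 fails,
   and emit_block_move falls back to a movstr pattern or a memcpy/bcopy
   call instead.  */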
192
193 /* This array records the insn_code of insns to perform block moves. */
194 enum insn_code movstr_optab[NUM_MACHINE_MODES];
195
196 /* This array records the insn_code of insns to perform block clears. */
197 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
198
199 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
200
201 #ifndef SLOW_UNALIGNED_ACCESS
202 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
203 #endif
204 \f
205 /* This is run once per compilation to set up which modes can be used
206 directly in memory and to initialize the block move optab. */
207
208 void
209 init_expr_once ()
210 {
211 rtx insn, pat;
212 enum machine_mode mode;
213 int num_clobbers;
214 rtx mem, mem1;
215 char *free_point;
216
217 start_sequence ();
218
219 /* Since we are on the permanent obstack, we must be sure we save this
220 spot AFTER we call start_sequence, since it will reuse the rtl it
221 makes. */
222 free_point = (char *) oballoc (0);
223
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
229
230 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
231 pat = PATTERN (insn);
232
233 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
234 mode = (enum machine_mode) ((int) mode + 1))
235 {
236 int regno;
237 rtx reg;
238
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
242
243 /* See if there is some register that can be used in this mode and
244 directly loaded or stored from memory. */
245
246 if (mode != VOIDmode && mode != BLKmode)
247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
248 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
249 regno++)
250 {
251 if (! HARD_REGNO_MODE_OK (regno, mode))
252 continue;
253
254 reg = gen_rtx_REG (mode, regno);
255
256 SET_SRC (pat) = mem;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
260
261 SET_SRC (pat) = mem1;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem1;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275 }
276 }
277
278 end_sequence ();
279 obfree (free_point);
280 }
281
282 /* This is run at the start of compiling a function. */
283
284 void
285 init_expr ()
286 {
287 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
288
289 pending_chain = 0;
290 pending_stack_adjust = 0;
291 inhibit_defer_pop = 0;
292 saveregs_value = 0;
293 apply_args_value = 0;
294 forced_labels = 0;
295 }
296
297 void
298 mark_expr_status (p)
299 struct expr_status *p;
300 {
301 if (p == NULL)
302 return;
303
304 ggc_mark_rtx (p->x_saveregs_value);
305 ggc_mark_rtx (p->x_apply_args_value);
306 ggc_mark_rtx (p->x_forced_labels);
307 }
308
309 void
310 free_expr_status (f)
311 struct function *f;
312 {
313 free (f->expr);
314 f->expr = NULL;
315 }
316
317 /* Small sanity check that the queue is empty at the end of a function. */
318 void
319 finish_expr_for_function ()
320 {
321 if (pending_chain)
322 abort ();
323 }
324 \f
325 /* Manage the queue of increment instructions to be output
326 for POSTINCREMENT_EXPR expressions, etc. */
327
328 /* Queue up to increment (or change) VAR later. BODY says how:
329 BODY should be the same thing you would pass to emit_insn
330 to increment right away. It will go to emit_insn later on.
331
332 The value is a QUEUED expression to be used in place of VAR
333 where you want to guarantee the pre-incrementation value of VAR. */
334
335 static rtx
336 enqueue_insn (var, body)
337 rtx var, body;
338 {
339 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
340 body, pending_chain);
341 return pending_chain;
342 }
343
344 /* Use protect_from_queue to convert a QUEUED expression
345 into something that you can put immediately into an instruction.
346 If the queued incrementation has not happened yet,
347 protect_from_queue returns the variable itself.
348 If the incrementation has happened, protect_from_queue returns a temp
349 that contains a copy of the old value of the variable.
350
351 Any time an rtx which might possibly be a QUEUED is to be put
352 into an instruction, it must be passed through protect_from_queue first.
353 QUEUED expressions are not meaningful in instructions.
354
355 Do not pass a value through protect_from_queue and then hold
356 on to it for a while before putting it in an instruction!
357 If the queue is flushed in between, incorrect code will result. */
358
359 rtx
360 protect_from_queue (x, modify)
361 register rtx x;
362 int modify;
363 {
364 register RTX_CODE code = GET_CODE (x);
365
366 #if 0 /* A QUEUED can hang around after the queue is forced out. */
367 /* Shortcut for most common case. */
368 if (pending_chain == 0)
369 return x;
370 #endif
371
372 if (code != QUEUED)
373 {
374 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
375 use of autoincrement. Make a copy of the contents of the memory
376 location rather than a copy of the address, but not if the value is
377 of mode BLKmode. Don't modify X in place since it might be
378 shared. */
379 if (code == MEM && GET_MODE (x) != BLKmode
380 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
381 {
382 register rtx y = XEXP (x, 0);
383 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
384
385 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
386 MEM_COPY_ATTRIBUTES (new, x);
387 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
388
389 if (QUEUED_INSN (y))
390 {
391 register rtx temp = gen_reg_rtx (GET_MODE (new));
392 emit_insn_before (gen_move_insn (temp, new),
393 QUEUED_INSN (y));
394 return temp;
395 }
396 return new;
397 }
398 /* Otherwise, recursively protect the subexpressions of all
399 the kinds of rtx's that can contain a QUEUED. */
400 if (code == MEM)
401 {
402 rtx tem = protect_from_queue (XEXP (x, 0), 0);
403 if (tem != XEXP (x, 0))
404 {
405 x = copy_rtx (x);
406 XEXP (x, 0) = tem;
407 }
408 }
409 else if (code == PLUS || code == MULT)
410 {
411 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
412 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
413 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
414 {
415 x = copy_rtx (x);
416 XEXP (x, 0) = new0;
417 XEXP (x, 1) = new1;
418 }
419 }
420 return x;
421 }
422 /* If the increment has not happened, use the variable itself. */
423 if (QUEUED_INSN (x) == 0)
424 return QUEUED_VAR (x);
425 /* If the increment has happened and a pre-increment copy exists,
426 use that copy. */
427 if (QUEUED_COPY (x) != 0)
428 return QUEUED_COPY (x);
429 /* The increment has happened but we haven't set up a pre-increment copy.
430 Set one up now, and use it. */
431 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
432 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
433 QUEUED_INSN (x));
434 return QUEUED_COPY (x);
435 }
436
437 /* Return nonzero if X contains a QUEUED expression:
438 if it contains anything that will be altered by a queued increment.
439 We handle only combinations of MEM, PLUS, MINUS and MULT operators
440 since memory addresses generally contain only those. */
441
442 int
443 queued_subexp_p (x)
444 rtx x;
445 {
446 register enum rtx_code code = GET_CODE (x);
447 switch (code)
448 {
449 case QUEUED:
450 return 1;
451 case MEM:
452 return queued_subexp_p (XEXP (x, 0));
453 case MULT:
454 case PLUS:
455 case MINUS:
456 return (queued_subexp_p (XEXP (x, 0))
457 || queued_subexp_p (XEXP (x, 1)));
458 default:
459 return 0;
460 }
461 }
462
463 /* Perform all the pending incrementations. */
464
465 void
466 emit_queue ()
467 {
468 register rtx p;
469 while ((p = pending_chain))
470 {
471 rtx body = QUEUED_BODY (p);
472
473 if (GET_CODE (body) == SEQUENCE)
474 {
475 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
476 emit_insn (QUEUED_BODY (p));
477 }
478 else
479 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
480 pending_chain = QUEUED_NEXT (p);
481 }
482 }
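/* Minimal usage sketch of the increment queue, not part of the original
   file; the caller and the names `var' and `inc' are hypothetical, but the
   routines are the ones defined above.  */
#if 0
static void
example_post_increment (var)
     rtx var;
{
  /* Queue "var = var + 1" to be emitted later.  */
  rtx inc = gen_rtx_SET (VOIDmode, var,
			 gen_rtx_PLUS (GET_MODE (var), var, const1_rtx));
  enqueue_insn (var, inc);

  /* Any use of VAR that must see the pre-increment value goes through
     protect_from_queue before being placed in an insn.  */
  emit_move_insn (gen_reg_rtx (GET_MODE (var)),
		  protect_from_queue (var, 0));

  /* Flush the queue; the queued increment is emitted here.  */
  emit_queue ();
}
#endif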
483 \f
484 /* Copy data from FROM to TO, where the machine modes are not the same.
485 Both modes may be integer, or both may be floating.
486 UNSIGNEDP should be nonzero if FROM is an unsigned type.
487 This causes zero-extension instead of sign-extension. */
488
489 void
490 convert_move (to, from, unsignedp)
491 register rtx to, from;
492 int unsignedp;
493 {
494 enum machine_mode to_mode = GET_MODE (to);
495 enum machine_mode from_mode = GET_MODE (from);
496 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
497 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
498 enum insn_code code;
499 rtx libcall;
500
501 /* rtx code for making an equivalent value. */
502 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
503
504 to = protect_from_queue (to, 1);
505 from = protect_from_queue (from, 0);
506
507 if (to_real != from_real)
508 abort ();
509
510 /* If FROM is a SUBREG that indicates that we have already done at least
511 the required extension, strip it. We don't handle such SUBREGs as
512 TO here. */
513
514 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
515 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
516 >= GET_MODE_SIZE (to_mode))
517 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
518 from = gen_lowpart (to_mode, from), from_mode = to_mode;
519
520 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
521 abort ();
522
523 if (to_mode == from_mode
524 || (from_mode == VOIDmode && CONSTANT_P (from)))
525 {
526 emit_move_insn (to, from);
527 return;
528 }
529
530 if (to_real)
531 {
532 rtx value;
533
534 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
535 {
536 /* Try converting directly if the insn is supported. */
537 if ((code = can_extend_p (to_mode, from_mode, 0))
538 != CODE_FOR_nothing)
539 {
540 emit_unop_insn (code, to, from, UNKNOWN);
541 return;
542 }
543 }
544
545 #ifdef HAVE_trunchfqf2
546 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
547 {
548 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
549 return;
550 }
551 #endif
552 #ifdef HAVE_trunctqfqf2
553 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
554 {
555 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
556 return;
557 }
558 #endif
559 #ifdef HAVE_truncsfqf2
560 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
561 {
562 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
563 return;
564 }
565 #endif
566 #ifdef HAVE_truncdfqf2
567 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
568 {
569 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
570 return;
571 }
572 #endif
573 #ifdef HAVE_truncxfqf2
574 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
575 {
576 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
577 return;
578 }
579 #endif
580 #ifdef HAVE_trunctfqf2
581 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
582 {
583 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
584 return;
585 }
586 #endif
587
588 #ifdef HAVE_trunctqfhf2
589 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
590 {
591 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
592 return;
593 }
594 #endif
595 #ifdef HAVE_truncsfhf2
596 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
597 {
598 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
599 return;
600 }
601 #endif
602 #ifdef HAVE_truncdfhf2
603 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
604 {
605 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
606 return;
607 }
608 #endif
609 #ifdef HAVE_truncxfhf2
610 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
611 {
612 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
613 return;
614 }
615 #endif
616 #ifdef HAVE_trunctfhf2
617 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
618 {
619 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
620 return;
621 }
622 #endif
623
624 #ifdef HAVE_truncsftqf2
625 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
626 {
627 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
628 return;
629 }
630 #endif
631 #ifdef HAVE_truncdftqf2
632 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
633 {
634 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
635 return;
636 }
637 #endif
638 #ifdef HAVE_truncxftqf2
639 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
640 {
641 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
642 return;
643 }
644 #endif
645 #ifdef HAVE_trunctftqf2
646 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
647 {
648 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
649 return;
650 }
651 #endif
652
653 #ifdef HAVE_truncdfsf2
654 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
655 {
656 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
657 return;
658 }
659 #endif
660 #ifdef HAVE_truncxfsf2
661 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
662 {
663 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
664 return;
665 }
666 #endif
667 #ifdef HAVE_trunctfsf2
668 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
669 {
670 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
671 return;
672 }
673 #endif
674 #ifdef HAVE_truncxfdf2
675 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
676 {
677 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
678 return;
679 }
680 #endif
681 #ifdef HAVE_trunctfdf2
682 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
683 {
684 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
685 return;
686 }
687 #endif
688
689 libcall = (rtx) 0;
690 switch (from_mode)
691 {
692 case SFmode:
693 switch (to_mode)
694 {
695 case DFmode:
696 libcall = extendsfdf2_libfunc;
697 break;
698
699 case XFmode:
700 libcall = extendsfxf2_libfunc;
701 break;
702
703 case TFmode:
704 libcall = extendsftf2_libfunc;
705 break;
706
707 default:
708 break;
709 }
710 break;
711
712 case DFmode:
713 switch (to_mode)
714 {
715 case SFmode:
716 libcall = truncdfsf2_libfunc;
717 break;
718
719 case XFmode:
720 libcall = extenddfxf2_libfunc;
721 break;
722
723 case TFmode:
724 libcall = extenddftf2_libfunc;
725 break;
726
727 default:
728 break;
729 }
730 break;
731
732 case XFmode:
733 switch (to_mode)
734 {
735 case SFmode:
736 libcall = truncxfsf2_libfunc;
737 break;
738
739 case DFmode:
740 libcall = truncxfdf2_libfunc;
741 break;
742
743 default:
744 break;
745 }
746 break;
747
748 case TFmode:
749 switch (to_mode)
750 {
751 case SFmode:
752 libcall = trunctfsf2_libfunc;
753 break;
754
755 case DFmode:
756 libcall = trunctfdf2_libfunc;
757 break;
758
759 default:
760 break;
761 }
762 break;
763
764 default:
765 break;
766 }
767
768 if (libcall == (rtx) 0)
769 /* This conversion is not implemented yet. */
770 abort ();
771
772 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
773 1, from, from_mode);
774 emit_move_insn (to, value);
775 return;
776 }
777
778 /* Now both modes are integers. */
779
780 /* Handle expanding beyond a word. */
781 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
782 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
783 {
784 rtx insns;
785 rtx lowpart;
786 rtx fill_value;
787 rtx lowfrom;
788 int i;
789 enum machine_mode lowpart_mode;
790 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
791
792 /* Try converting directly if the insn is supported. */
793 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
794 != CODE_FOR_nothing)
795 {
796 /* If FROM is a SUBREG, put it into a register. Do this
797 so that we always generate the same set of insns for
798 better cse'ing; if an intermediate assignment occurred,
799 we won't be doing the operation directly on the SUBREG. */
800 if (optimize > 0 && GET_CODE (from) == SUBREG)
801 from = force_reg (from_mode, from);
802 emit_unop_insn (code, to, from, equiv_code);
803 return;
804 }
805 /* Next, try converting via full word. */
806 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
807 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
808 != CODE_FOR_nothing))
809 {
810 if (GET_CODE (to) == REG)
811 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
812 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
813 emit_unop_insn (code, to,
814 gen_lowpart (word_mode, to), equiv_code);
815 return;
816 }
817
818 /* No special multiword conversion insn; do it by hand. */
819 start_sequence ();
820
821 /* Since we will turn this into a no conflict block, we must ensure
822 that the source does not overlap the target. */
823
824 if (reg_overlap_mentioned_p (to, from))
825 from = force_reg (from_mode, from);
826
827 /* Get a copy of FROM widened to a word, if necessary. */
828 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
829 lowpart_mode = word_mode;
830 else
831 lowpart_mode = from_mode;
832
833 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
834
835 lowpart = gen_lowpart (lowpart_mode, to);
836 emit_move_insn (lowpart, lowfrom);
837
838 /* Compute the value to put in each remaining word. */
839 if (unsignedp)
840 fill_value = const0_rtx;
841 else
842 {
843 #ifdef HAVE_slt
844 if (HAVE_slt
845 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
846 && STORE_FLAG_VALUE == -1)
847 {
848 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
849 lowpart_mode, 0, 0);
850 fill_value = gen_reg_rtx (word_mode);
851 emit_insn (gen_slt (fill_value));
852 }
853 else
854 #endif
855 {
856 fill_value
857 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
858 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
859 NULL_RTX, 0);
860 fill_value = convert_to_mode (word_mode, fill_value, 1);
861 }
862 }
863
864 /* Fill the remaining words. */
865 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
866 {
867 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
868 rtx subword = operand_subword (to, index, 1, to_mode);
869
870 if (subword == 0)
871 abort ();
872
873 if (fill_value != subword)
874 emit_move_insn (subword, fill_value);
875 }
876
877 insns = get_insns ();
878 end_sequence ();
879
880 emit_no_conflict_block (insns, to, from, NULL_RTX,
881 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
882 return;
883 }
884
885 /* Truncating multi-word to a word or less. */
886 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
887 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
888 {
889 if (!((GET_CODE (from) == MEM
890 && ! MEM_VOLATILE_P (from)
891 && direct_load[(int) to_mode]
892 && ! mode_dependent_address_p (XEXP (from, 0)))
893 || GET_CODE (from) == REG
894 || GET_CODE (from) == SUBREG))
895 from = force_reg (from_mode, from);
896 convert_move (to, gen_lowpart (word_mode, from), 0);
897 return;
898 }
899
900 /* Handle pointer conversion. */ /* SPEE 900220 */
901 if (to_mode == PQImode)
902 {
903 if (from_mode != QImode)
904 from = convert_to_mode (QImode, from, unsignedp);
905
906 #ifdef HAVE_truncqipqi2
907 if (HAVE_truncqipqi2)
908 {
909 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
910 return;
911 }
912 #endif /* HAVE_truncqipqi2 */
913 abort ();
914 }
915
916 if (from_mode == PQImode)
917 {
918 if (to_mode != QImode)
919 {
920 from = convert_to_mode (QImode, from, unsignedp);
921 from_mode = QImode;
922 }
923 else
924 {
925 #ifdef HAVE_extendpqiqi2
926 if (HAVE_extendpqiqi2)
927 {
928 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
929 return;
930 }
931 #endif /* HAVE_extendpqiqi2 */
932 abort ();
933 }
934 }
935
936 if (to_mode == PSImode)
937 {
938 if (from_mode != SImode)
939 from = convert_to_mode (SImode, from, unsignedp);
940
941 #ifdef HAVE_truncsipsi2
942 if (HAVE_truncsipsi2)
943 {
944 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
945 return;
946 }
947 #endif /* HAVE_truncsipsi2 */
948 abort ();
949 }
950
951 if (from_mode == PSImode)
952 {
953 if (to_mode != SImode)
954 {
955 from = convert_to_mode (SImode, from, unsignedp);
956 from_mode = SImode;
957 }
958 else
959 {
960 #ifdef HAVE_extendpsisi2
961 if (HAVE_extendpsisi2)
962 {
963 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
964 return;
965 }
966 #endif /* HAVE_extendpsisi2 */
967 abort ();
968 }
969 }
970
971 if (to_mode == PDImode)
972 {
973 if (from_mode != DImode)
974 from = convert_to_mode (DImode, from, unsignedp);
975
976 #ifdef HAVE_truncdipdi2
977 if (HAVE_truncdipdi2)
978 {
979 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
980 return;
981 }
982 #endif /* HAVE_truncdipdi2 */
983 abort ();
984 }
985
986 if (from_mode == PDImode)
987 {
988 if (to_mode != DImode)
989 {
990 from = convert_to_mode (DImode, from, unsignedp);
991 from_mode = DImode;
992 }
993 else
994 {
995 #ifdef HAVE_extendpdidi2
996 if (HAVE_extendpdidi2)
997 {
998 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
999 return;
1000 }
1001 #endif /* HAVE_extendpdidi2 */
1002 abort ();
1003 }
1004 }
1005
1006 /* Now follow all the conversions between integers
1007 no more than a word long. */
1008
1009 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1010 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1011 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1012 GET_MODE_BITSIZE (from_mode)))
1013 {
1014 if (!((GET_CODE (from) == MEM
1015 && ! MEM_VOLATILE_P (from)
1016 && direct_load[(int) to_mode]
1017 && ! mode_dependent_address_p (XEXP (from, 0)))
1018 || GET_CODE (from) == REG
1019 || GET_CODE (from) == SUBREG))
1020 from = force_reg (from_mode, from);
1021 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1022 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1023 from = copy_to_reg (from);
1024 emit_move_insn (to, gen_lowpart (to_mode, from));
1025 return;
1026 }
1027
1028 /* Handle extension. */
1029 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1030 {
1031 /* Convert directly if that works. */
1032 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1033 != CODE_FOR_nothing)
1034 {
1035 emit_unop_insn (code, to, from, equiv_code);
1036 return;
1037 }
1038 else
1039 {
1040 enum machine_mode intermediate;
1041 rtx tmp;
1042 tree shift_amount;
1043
1044 /* Search for a mode to convert via. */
1045 for (intermediate = from_mode; intermediate != VOIDmode;
1046 intermediate = GET_MODE_WIDER_MODE (intermediate))
1047 if (((can_extend_p (to_mode, intermediate, unsignedp)
1048 != CODE_FOR_nothing)
1049 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1050 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1051 GET_MODE_BITSIZE (intermediate))))
1052 && (can_extend_p (intermediate, from_mode, unsignedp)
1053 != CODE_FOR_nothing))
1054 {
1055 convert_move (to, convert_to_mode (intermediate, from,
1056 unsignedp), unsignedp);
1057 return;
1058 }
1059
1060 /* No suitable intermediate mode.
1061 Generate what we need with shifts. */
1062 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1063 - GET_MODE_BITSIZE (from_mode), 0);
1064 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1065 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1066 to, unsignedp);
1067 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1068 to, unsignedp);
1069 if (tmp != to)
1070 emit_move_insn (to, tmp);
1071 return;
1072 }
1073 }
1074
1075 /* Support special truncate insns for certain modes. */
1076
1077 if (from_mode == DImode && to_mode == SImode)
1078 {
1079 #ifdef HAVE_truncdisi2
1080 if (HAVE_truncdisi2)
1081 {
1082 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1083 return;
1084 }
1085 #endif
1086 convert_move (to, force_reg (from_mode, from), unsignedp);
1087 return;
1088 }
1089
1090 if (from_mode == DImode && to_mode == HImode)
1091 {
1092 #ifdef HAVE_truncdihi2
1093 if (HAVE_truncdihi2)
1094 {
1095 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1096 return;
1097 }
1098 #endif
1099 convert_move (to, force_reg (from_mode, from), unsignedp);
1100 return;
1101 }
1102
1103 if (from_mode == DImode && to_mode == QImode)
1104 {
1105 #ifdef HAVE_truncdiqi2
1106 if (HAVE_truncdiqi2)
1107 {
1108 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1109 return;
1110 }
1111 #endif
1112 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 return;
1114 }
1115
1116 if (from_mode == SImode && to_mode == HImode)
1117 {
1118 #ifdef HAVE_truncsihi2
1119 if (HAVE_truncsihi2)
1120 {
1121 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1122 return;
1123 }
1124 #endif
1125 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 return;
1127 }
1128
1129 if (from_mode == SImode && to_mode == QImode)
1130 {
1131 #ifdef HAVE_truncsiqi2
1132 if (HAVE_truncsiqi2)
1133 {
1134 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1135 return;
1136 }
1137 #endif
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 return;
1140 }
1141
1142 if (from_mode == HImode && to_mode == QImode)
1143 {
1144 #ifdef HAVE_trunchiqi2
1145 if (HAVE_trunchiqi2)
1146 {
1147 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1148 return;
1149 }
1150 #endif
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 return;
1153 }
1154
1155 if (from_mode == TImode && to_mode == DImode)
1156 {
1157 #ifdef HAVE_trunctidi2
1158 if (HAVE_trunctidi2)
1159 {
1160 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1161 return;
1162 }
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1166 }
1167
1168 if (from_mode == TImode && to_mode == SImode)
1169 {
1170 #ifdef HAVE_trunctisi2
1171 if (HAVE_trunctisi2)
1172 {
1173 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1174 return;
1175 }
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1179 }
1180
1181 if (from_mode == TImode && to_mode == HImode)
1182 {
1183 #ifdef HAVE_trunctihi2
1184 if (HAVE_trunctihi2)
1185 {
1186 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1187 return;
1188 }
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1192 }
1193
1194 if (from_mode == TImode && to_mode == QImode)
1195 {
1196 #ifdef HAVE_trunctiqi2
1197 if (HAVE_trunctiqi2)
1198 {
1199 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1200 return;
1201 }
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1205 }
1206
1207 /* Handle truncation of volatile memrefs, and so on;
1208 the things that couldn't be truncated directly,
1209 and for which there was no special instruction. */
1210 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1211 {
1212 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1213 emit_move_insn (to, temp);
1214 return;
1215 }
1216
1217 /* Mode combination is not recognized. */
1218 abort ();
1219 }
1220
1221 /* Return an rtx for a value that would result
1222 from converting X to mode MODE.
1223 Both X and MODE may be floating, or both integer.
1224 UNSIGNEDP is nonzero if X is an unsigned value.
1225 This can be done by referring to a part of X in place
1226 or by copying to a new temporary with conversion.
1227
1228 This function *must not* call protect_from_queue
1229 except when putting X into an insn (in which case convert_move does it). */
1230
1231 rtx
1232 convert_to_mode (mode, x, unsignedp)
1233 enum machine_mode mode;
1234 rtx x;
1235 int unsignedp;
1236 {
1237 return convert_modes (mode, VOIDmode, x, unsignedp);
1238 }
1239
1240 /* Return an rtx for a value that would result
1241 from converting X from mode OLDMODE to mode MODE.
1242 Both modes may be floating, or both integer.
1243 UNSIGNEDP is nonzero if X is an unsigned value.
1244
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1249
1250 This function *must not* call protect_from_queue
1251 except when putting X into an insn (in which case convert_move does it). */
1252
1253 rtx
1254 convert_modes (mode, oldmode, x, unsignedp)
1255 enum machine_mode mode, oldmode;
1256 rtx x;
1257 int unsignedp;
1258 {
1259 register rtx temp;
1260
1261 /* If FROM is a SUBREG that indicates that we have already done at least
1262 the required extension, strip it. */
1263
1264 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1265 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1266 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1267 x = gen_lowpart (mode, x);
1268
1269 if (GET_MODE (x) != VOIDmode)
1270 oldmode = GET_MODE (x);
1271
1272 if (mode == oldmode)
1273 return x;
1274
1275 /* There is one case that we must handle specially: If we are converting
1276 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1277 we are to interpret the constant as unsigned, gen_lowpart will do
1278 the wrong thing if the constant appears negative. What we want to do is
1279 make the high-order word of the constant zero, not all ones. */
1280
1281 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1282 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1283 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1284 {
1285 HOST_WIDE_INT val = INTVAL (x);
1286
1287 if (oldmode != VOIDmode
1288 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1289 {
1290 int width = GET_MODE_BITSIZE (oldmode);
1291
1292 /* We need to zero extend VAL. */
1293 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1294 }
1295
1296 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1297 }
1298
1299 /* We can do this with a gen_lowpart if both desired and current modes
1300 are integer, and this is either a constant integer, a register, or a
1301 non-volatile MEM. Except for the constant case where MODE is no
1302 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1303
1304 if ((GET_CODE (x) == CONST_INT
1305 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1306 || (GET_MODE_CLASS (mode) == MODE_INT
1307 && GET_MODE_CLASS (oldmode) == MODE_INT
1308 && (GET_CODE (x) == CONST_DOUBLE
1309 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1310 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1311 && direct_load[(int) mode])
1312 || (GET_CODE (x) == REG
1313 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1314 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1315 {
1316 /* ?? If we don't know OLDMODE, we have to assume here that
1317 X does not need sign- or zero-extension. This may not be
1318 the case, but it's the best we can do. */
1319 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1320 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1321 {
1322 HOST_WIDE_INT val = INTVAL (x);
1323 int width = GET_MODE_BITSIZE (oldmode);
1324
1325 /* We must sign or zero-extend in this case. Start by
1326 zero-extending, then sign extend if we need to. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1328 if (! unsignedp
1329 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1330 val |= (HOST_WIDE_INT) (-1) << width;
1331
1332 return GEN_INT (val);
1333 }
1334
1335 return gen_lowpart (mode, x);
1336 }
1337
1338 temp = gen_reg_rtx (mode);
1339 convert_move (temp, x, unsignedp);
1340 return temp;
1341 }
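/* Minimal sketch, not from the original file, of the usual entry points for
   scalar conversions; VAL is a hypothetical SImode pseudo.  */
#if 0
static rtx
example_widen_unsigned (val)
     rtx val;
{
  /* Zero-extend an SImode value to DImode; convert_to_mode picks a low
     part reference, a rewritten constant, or a real conversion via
     convert_move as appropriate.  */
  return convert_to_mode (DImode, val, 1);
}
#endif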
1342 \f
1343
1344 /* This macro is used to determine the largest unit size that
1345 move_by_pieces can use. */
1346
1347 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1348 move efficiently, as opposed to MOVE_MAX which is the maximum
1349 number of bytes we can move with a single instruction. */
1350
1351 #ifndef MOVE_MAX_PIECES
1352 #define MOVE_MAX_PIECES MOVE_MAX
1353 #endif
1354
1355 /* Generate several move instructions to copy LEN bytes
1356 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1357 The caller must pass FROM and TO
1358 through protect_from_queue before calling.
1359 ALIGN (in bytes) is the maximum alignment we can assume. */
1360
1361 void
1362 move_by_pieces (to, from, len, align)
1363 rtx to, from;
1364 int len;
1365 unsigned int align;
1366 {
1367 struct move_by_pieces data;
1368 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1369 int max_size = MOVE_MAX_PIECES + 1;
1370 enum machine_mode mode = VOIDmode, tmode;
1371 enum insn_code icode;
1372
1373 data.offset = 0;
1374 data.to_addr = to_addr;
1375 data.from_addr = from_addr;
1376 data.to = to;
1377 data.from = from;
1378 data.autinc_to
1379 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1380 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1381 data.autinc_from
1382 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1383 || GET_CODE (from_addr) == POST_INC
1384 || GET_CODE (from_addr) == POST_DEC);
1385
1386 data.explicit_inc_from = 0;
1387 data.explicit_inc_to = 0;
1388 data.reverse
1389 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1390 if (data.reverse) data.offset = len;
1391 data.len = len;
1392
1393 data.to_struct = MEM_IN_STRUCT_P (to);
1394 data.from_struct = MEM_IN_STRUCT_P (from);
1395 data.to_readonly = RTX_UNCHANGING_P (to);
1396 data.from_readonly = RTX_UNCHANGING_P (from);
1397
1398 /* If copying requires more than two move insns,
1399 copy addresses to registers (to make displacements shorter)
1400 and use post-increment if available. */
1401 if (!(data.autinc_from && data.autinc_to)
1402 && move_by_pieces_ninsns (len, align) > 2)
1403 {
1404 /* Find the mode of the largest move... */
1405 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1406 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1407 if (GET_MODE_SIZE (tmode) < max_size)
1408 mode = tmode;
1409
1410 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1411 {
1412 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1413 data.autinc_from = 1;
1414 data.explicit_inc_from = -1;
1415 }
1416 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1417 {
1418 data.from_addr = copy_addr_to_reg (from_addr);
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = 1;
1421 }
1422 if (!data.autinc_from && CONSTANT_P (from_addr))
1423 data.from_addr = copy_addr_to_reg (from_addr);
1424 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1425 {
1426 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1427 data.autinc_to = 1;
1428 data.explicit_inc_to = -1;
1429 }
1430 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1431 {
1432 data.to_addr = copy_addr_to_reg (to_addr);
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = 1;
1435 }
1436 if (!data.autinc_to && CONSTANT_P (to_addr))
1437 data.to_addr = copy_addr_to_reg (to_addr);
1438 }
1439
1440 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1441 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1442 align = MOVE_MAX;
1443
1444 /* First move what we can in the largest integer mode, then go to
1445 successively smaller modes. */
1446
1447 while (max_size > 1)
1448 {
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1453
1454 if (mode == VOIDmode)
1455 break;
1456
1457 icode = mov_optab->handlers[(int) mode].insn_code;
1458 if (icode != CODE_FOR_nothing
1459 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1460 (unsigned int) GET_MODE_SIZE (mode)))
1461 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1462
1463 max_size = GET_MODE_SIZE (mode);
1464 }
1465
1466 /* The code above should have handled everything. */
1467 if (data.len > 0)
1468 abort ();
1469 }
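/* Sketch of a direct call, not part of the original file; TO and FROM stand
   for BLKmode MEMs the caller already has.  As the comment above says, both
   must be passed through protect_from_queue first.  */
#if 0
static void
example_inline_copy (to, from)
     rtx to, from;
{
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  /* Copy 16 bytes, assuming we can rely on 4-byte alignment; the alignment
     argument here is measured in bytes.  */
  if (MOVE_BY_PIECES_P (16, 4))
    move_by_pieces (to, from, 16, 4);
}
#endif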
1470
1471 /* Return number of insns required to move L bytes by pieces.
1472 ALIGN (in bytes) is the maximum alignment we can assume. */
1473
1474 static int
1475 move_by_pieces_ninsns (l, align)
1476 unsigned int l;
1477 unsigned int align;
1478 {
1479 register int n_insns = 0;
1480 int max_size = MOVE_MAX + 1;
1481
1482 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1483 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1484 align = MOVE_MAX;
1485
1486 while (max_size > 1)
1487 {
1488 enum machine_mode mode = VOIDmode, tmode;
1489 enum insn_code icode;
1490
1491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1493 if (GET_MODE_SIZE (tmode) < max_size)
1494 mode = tmode;
1495
1496 if (mode == VOIDmode)
1497 break;
1498
1499 icode = mov_optab->handlers[(int) mode].insn_code;
1500 if (icode != CODE_FOR_nothing
1501 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508 }
1509
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) PARAMS ((rtx, ...));
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519 {
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx_MEM (mode, data->to_addr)
1529 : copy_rtx (change_address (data->to, mode,
1530 plus_constant (data->to_addr,
1531 data->offset))));
1532 MEM_IN_STRUCT_P (to1) = data->to_struct;
1533 RTX_UNCHANGING_P (to1) = data->to_readonly;
1534
1535 from1
1536 = (data->autinc_from
1537 ? gen_rtx_MEM (mode, data->from_addr)
1538 : copy_rtx (change_address (data->from, mode,
1539 plus_constant (data->from_addr,
1540 data->offset))));
1541 MEM_IN_STRUCT_P (from1) = data->from_struct;
1542 RTX_UNCHANGING_P (from1) = data->from_readonly;
1543
1544 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1546 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1547 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1548
1549 emit_insn ((*genfun) (to1, from1));
1550 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1551 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1552 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1553 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1554
1555 if (! data->reverse) data->offset += size;
1556
1557 data->len -= size;
1558 }
1559 }
1560 \f
1561 /* Emit code to move a block Y to a block X.
1562 This may be done with string-move instructions,
1563 with multiple scalar move instructions, or with a library call.
1564
1565 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 with mode BLKmode.
1567 SIZE is an rtx that says how long they are.
1568 ALIGN is the maximum alignment we can assume they have,
1569 measured in bytes.
1570
1571 Return the address of the new block, if memcpy is called and returns it,
1572 0 otherwise. */
1573
1574 rtx
1575 emit_block_move (x, y, size, align)
1576 rtx x, y;
1577 rtx size;
1578 unsigned int align;
1579 {
1580 rtx retval = 0;
1581 #ifdef TARGET_MEM_FUNCTIONS
1582 static tree fn;
1583 tree call_expr, arg_list;
1584 #endif
1585
1586 if (GET_MODE (x) != BLKmode)
1587 abort ();
1588
1589 if (GET_MODE (y) != BLKmode)
1590 abort ();
1591
1592 x = protect_from_queue (x, 1);
1593 y = protect_from_queue (y, 0);
1594 size = protect_from_queue (size, 0);
1595
1596 if (GET_CODE (x) != MEM)
1597 abort ();
1598 if (GET_CODE (y) != MEM)
1599 abort ();
1600 if (size == 0)
1601 abort ();
1602
1603 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1604 move_by_pieces (x, y, INTVAL (size), align);
1605 else
1606 {
1607 /* Try the most limited insn first, because there's no point
1608 including more than one in the machine description unless
1609 the more limited one has some advantage. */
1610
1611 rtx opalign = GEN_INT (align);
1612 enum machine_mode mode;
1613
1614 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1615 mode = GET_MODE_WIDER_MODE (mode))
1616 {
1617 enum insn_code code = movstr_optab[(int) mode];
1618 insn_operand_predicate_fn pred;
1619
1620 if (code != CODE_FOR_nothing
1621 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1622 here because if SIZE is less than the mode mask, as it is
1623 returned by the macro, it will definitely be less than the
1624 actual mode mask. */
1625 && ((GET_CODE (size) == CONST_INT
1626 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1627 <= (GET_MODE_MASK (mode) >> 1)))
1628 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1629 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1630 || (*pred) (x, BLKmode))
1631 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1632 || (*pred) (y, BLKmode))
1633 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1634 || (*pred) (opalign, VOIDmode)))
1635 {
1636 rtx op2;
1637 rtx last = get_last_insn ();
1638 rtx pat;
1639
1640 op2 = convert_to_mode (mode, size, 1);
1641 pred = insn_data[(int) code].operand[2].predicate;
1642 if (pred != 0 && ! (*pred) (op2, mode))
1643 op2 = copy_to_mode_reg (mode, op2);
1644
1645 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1646 if (pat)
1647 {
1648 emit_insn (pat);
1649 return 0;
1650 }
1651 else
1652 delete_insns_since (last);
1653 }
1654 }
1655
1656 /* X, Y, or SIZE may have been passed through protect_from_queue.
1657
1658 It is unsafe to save the value generated by protect_from_queue
1659 and reuse it later. Consider what happens if emit_queue is
1660 called before the return value from protect_from_queue is used.
1661
1662 Expansion of the CALL_EXPR below will call emit_queue before
1663 we are finished emitting RTL for argument setup. So if we are
1664 not careful we could get the wrong value for an argument.
1665
1666 To avoid this problem we go ahead and emit code to copy X, Y &
1667 SIZE into new pseudos. We can then place those new pseudos
1668 into an RTL_EXPR and use them later, even after a call to
1669 emit_queue.
1670
1671 Note this is not strictly needed for library calls since they
1672 do not call emit_queue before loading their arguments. However,
1673 we may need to have library calls call emit_queue in the future
1674 since failing to do so could cause problems for targets which
1675 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1676 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1677 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1678
1679 #ifdef TARGET_MEM_FUNCTIONS
1680 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1681 #else
1682 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1683 TREE_UNSIGNED (integer_type_node));
1684 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1685 #endif
1686
1687 #ifdef TARGET_MEM_FUNCTIONS
1688 /* It is incorrect to use the libcall calling conventions to call
1689 memcpy in this context.
1690
1691 This could be a user call to memcpy and the user may wish to
1692 examine the return value from memcpy.
1693
1694 For targets where libcalls and normal calls have different conventions
1695 for returning pointers, we could end up generating incorrect code.
1696
1697 So instead of using a libcall sequence we build up a suitable
1698 CALL_EXPR and expand the call in the normal fashion. */
1699 if (fn == NULL_TREE)
1700 {
1701 tree fntype;
1702
1703 /* This was copied from except.c; I don't know if all this is
1704 necessary in this context or not. */
1705 fn = get_identifier ("memcpy");
1706 push_obstacks_nochange ();
1707 end_temporary_allocation ();
1708 fntype = build_pointer_type (void_type_node);
1709 fntype = build_function_type (fntype, NULL_TREE);
1710 fn = build_decl (FUNCTION_DECL, fn, fntype);
1711 ggc_add_tree_root (&fn, 1);
1712 DECL_EXTERNAL (fn) = 1;
1713 TREE_PUBLIC (fn) = 1;
1714 DECL_ARTIFICIAL (fn) = 1;
1715 make_decl_rtl (fn, NULL_PTR, 1);
1716 assemble_external (fn);
1717 pop_obstacks ();
1718 }
1719
1720 /* We need to make an argument list for the function call.
1721
1722 memcpy has three arguments, the first two are void * addresses and
1723 the last is a size_t byte count for the copy. */
1724 arg_list
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), x));
1727 TREE_CHAIN (arg_list)
1728 = build_tree_list (NULL_TREE,
1729 make_tree (build_pointer_type (void_type_node), y));
1730 TREE_CHAIN (TREE_CHAIN (arg_list))
1731 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1732 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1733
1734 /* Now we have to build up the CALL_EXPR itself. */
1735 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1736 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1737 call_expr, arg_list, NULL_TREE);
1738 TREE_SIDE_EFFECTS (call_expr) = 1;
1739
1740 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1741 #else
1742 emit_library_call (bcopy_libfunc, 0,
1743 VOIDmode, 3, y, Pmode, x, Pmode,
1744 convert_to_mode (TYPE_MODE (integer_type_node), size,
1745 TREE_UNSIGNED (integer_type_node)),
1746 TYPE_MODE (integer_type_node));
1747 #endif
1748 }
1749
1750 return retval;
1751 }
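/* Sketch, not from the original file, of how a block copy is usually
   requested; X and Y are hypothetical BLKmode MEMs.  */
#if 0
static void
example_block_copy (x, y)
     rtx x, y;
{
  /* Copy 64 bytes with word alignment (in bytes).  emit_block_move picks
     move_by_pieces, a movstr pattern, or a memcpy/bcopy call, and returns
     memcpy's value only when that call is emitted; callers may ignore it.  */
  emit_block_move (x, y, GEN_INT (64), UNITS_PER_WORD);
}
#endif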
1752 \f
1753 /* Copy all or part of a value X into registers starting at REGNO.
1754 The number of registers to be filled is NREGS. */
1755
1756 void
1757 move_block_to_reg (regno, x, nregs, mode)
1758 int regno;
1759 rtx x;
1760 int nregs;
1761 enum machine_mode mode;
1762 {
1763 int i;
1764 #ifdef HAVE_load_multiple
1765 rtx pat;
1766 rtx last;
1767 #endif
1768
1769 if (nregs == 0)
1770 return;
1771
1772 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1773 x = validize_mem (force_const_mem (mode, x));
1774
1775 /* See if the machine can do this with a load multiple insn. */
1776 #ifdef HAVE_load_multiple
1777 if (HAVE_load_multiple)
1778 {
1779 last = get_last_insn ();
1780 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1781 GEN_INT (nregs));
1782 if (pat)
1783 {
1784 emit_insn (pat);
1785 return;
1786 }
1787 else
1788 delete_insns_since (last);
1789 }
1790 #endif
1791
1792 for (i = 0; i < nregs; i++)
1793 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1794 operand_subword_force (x, i, mode));
1795 }
1796
1797 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1798 The number of registers to be filled is NREGS. SIZE indicates the number
1799 of bytes in the object X. */
1800
1801
1802 void
1803 move_block_from_reg (regno, x, nregs, size)
1804 int regno;
1805 rtx x;
1806 int nregs;
1807 int size;
1808 {
1809 int i;
1810 #ifdef HAVE_store_multiple
1811 rtx pat;
1812 rtx last;
1813 #endif
1814 enum machine_mode mode;
1815
1816 /* If SIZE is that of a mode no bigger than a word, just use that
1817 mode's store operation. */
1818 if (size <= UNITS_PER_WORD
1819 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1820 {
1821 emit_move_insn (change_address (x, mode, NULL),
1822 gen_rtx_REG (mode, regno));
1823 return;
1824 }
1825
1826 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1827 to the left before storing to memory. Note that the previous test
1828 doesn't handle all cases (e.g. SIZE == 3). */
1829 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1830 {
1831 rtx tem = operand_subword (x, 0, 1, BLKmode);
1832 rtx shift;
1833
1834 if (tem == 0)
1835 abort ();
1836
1837 shift = expand_shift (LSHIFT_EXPR, word_mode,
1838 gen_rtx_REG (word_mode, regno),
1839 build_int_2 ((UNITS_PER_WORD - size)
1840 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1841 emit_move_insn (tem, shift);
1842 return;
1843 }
1844
1845 /* See if the machine can do this with a store multiple insn. */
1846 #ifdef HAVE_store_multiple
1847 if (HAVE_store_multiple)
1848 {
1849 last = get_last_insn ();
1850 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1851 GEN_INT (nregs));
1852 if (pat)
1853 {
1854 emit_insn (pat);
1855 return;
1856 }
1857 else
1858 delete_insns_since (last);
1859 }
1860 #endif
1861
1862 for (i = 0; i < nregs; i++)
1863 {
1864 rtx tem = operand_subword (x, i, 1, BLKmode);
1865
1866 if (tem == 0)
1867 abort ();
1868
1869 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1870 }
1871 }
1872
1873 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1874 registers represented by a PARALLEL. SSIZE represents the total size of
1875 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1876 SRC in bits. */
1877 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1878 the balance will be in what would be the low-order memory addresses, i.e.
1879 left justified for big endian, right justified for little endian. This
1880 happens to be true for the targets currently using this support. If this
1881 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1882 would be needed. */
1883
1884 void
1885 emit_group_load (dst, orig_src, ssize, align)
1886 rtx dst, orig_src;
1887 unsigned int align;
1888 int ssize;
1889 {
1890 rtx *tmps, src;
1891 int start, i;
1892
1893 if (GET_CODE (dst) != PARALLEL)
1894 abort ();
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (dst, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902
1903 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1904
1905 /* If we won't be loading directly from memory, protect the real source
1906 from strange tricks we might play. */
1907 src = orig_src;
1908 if (GET_CODE (src) != MEM)
1909 {
1910 if (GET_MODE (src) == VOIDmode)
1911 src = gen_reg_rtx (GET_MODE (dst));
1912 else
1913 src = gen_reg_rtx (GET_MODE (orig_src));
1914 emit_move_insn (src, orig_src);
1915 }
1916
1917 /* Process the pieces. */
1918 for (i = start; i < XVECLEN (dst, 0); i++)
1919 {
1920 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1921 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1922 int bytelen = GET_MODE_SIZE (mode);
1923 int shift = 0;
1924
1925 /* Handle trailing fragments that run over the size of the struct. */
1926 if (ssize >= 0 && bytepos + bytelen > ssize)
1927 {
1928 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1929 bytelen = ssize - bytepos;
1930 if (bytelen <= 0)
1931 abort ();
1932 }
1933
1934 /* Optimize the access just a bit. */
1935 if (GET_CODE (src) == MEM
1936 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1937 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1938 && bytelen == GET_MODE_SIZE (mode))
1939 {
1940 tmps[i] = gen_reg_rtx (mode);
1941 emit_move_insn (tmps[i],
1942 change_address (src, mode,
1943 plus_constant (XEXP (src, 0),
1944 bytepos)));
1945 }
1946 else if (GET_CODE (src) == CONCAT)
1947 {
1948 if (bytepos == 0
1949 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1950 tmps[i] = XEXP (src, 0);
1951 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1952 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1953 tmps[i] = XEXP (src, 1);
1954 else
1955 abort ();
1956 }
1957 else
1958 {
1959 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1960 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1961 mode, mode, align, ssize);
1962 }
1963
1964 if (BYTES_BIG_ENDIAN && shift)
1965 {
1966 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1967 tmps[i], 0, OPTAB_WIDEN);
1968 }
1969 }
1970 emit_queue();
1971
1972 /* Copy the extracted pieces into the proper (probable) hard regs. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1974 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1975 }
1976
1977 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1978 registers represented by a PARALLEL. SSIZE represents the total size of
1979 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
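
/* Usage sketch (illustrative; MEM_DST, PARALLEL_SRC, SIZE and ALIGN are
   placeholders for a caller's values):

     if (GET_CODE (parallel_src) == PARALLEL)
       emit_group_store (mem_dst, parallel_src, size, align);

   with SIZE in bytes and ALIGN in bytes, as for emit_group_load above.  */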
1980
1981 void
1982 emit_group_store (orig_dst, src, ssize, align)
1983 rtx orig_dst, src;
1984 int ssize;
1985 unsigned int align;
1986 {
1987 rtx *tmps, dst;
1988 int start, i;
1989
1990 if (GET_CODE (src) != PARALLEL)
1991 abort ();
1992
1993 /* Check for a NULL entry, used to indicate that the parameter goes
1994 both on the stack and in registers. */
1995 if (XEXP (XVECEXP (src, 0, 0), 0))
1996 start = 0;
1997 else
1998 start = 1;
1999
2000 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2001
2002 /* Copy the (probable) hard regs into pseudos. */
2003 for (i = start; i < XVECLEN (src, 0); i++)
2004 {
2005 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2006 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2007 emit_move_insn (tmps[i], reg);
2008 }
2009 emit_queue();
2010
2011 /* If we won't be storing directly into memory, protect the real destination
2012 from strange tricks we might play. */
2013 dst = orig_dst;
2014 if (GET_CODE (dst) == PARALLEL)
2015 {
2016 rtx temp;
2017
2018 /* We can get a PARALLEL dst if there is a conditional expression in
2019 a return statement. In that case, the dst and src are the same,
2020 so no action is necessary. */
2021 if (rtx_equal_p (dst, src))
2022 return;
2023
2024 /* It is unclear if we can ever reach here, but we may as well handle
2025 it. Allocate a temporary, and split this into a store/load to/from
2026 the temporary. */
2027
2028 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2029 emit_group_store (temp, src, ssize, align);
2030 emit_group_load (dst, temp, ssize, align);
2031 return;
2032 }
2033 else if (GET_CODE (dst) != MEM)
2034 {
2035 dst = gen_reg_rtx (GET_MODE (orig_dst));
2036 /* Make life a bit easier for combine. */
2037 emit_move_insn (dst, const0_rtx);
2038 }
2039 else if (! MEM_IN_STRUCT_P (dst))
2040 {
2041 /* store_bit_field requires that memory operations have
2042 mem_in_struct_p set; we might not. */
2043
2044 dst = copy_rtx (orig_dst);
2045 MEM_SET_IN_STRUCT_P (dst, 1);
2046 }
2047
2048 /* Process the pieces. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2050 {
2051 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2052 enum machine_mode mode = GET_MODE (tmps[i]);
2053 int bytelen = GET_MODE_SIZE (mode);
2054
2055 /* Handle trailing fragments that run over the size of the struct. */
2056 if (ssize >= 0 && bytepos + bytelen > ssize)
2057 {
2058 if (BYTES_BIG_ENDIAN)
2059 {
2060 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2061 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2062 tmps[i], 0, OPTAB_WIDEN);
2063 }
2064 bytelen = ssize - bytepos;
2065 }
2066
2067 /* Optimize the access just a bit. */
2068 if (GET_CODE (dst) == MEM
2069 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2070 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2071 && bytelen == GET_MODE_SIZE (mode))
2072 emit_move_insn (change_address (dst, mode,
2073 plus_constant (XEXP (dst, 0),
2074 bytepos)),
2075 tmps[i]);
2076 else
2077 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2079 }
2080
2081 emit_queue();
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2086 }
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment. */
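
/* A sketch of the intended use (illustrative only): after a call that
   returns a small structure in registers, a caller can do

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   which spills the register contents into a fresh stack temporary and
   returns it; HARD_RETURN_REG and TYPE stand for the caller's values.  */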
2096
2097 rtx
2098 copy_blkmode_from_reg (tgtblk, srcreg, type)
2099 rtx tgtblk;
2100 rtx srcreg;
2101 tree type;
2102 {
2103 int bytes = int_size_in_bytes (type);
2104 rtx src = NULL, dst = NULL;
2105 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2106 int bitpos, xbitpos, big_endian_correction = 0;
2107
2108 if (tgtblk == 0)
2109 {
2110 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2111 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2112 preserve_temp_slots (tgtblk);
2113 }
2114
2115 /* This code assumes srcreg is at least a full word. If it isn't,
2116 copy it into a new pseudo which is a full word. */
2117 if (GET_MODE (srcreg) != BLKmode
2118 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2119 srcreg = convert_to_mode (word_mode, srcreg,
2120 TREE_UNSIGNED (type));
2121
2122 /* Structures whose size is not a multiple of a word are aligned
2123 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2124 machine, this means we must skip the empty high order bytes when
2125 calculating the bit offset. */
2126 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2127 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2128 * BITS_PER_UNIT));
2129
2130 /* Copy the structure BITSIZE bits at a time.
2131
2132 We could probably emit more efficient code for machines
2133 which do not use strict alignment, but it doesn't seem
2134 worth the effort at the current time. */
2135 for (bitpos = 0, xbitpos = big_endian_correction;
2136 bitpos < bytes * BITS_PER_UNIT;
2137 bitpos += bitsize, xbitpos += bitsize)
2138 {
2139
2140 /* We need a new source operand each time xbitpos is on a
2141 word boundary and when xbitpos == big_endian_correction
2142 (the first time through). */
2143 if (xbitpos % BITS_PER_WORD == 0
2144 || xbitpos == big_endian_correction)
2145 src = operand_subword_force (srcreg,
2146 xbitpos / BITS_PER_WORD,
2147 BLKmode);
2148
2149 /* We need a new destination operand each time bitpos is on
2150 a word boundary. */
2151 if (bitpos % BITS_PER_WORD == 0)
2152 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2153
2154 /* Use xbitpos for the source extraction (right justified) and
2155 bitpos for the destination store (left justified). */
2156 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2157 extract_bit_field (src, bitsize,
2158 xbitpos % BITS_PER_WORD, 1,
2159 NULL_RTX, word_mode,
2160 word_mode,
2161 bitsize / BITS_PER_UNIT,
2162 BITS_PER_WORD),
2163 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2164 }
2165 return tgtblk;
2166 }
2167
2168
2169 /* Add a USE expression for REG to the (possibly empty) list pointed
2170 to by CALL_FUSAGE. REG must denote a hard register. */
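
/* Typical use (a sketch with hypothetical variable names): the caller
   accumulates a list and later attaches it to the call insn, e.g.

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM));

   the finished list ends up as the CALL_INSN_FUNCTION_USAGE of the
   emitted call.  */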
2171
2172 void
2173 use_reg (call_fusage, reg)
2174 rtx *call_fusage, reg;
2175 {
2176 if (GET_CODE (reg) != REG
2177 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2178 abort();
2179
2180 *call_fusage
2181 = gen_rtx_EXPR_LIST (VOIDmode,
2182 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2183 }
2184
2185 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2186 starting at REGNO. All of these registers must be hard registers. */
2187
2188 void
2189 use_regs (call_fusage, regno, nregs)
2190 rtx *call_fusage;
2191 int regno;
2192 int nregs;
2193 {
2194 int i;
2195
2196 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2197 abort ();
2198
2199 for (i = 0; i < nregs; i++)
2200 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2201 }
2202
2203 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2204 PARALLEL REGS. This is for calls that pass values in multiple
2205 non-contiguous locations. The Irix 6 ABI has examples of this. */
2206
2207 void
2208 use_group_regs (call_fusage, regs)
2209 rtx *call_fusage;
2210 rtx regs;
2211 {
2212 int i;
2213
2214 for (i = 0; i < XVECLEN (regs, 0); i++)
2215 {
2216 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2217
2218 /* A NULL entry means the parameter goes both on the stack and in
2219 registers. This can also be a MEM for targets that pass values
2220 partially on the stack and partially in registers. */
2221 if (reg != 0 && GET_CODE (reg) == REG)
2222 use_reg (call_fusage, reg);
2223 }
2224 }
2225 \f
2226 /* Generate several move instructions to clear LEN bytes of block TO.
2227 (A MEM rtx with BLKmode). The caller must pass TO through
2228 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2229 we can assume. */
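
/* Its caller in this file, clear_storage below, invokes it roughly as

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       clear_by_pieces (object, INTVAL (size), align);

   i.e. only for constant sizes small enough to be worth open-coding.  */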
2230
2231 static void
2232 clear_by_pieces (to, len, align)
2233 rtx to;
2234 int len;
2235 unsigned int align;
2236 {
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2242
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2249
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2255
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2257
2258 /* If copying requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2263 {
2264 /* Determine the main mode we'll be using */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2269
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2271 {
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2275 }
2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2281 }
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2284 }
2285
2286 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2289
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
2292
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2306 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2307
2308 max_size = GET_MODE_SIZE (mode);
2309 }
2310
2311 /* The code above should have handled everything. */
2312 if (data.len != 0)
2313 abort ();
2314 }
2315
2316 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2317 with move instructions for mode MODE. GENFUN is the gen_... function
2318 to make a move insn for that mode. DATA has all the other info. */
2319
2320 static void
2321 clear_by_pieces_1 (genfun, mode, data)
2322 rtx (*genfun) PARAMS ((rtx, ...));
2323 enum machine_mode mode;
2324 struct clear_by_pieces *data;
2325 {
2326 register int size = GET_MODE_SIZE (mode);
2327 register rtx to1;
2328
2329 while (data->len >= size)
2330 {
2331 if (data->reverse) data->offset -= size;
2332
2333 to1 = (data->autinc_to
2334 ? gen_rtx_MEM (mode, data->to_addr)
2335 : copy_rtx (change_address (data->to, mode,
2336 plus_constant (data->to_addr,
2337 data->offset))));
2338 MEM_IN_STRUCT_P (to1) = data->to_struct;
2339
2340 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2342
2343 emit_insn ((*genfun) (to1, const0_rtx));
2344 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2345 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2346
2347 if (! data->reverse) data->offset += size;
2348
2349 data->len -= size;
2350 }
2351 }
2352 \f
2353 /* Write zeros through the storage of OBJECT.
2354 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2355 the maximum alignment we can assume, measured in bytes.
2356
2357 If we call a function that returns the length of the block, return it. */
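
/* Usage sketch (illustrative; OBJECT and TYPE are a caller's values):
   zeroing a BLKmode object of known type might look like

     clear_storage (object, GEN_INT (int_size_in_bytes (type)),
                    TYPE_ALIGN (type) / BITS_PER_UNIT);

   passing the size as an rtx and the alignment in bytes.  */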
2358
2359 rtx
2360 clear_storage (object, size, align)
2361 rtx object;
2362 rtx size;
2363 unsigned int align;
2364 {
2365 #ifdef TARGET_MEM_FUNCTIONS
2366 static tree fn;
2367 tree call_expr, arg_list;
2368 #endif
2369 rtx retval = 0;
2370
2371 if (GET_MODE (object) == BLKmode)
2372 {
2373 object = protect_from_queue (object, 1);
2374 size = protect_from_queue (size, 0);
2375
2376 if (GET_CODE (size) == CONST_INT
2377 && MOVE_BY_PIECES_P (INTVAL (size), align))
2378 clear_by_pieces (object, INTVAL (size), align);
2379
2380 else
2381 {
2382 /* Try the most limited insn first, because there's no point
2383 including more than one in the machine description unless
2384 the more limited one has some advantage. */
2385
2386 rtx opalign = GEN_INT (align);
2387 enum machine_mode mode;
2388
2389 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2390 mode = GET_MODE_WIDER_MODE (mode))
2391 {
2392 enum insn_code code = clrstr_optab[(int) mode];
2393 insn_operand_predicate_fn pred;
2394
2395 if (code != CODE_FOR_nothing
2396 /* We don't need MODE to be narrower than
2397 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2398 the mode mask, as it is returned by the macro, it will
2399 definitely be less than the actual mode mask. */
2400 && ((GET_CODE (size) == CONST_INT
2401 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2402 <= (GET_MODE_MASK (mode) >> 1)))
2403 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2404 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2405 || (*pred) (object, BLKmode))
2406 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2407 || (*pred) (opalign, VOIDmode)))
2408 {
2409 rtx op1;
2410 rtx last = get_last_insn ();
2411 rtx pat;
2412
2413 op1 = convert_to_mode (mode, size, 1);
2414 pred = insn_data[(int) code].operand[1].predicate;
2415 if (pred != 0 && ! (*pred) (op1, mode))
2416 op1 = copy_to_mode_reg (mode, op1);
2417
2418 pat = GEN_FCN ((int) code) (object, op1, opalign);
2419 if (pat)
2420 {
2421 emit_insn (pat);
2422 return 0;
2423 }
2424 else
2425 delete_insns_since (last);
2426 }
2427 }
2428
2429 /* OBJECT or SIZE may have been passed through protect_from_queue.
2430
2431 It is unsafe to save the value generated by protect_from_queue
2432 and reuse it later. Consider what happens if emit_queue is
2433 called before the return value from protect_from_queue is used.
2434
2435 Expansion of the CALL_EXPR below will call emit_queue before
2436 we are finished emitting RTL for argument setup. So if we are
2437 not careful we could get the wrong value for an argument.
2438
2439 To avoid this problem we go ahead and emit code to copy OBJECT
2440 and SIZE into new pseudos. We can then place those new pseudos
2441 into an RTL_EXPR and use them later, even after a call to
2442 emit_queue.
2443
2444 Note this is not strictly needed for library calls since they
2445 do not call emit_queue before loading their arguments. However,
2446 we may need to have library calls call emit_queue in the future
2447 since failing to do so could cause problems for targets which
2448 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2449 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2450
2451 #ifdef TARGET_MEM_FUNCTIONS
2452 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2453 #else
2454 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2455 TREE_UNSIGNED (integer_type_node));
2456 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2457 #endif
2458
2459
2460 #ifdef TARGET_MEM_FUNCTIONS
2461 /* It is incorrect to use the libcall calling conventions to call
2462 memset in this context.
2463
2464 This could be a user call to memset and the user may wish to
2465 examine the return value from memset.
2466
2467 For targets where libcalls and normal calls have different
2468 conventions for returning pointers, we could end up generating
2469 incorrect code.
2470
2471 So instead of using a libcall sequence we build up a suitable
2472 CALL_EXPR and expand the call in the normal fashion. */
2473 if (fn == NULL_TREE)
2474 {
2475 tree fntype;
2476
2477 /* This was copied from except.c; I don't know whether all of this
2478 is necessary in this context. */
2479 fn = get_identifier ("memset");
2480 push_obstacks_nochange ();
2481 end_temporary_allocation ();
2482 fntype = build_pointer_type (void_type_node);
2483 fntype = build_function_type (fntype, NULL_TREE);
2484 fn = build_decl (FUNCTION_DECL, fn, fntype);
2485 ggc_add_tree_root (&fn, 1);
2486 DECL_EXTERNAL (fn) = 1;
2487 TREE_PUBLIC (fn) = 1;
2488 DECL_ARTIFICIAL (fn) = 1;
2489 make_decl_rtl (fn, NULL_PTR, 1);
2490 assemble_external (fn);
2491 pop_obstacks ();
2492 }
2493
2494 /* We need to make an argument list for the function call.
2495
2496 memset has three arguments: the first is a void * address, the
2497 second an integer with the initialization value, and the last is
2498 a size_t byte count for the copy. */
2499 arg_list
2500 = build_tree_list (NULL_TREE,
2501 make_tree (build_pointer_type (void_type_node),
2502 object));
2503 TREE_CHAIN (arg_list)
2504 = build_tree_list (NULL_TREE,
2505 make_tree (integer_type_node, const0_rtx));
2506 TREE_CHAIN (TREE_CHAIN (arg_list))
2507 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2508 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2509
2510 /* Now we have to build up the CALL_EXPR itself. */
2511 call_expr = build1 (ADDR_EXPR,
2512 build_pointer_type (TREE_TYPE (fn)), fn);
2513 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2514 call_expr, arg_list, NULL_TREE);
2515 TREE_SIDE_EFFECTS (call_expr) = 1;
2516
2517 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2518 #else
2519 emit_library_call (bzero_libfunc, 0,
2520 VOIDmode, 2, object, Pmode, size,
2521 TYPE_MODE (integer_type_node));
2522 #endif
2523 }
2524 }
2525 else
2526 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2527
2528 return retval;
2529 }
2530
2531 /* Generate code to copy Y into X.
2532 Both Y and X must have the same mode, except that
2533 Y can be a constant with VOIDmode.
2534 This mode cannot be BLKmode; use emit_block_move for that.
2535
2536 Return the last instruction emitted. */
2537
2538 rtx
2539 emit_move_insn (x, y)
2540 rtx x, y;
2541 {
2542 enum machine_mode mode = GET_MODE (x);
2543
2544 x = protect_from_queue (x, 1);
2545 y = protect_from_queue (y, 0);
2546
2547 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2548 abort ();
2549
2550 /* Never force constant_p_rtx to memory. */
2551 if (GET_CODE (y) == CONSTANT_P_RTX)
2552 ;
2553 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2554 y = force_const_mem (mode, y);
2555
2556 /* If X or Y are memory references, verify that their addresses are valid
2557 for the machine. */
2558 if (GET_CODE (x) == MEM
2559 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2560 && ! push_operand (x, GET_MODE (x)))
2561 || (flag_force_addr
2562 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2563 x = change_address (x, VOIDmode, XEXP (x, 0));
2564
2565 if (GET_CODE (y) == MEM
2566 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2567 || (flag_force_addr
2568 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2569 y = change_address (y, VOIDmode, XEXP (y, 0));
2570
2571 if (mode == BLKmode)
2572 abort ();
2573
2574 return emit_move_insn_1 (x, y);
2575 }
2576
2577 /* Low level part of emit_move_insn.
2578 Called just like emit_move_insn, but assumes X and Y
2579 are basically valid. */
2580
2581 rtx
2582 emit_move_insn_1 (x, y)
2583 rtx x, y;
2584 {
2585 enum machine_mode mode = GET_MODE (x);
2586 enum machine_mode submode;
2587 enum mode_class class = GET_MODE_CLASS (mode);
2588 int i;
2589
2590 if (mode >= MAX_MACHINE_MODE)
2591 abort ();
2592
2593 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2594 return
2595 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2596
2597 /* Expand complex moves by moving real part and imag part, if possible. */
2598 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2599 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2600 * BITS_PER_UNIT),
2601 (class == MODE_COMPLEX_INT
2602 ? MODE_INT : MODE_FLOAT),
2603 0))
2604 && (mov_optab->handlers[(int) submode].insn_code
2605 != CODE_FOR_nothing))
2606 {
2607 /* Don't split destination if it is a stack push. */
2608 int stack = push_operand (x, GET_MODE (x));
2609
2610 /* If this is a stack push, push the highpart first, so it
2611 will be in the argument order.
2612
2613 In that case, change_address is used only to convert
2614 the mode, not to change the address. */
2615 if (stack)
2616 {
2617 /* Note that the real part always precedes the imag part in memory
2618 regardless of machine's endianness. */
2619 #ifdef STACK_GROWS_DOWNWARD
2620 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2621 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2622 gen_imagpart (submode, y)));
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 #else
2627 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2628 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2629 gen_realpart (submode, y)));
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_imagpart (submode, y)));
2633 #endif
2634 }
2635 else
2636 {
2637 rtx realpart_x, realpart_y;
2638 rtx imagpart_x, imagpart_y;
2639
2640 /* If this is a complex value with each part being smaller than a
2641 word, the usual calling sequence will likely pack the pieces into
2642 a single register. Unfortunately, SUBREG of hard registers only
2643 deals in terms of words, so we have a problem converting input
2644 arguments to the CONCAT of two registers that is used elsewhere
2645 for complex values. If this is before reload, we can copy it into
2646 memory and reload. FIXME, we should see about using extract and
2647 insert on integer registers, but complex short and complex char
2648 variables should be rarely used. */
2649 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2650 && (reload_in_progress | reload_completed) == 0)
2651 {
2652 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2653 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2654
2655 if (packed_dest_p || packed_src_p)
2656 {
2657 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2658 ? MODE_FLOAT : MODE_INT);
2659
2660 enum machine_mode reg_mode =
2661 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2662
2663 if (reg_mode != BLKmode)
2664 {
2665 rtx mem = assign_stack_temp (reg_mode,
2666 GET_MODE_SIZE (mode), 0);
2667
2668 rtx cmem = change_address (mem, mode, NULL_RTX);
2669
2670 cfun->cannot_inline = "function uses short complex types";
2671
2672 if (packed_dest_p)
2673 {
2674 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2675 emit_move_insn_1 (cmem, y);
2676 return emit_move_insn_1 (sreg, mem);
2677 }
2678 else
2679 {
2680 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2681 emit_move_insn_1 (mem, sreg);
2682 return emit_move_insn_1 (x, cmem);
2683 }
2684 }
2685 }
2686 }
2687
2688 realpart_x = gen_realpart (submode, x);
2689 realpart_y = gen_realpart (submode, y);
2690 imagpart_x = gen_imagpart (submode, x);
2691 imagpart_y = gen_imagpart (submode, y);
2692
2693 /* Show the output dies here. This is necessary for SUBREGs
2694 of pseudos since we cannot track their lifetimes correctly;
2695 hard regs shouldn't appear here except as return values.
2696 We never want to emit such a clobber after reload. */
2697 if (x != y
2698 && ! (reload_in_progress || reload_completed)
2699 && (GET_CODE (realpart_x) == SUBREG
2700 || GET_CODE (imagpart_x) == SUBREG))
2701 {
2702 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2703 }
2704
2705 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2706 (realpart_x, realpart_y));
2707 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2708 (imagpart_x, imagpart_y));
2709 }
2710
2711 return get_last_insn ();
2712 }
2713
2714 /* This will handle any multi-word mode that lacks a move_insn pattern.
2715 However, you will get better code if you define such patterns,
2716 even if they must turn into multiple assembler instructions. */
2717 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2718 {
2719 rtx last_insn = 0;
2720 rtx seq;
2721 int need_clobber;
2722
2723 #ifdef PUSH_ROUNDING
2724
2725 /* If X is a push on the stack, do the push now and replace
2726 X with a reference to the stack pointer. */
2727 if (push_operand (x, GET_MODE (x)))
2728 {
2729 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2730 x = change_address (x, VOIDmode, stack_pointer_rtx);
2731 }
2732 #endif
2733
2734 start_sequence ();
2735
2736 need_clobber = 0;
2737 for (i = 0;
2738 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2739 i++)
2740 {
2741 rtx xpart = operand_subword (x, i, 1, mode);
2742 rtx ypart = operand_subword (y, i, 1, mode);
2743
2744 /* If we can't get a part of Y, put Y into memory if it is a
2745 constant. Otherwise, force it into a register. If we still
2746 can't get a part of Y, abort. */
2747 if (ypart == 0 && CONSTANT_P (y))
2748 {
2749 y = force_const_mem (mode, y);
2750 ypart = operand_subword (y, i, 1, mode);
2751 }
2752 else if (ypart == 0)
2753 ypart = operand_subword_force (y, i, mode);
2754
2755 if (xpart == 0 || ypart == 0)
2756 abort ();
2757
2758 need_clobber |= (GET_CODE (xpart) == SUBREG);
2759
2760 last_insn = emit_move_insn (xpart, ypart);
2761 }
2762
2763 seq = gen_sequence ();
2764 end_sequence ();
2765
2766 /* Show the output dies here. This is necessary for SUBREGs
2767 of pseudos since we cannot track their lifetimes correctly;
2768 hard regs shouldn't appear here except as return values.
2769 We never want to emit such a clobber after reload. */
2770 if (x != y
2771 && ! (reload_in_progress || reload_completed)
2772 && need_clobber != 0)
2773 {
2774 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2775 }
2776
2777 emit_insn (seq);
2778
2779 return last_insn;
2780 }
2781 else
2782 abort ();
2783 }
2784 \f
2785 /* Pushing data onto the stack. */
2786
2787 /* Push a block of length SIZE (perhaps variable)
2788 and return an rtx to address the beginning of the block.
2789 Note that it is not possible for the value returned to be a QUEUED.
2790 The value may be virtual_outgoing_args_rtx.
2791
2792 EXTRA is the number of bytes of padding to push in addition to SIZE.
2793 BELOW nonzero means this padding comes at low addresses;
2794 otherwise, the padding comes at high addresses. */
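
/* For example, emit_push_insn below allocates space for a BLKmode
   argument with

     temp = push_block (size, extra, where_pad == downward);

   and then copies the data to the address returned.  */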
2795
2796 rtx
2797 push_block (size, extra, below)
2798 rtx size;
2799 int extra, below;
2800 {
2801 register rtx temp;
2802
2803 size = convert_modes (Pmode, ptr_mode, size, 1);
2804 if (CONSTANT_P (size))
2805 anti_adjust_stack (plus_constant (size, extra));
2806 else if (GET_CODE (size) == REG && extra == 0)
2807 anti_adjust_stack (size);
2808 else
2809 {
2810 rtx temp = copy_to_mode_reg (Pmode, size);
2811 if (extra != 0)
2812 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2813 temp, 0, OPTAB_LIB_WIDEN);
2814 anti_adjust_stack (temp);
2815 }
2816
2817 #if defined (STACK_GROWS_DOWNWARD) \
2818 || (defined (ARGS_GROW_DOWNWARD) \
2819 && !defined (ACCUMULATE_OUTGOING_ARGS))
2820
2821 /* Return the lowest stack address when STACK or ARGS grow downward and
2822 we are not accumulating outgoing arguments (the c4x port uses such
2823 conventions). */
2824 temp = virtual_outgoing_args_rtx;
2825 if (extra != 0 && below)
2826 temp = plus_constant (temp, extra);
2827 #else
2828 if (GET_CODE (size) == CONST_INT)
2829 temp = plus_constant (virtual_outgoing_args_rtx,
2830 - INTVAL (size) - (below ? 0 : extra));
2831 else if (extra != 0 && !below)
2832 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2833 negate_rtx (Pmode, plus_constant (size, extra)));
2834 else
2835 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2836 negate_rtx (Pmode, size));
2837 #endif
2838
2839 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2840 }
2841
2842 rtx
2843 gen_push_operand ()
2844 {
2845 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2846 }
2847
2848 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2849 block of SIZE bytes. */
2850
2851 static rtx
2852 get_push_address (size)
2853 int size;
2854 {
2855 register rtx temp;
2856
2857 if (STACK_PUSH_CODE == POST_DEC)
2858 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2859 else if (STACK_PUSH_CODE == POST_INC)
2860 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2861 else
2862 temp = stack_pointer_rtx;
2863
2864 return copy_to_reg (temp);
2865 }
2866
2867 /* Generate code to push X onto the stack, assuming it has mode MODE and
2868 type TYPE.
2869 MODE is redundant except when X is a CONST_INT (since they don't
2870 carry mode info).
2871 SIZE is an rtx for the size of data to be copied (in bytes),
2872 needed only if X is BLKmode.
2873
2874 ALIGN (in bytes) is the maximum alignment we can assume.
2875
2876 If PARTIAL and REG are both nonzero, then copy that many of the first
2877 words of X into registers starting with REG, and push the rest of X.
2878 The amount of space pushed is decreased by PARTIAL words,
2879 rounded *down* to a multiple of PARM_BOUNDARY.
2880 REG must be a hard register in this case.
2881 If REG is zero but PARTIAL is not, take all other actions for an
2882 argument partially in registers, but do not actually load any
2883 registers.
2884
2885 EXTRA is the amount in bytes of extra space to leave next to this arg.
2886 This is ignored if an argument block has already been allocated.
2887
2888 On a machine that lacks real push insns, ARGS_ADDR is the address of
2889 the bottom of the argument block for this call. We use indexing off there
2890 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2891 argument block has not been preallocated.
2892
2893 ARGS_SO_FAR is the size of args previously pushed for this call.
2894
2895 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2896 for arguments passed in registers. If nonzero, it will be the number
2897 of bytes required. */
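
/* A sketch of a typical call (illustrative; the argument-pushing code in
   calls.c is the usual caller, and ARG_RTX, ARG_MODE, ARG_TYPE, SIZE_RTX,
   PARTIAL, REG, ARGS_ADDR, ARGS_SO_FAR and ALIGNMENT_PAD stand for its
   values):

     emit_push_insn (arg_rtx, arg_mode, arg_type, size_rtx,
                     TYPE_ALIGN (arg_type) / BITS_PER_UNIT, partial, reg,
                     0, args_addr, args_so_far, reg_parm_stack_space,
                     alignment_pad);

   Note that this function also calls itself, word by word, to push the
   stack part of a scalar that is partly in registers.  */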
2898
2899 void
2900 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2901 args_addr, args_so_far, reg_parm_stack_space,
2902 alignment_pad)
2903 register rtx x;
2904 enum machine_mode mode;
2905 tree type;
2906 rtx size;
2907 unsigned int align;
2908 int partial;
2909 rtx reg;
2910 int extra;
2911 rtx args_addr;
2912 rtx args_so_far;
2913 int reg_parm_stack_space;
2914 rtx alignment_pad;
2915 {
2916 rtx xinner;
2917 enum direction stack_direction
2918 #ifdef STACK_GROWS_DOWNWARD
2919 = downward;
2920 #else
2921 = upward;
2922 #endif
2923
2924 /* Decide where to pad the argument: `downward' for below,
2925 `upward' for above, or `none' for don't pad it.
2926 Default is below for small data on big-endian machines; else above. */
2927 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2928
2929 /* Invert direction if stack is post-update. */
2930 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2931 if (where_pad != none)
2932 where_pad = (where_pad == downward ? upward : downward);
2933
2934 xinner = x = protect_from_queue (x, 0);
2935
2936 if (mode == BLKmode)
2937 {
2938 /* Copy a block into the stack, entirely or partially. */
2939
2940 register rtx temp;
2941 int used = partial * UNITS_PER_WORD;
2942 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2943 int skip;
2944
2945 if (size == 0)
2946 abort ();
2947
2948 used -= offset;
2949
2950 /* USED is now the # of bytes we need not copy to the stack
2951 because registers will take care of them. */
2952
2953 if (partial != 0)
2954 xinner = change_address (xinner, BLKmode,
2955 plus_constant (XEXP (xinner, 0), used));
2956
2957 /* If the partial register-part of the arg counts in its stack size,
2958 skip the part of stack space corresponding to the registers.
2959 Otherwise, start copying to the beginning of the stack space,
2960 by setting SKIP to 0. */
2961 skip = (reg_parm_stack_space == 0) ? 0 : used;
2962
2963 #ifdef PUSH_ROUNDING
2964 /* Do it with several push insns if that doesn't take lots of insns
2965 and if there is no difficulty with push insns that skip bytes
2966 on the stack for alignment purposes. */
2967 if (args_addr == 0
2968 && GET_CODE (size) == CONST_INT
2969 && skip == 0
2970 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2971 /* Here we avoid the case of a structure whose weak alignment
2972 forces many pushes of a small amount of data,
2973 and such small pushes do rounding that causes trouble. */
2974 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2975 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2976 || PUSH_ROUNDING (align) == align)
2977 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2978 {
2979 /* Push padding now if padding above and stack grows down,
2980 or if padding below and stack grows up.
2981 But if space already allocated, this has already been done. */
2982 if (extra && args_addr == 0
2983 && where_pad != none && where_pad != stack_direction)
2984 anti_adjust_stack (GEN_INT (extra));
2985
2986 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2987 INTVAL (size) - used, align);
2988
2989 if (current_function_check_memory_usage && ! in_check_memory_usage)
2990 {
2991 rtx temp;
2992
2993 in_check_memory_usage = 1;
2994 temp = get_push_address (INTVAL(size) - used);
2995 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2996 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2997 temp, Pmode,
2998 XEXP (xinner, 0), Pmode,
2999 GEN_INT (INTVAL(size) - used),
3000 TYPE_MODE (sizetype));
3001 else
3002 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3003 temp, Pmode,
3004 GEN_INT (INTVAL(size) - used),
3005 TYPE_MODE (sizetype),
3006 GEN_INT (MEMORY_USE_RW),
3007 TYPE_MODE (integer_type_node));
3008 in_check_memory_usage = 0;
3009 }
3010 }
3011 else
3012 #endif /* PUSH_ROUNDING */
3013 {
3014 /* Otherwise make space on the stack and copy the data
3015 to the address of that space. */
3016
3017 /* Deduct words put into registers from the size we must copy. */
3018 if (partial != 0)
3019 {
3020 if (GET_CODE (size) == CONST_INT)
3021 size = GEN_INT (INTVAL (size) - used);
3022 else
3023 size = expand_binop (GET_MODE (size), sub_optab, size,
3024 GEN_INT (used), NULL_RTX, 0,
3025 OPTAB_LIB_WIDEN);
3026 }
3027
3028 /* Get the address of the stack space.
3029 In this case, we do not deal with EXTRA separately.
3030 A single stack adjust will do. */
3031 if (! args_addr)
3032 {
3033 temp = push_block (size, extra, where_pad == downward);
3034 extra = 0;
3035 }
3036 else if (GET_CODE (args_so_far) == CONST_INT)
3037 temp = memory_address (BLKmode,
3038 plus_constant (args_addr,
3039 skip + INTVAL (args_so_far)));
3040 else
3041 temp = memory_address (BLKmode,
3042 plus_constant (gen_rtx_PLUS (Pmode,
3043 args_addr,
3044 args_so_far),
3045 skip));
3046 if (current_function_check_memory_usage && ! in_check_memory_usage)
3047 {
3048 rtx target;
3049
3050 in_check_memory_usage = 1;
3051 target = copy_to_reg (temp);
3052 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3053 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3054 target, Pmode,
3055 XEXP (xinner, 0), Pmode,
3056 size, TYPE_MODE (sizetype));
3057 else
3058 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3059 target, Pmode,
3060 size, TYPE_MODE (sizetype),
3061 GEN_INT (MEMORY_USE_RW),
3062 TYPE_MODE (integer_type_node));
3063 in_check_memory_usage = 0;
3064 }
3065
3066 /* TEMP is the address of the block. Copy the data there. */
3067 if (GET_CODE (size) == CONST_INT
3068 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3069 {
3070 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3071 INTVAL (size), align);
3072 goto ret;
3073 }
3074 else
3075 {
3076 rtx opalign = GEN_INT (align);
3077 enum machine_mode mode;
3078 rtx target = gen_rtx_MEM (BLKmode, temp);
3079
3080 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3081 mode != VOIDmode;
3082 mode = GET_MODE_WIDER_MODE (mode))
3083 {
3084 enum insn_code code = movstr_optab[(int) mode];
3085 insn_operand_predicate_fn pred;
3086
3087 if (code != CODE_FOR_nothing
3088 && ((GET_CODE (size) == CONST_INT
3089 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3090 <= (GET_MODE_MASK (mode) >> 1)))
3091 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3092 && (!(pred = insn_data[(int) code].operand[0].predicate)
3093 || ((*pred) (target, BLKmode)))
3094 && (!(pred = insn_data[(int) code].operand[1].predicate)
3095 || ((*pred) (xinner, BLKmode)))
3096 && (!(pred = insn_data[(int) code].operand[3].predicate)
3097 || ((*pred) (opalign, VOIDmode))))
3098 {
3099 rtx op2 = convert_to_mode (mode, size, 1);
3100 rtx last = get_last_insn ();
3101 rtx pat;
3102
3103 pred = insn_data[(int) code].operand[2].predicate;
3104 if (pred != 0 && ! (*pred) (op2, mode))
3105 op2 = copy_to_mode_reg (mode, op2);
3106
3107 pat = GEN_FCN ((int) code) (target, xinner,
3108 op2, opalign);
3109 if (pat)
3110 {
3111 emit_insn (pat);
3112 goto ret;
3113 }
3114 else
3115 delete_insns_since (last);
3116 }
3117 }
3118 }
3119
3120 #ifndef ACCUMULATE_OUTGOING_ARGS
3121 /* If the source is referenced relative to the stack pointer,
3122 copy it to another register to stabilize it. We do not need
3123 to do this if we know that we won't be changing sp. */
3124
3125 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3126 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3127 temp = copy_to_reg (temp);
3128 #endif
3129
3130 /* Make inhibit_defer_pop nonzero around the library call
3131 to force it to pop the bcopy-arguments right away. */
3132 NO_DEFER_POP;
3133 #ifdef TARGET_MEM_FUNCTIONS
3134 emit_library_call (memcpy_libfunc, 0,
3135 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3136 convert_to_mode (TYPE_MODE (sizetype),
3137 size, TREE_UNSIGNED (sizetype)),
3138 TYPE_MODE (sizetype));
3139 #else
3140 emit_library_call (bcopy_libfunc, 0,
3141 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3142 convert_to_mode (TYPE_MODE (integer_type_node),
3143 size,
3144 TREE_UNSIGNED (integer_type_node)),
3145 TYPE_MODE (integer_type_node));
3146 #endif
3147 OK_DEFER_POP;
3148 }
3149 }
3150 else if (partial > 0)
3151 {
3152 /* Scalar partly in registers. */
3153
3154 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3155 int i;
3156 int not_stack;
3157 /* # words of start of argument
3158 that we must make space for but need not store. */
3159 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3160 int args_offset = INTVAL (args_so_far);
3161 int skip;
3162
3163 /* Push padding now if padding above and stack grows down,
3164 or if padding below and stack grows up.
3165 But if space already allocated, this has already been done. */
3166 if (extra && args_addr == 0
3167 && where_pad != none && where_pad != stack_direction)
3168 anti_adjust_stack (GEN_INT (extra));
3169
3170 /* If we make space by pushing it, we might as well push
3171 the real data. Otherwise, we can leave OFFSET nonzero
3172 and leave the space uninitialized. */
3173 if (args_addr == 0)
3174 offset = 0;
3175
3176 /* Now NOT_STACK gets the number of words that we don't need to
3177 allocate on the stack. */
3178 not_stack = partial - offset;
3179
3180 /* If the partial register-part of the arg counts in its stack size,
3181 skip the part of stack space corresponding to the registers.
3182 Otherwise, start copying to the beginning of the stack space,
3183 by setting SKIP to 0. */
3184 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3185
3186 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3187 x = validize_mem (force_const_mem (mode, x));
3188
3189 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3190 SUBREGs of such registers are not allowed. */
3191 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3192 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3193 x = copy_to_reg (x);
3194
3195 /* Loop over all the words allocated on the stack for this arg. */
3196 /* We can do it by words, because any scalar bigger than a word
3197 has a size a multiple of a word. */
3198 #ifndef PUSH_ARGS_REVERSED
3199 for (i = not_stack; i < size; i++)
3200 #else
3201 for (i = size - 1; i >= not_stack; i--)
3202 #endif
3203 if (i >= not_stack + offset)
3204 emit_push_insn (operand_subword_force (x, i, mode),
3205 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3206 0, args_addr,
3207 GEN_INT (args_offset + ((i - not_stack + skip)
3208 * UNITS_PER_WORD)),
3209 reg_parm_stack_space, alignment_pad);
3210 }
3211 else
3212 {
3213 rtx addr;
3214 rtx target = NULL_RTX;
3215
3216 /* Push padding now if padding above and stack grows down,
3217 or if padding below and stack grows up.
3218 But if space already allocated, this has already been done. */
3219 if (extra && args_addr == 0
3220 && where_pad != none && where_pad != stack_direction)
3221 anti_adjust_stack (GEN_INT (extra));
3222
3223 #ifdef PUSH_ROUNDING
3224 if (args_addr == 0)
3225 addr = gen_push_operand ();
3226 else
3227 #endif
3228 {
3229 if (GET_CODE (args_so_far) == CONST_INT)
3230 addr
3231 = memory_address (mode,
3232 plus_constant (args_addr,
3233 INTVAL (args_so_far)));
3234 else
3235 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3236 args_so_far));
3237 target = addr;
3238 }
3239
3240 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3241
3242 if (current_function_check_memory_usage && ! in_check_memory_usage)
3243 {
3244 in_check_memory_usage = 1;
3245 if (target == 0)
3246 target = get_push_address (GET_MODE_SIZE (mode));
3247
3248 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3249 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3250 target, Pmode,
3251 XEXP (x, 0), Pmode,
3252 GEN_INT (GET_MODE_SIZE (mode)),
3253 TYPE_MODE (sizetype));
3254 else
3255 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3256 target, Pmode,
3257 GEN_INT (GET_MODE_SIZE (mode)),
3258 TYPE_MODE (sizetype),
3259 GEN_INT (MEMORY_USE_RW),
3260 TYPE_MODE (integer_type_node));
3261 in_check_memory_usage = 0;
3262 }
3263 }
3264
3265 ret:
3266 /* If part should go in registers, copy that part
3267 into the appropriate registers. Do this now, at the end,
3268 since mem-to-mem copies above may do function calls. */
3269 if (partial > 0 && reg != 0)
3270 {
3271 /* Handle calls that pass values in multiple non-contiguous locations.
3272 The Irix 6 ABI has examples of this. */
3273 if (GET_CODE (reg) == PARALLEL)
3274 emit_group_load (reg, x, -1, align); /* ??? size? */
3275 else
3276 move_block_to_reg (REGNO (reg), x, partial, mode);
3277 }
3278
3279 if (extra && args_addr == 0 && where_pad == stack_direction)
3280 anti_adjust_stack (GEN_INT (extra));
3281
3282 if (alignment_pad)
3283 anti_adjust_stack (alignment_pad);
3284 }
3285 \f
3286 /* Expand an assignment that stores the value of FROM into TO.
3287 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3288 (This may contain a QUEUED rtx;
3289 if the value is constant, this rtx is a constant.)
3290 Otherwise, the returned value is NULL_RTX.
3291
3292 SUGGEST_REG is no longer actually used.
3293 It used to mean, copy the value through a register
3294 and return that register, if that is possible.
3295 We now use WANT_VALUE to decide whether to do this. */
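
/* Usage sketch: the expression expander reaches this routine for a
   MODIFY_EXPR roughly as (illustrative, not the literal call)

     expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        want_value, 0);

   i.e. the lhs tree, the rhs tree, whether the value is needed, and the
   now-unused SUGGEST_REG.  */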
3296
3297 rtx
3298 expand_assignment (to, from, want_value, suggest_reg)
3299 tree to, from;
3300 int want_value;
3301 int suggest_reg ATTRIBUTE_UNUSED;
3302 {
3303 register rtx to_rtx = 0;
3304 rtx result;
3305
3306 /* Don't crash if the lhs of the assignment was erroneous. */
3307
3308 if (TREE_CODE (to) == ERROR_MARK)
3309 {
3310 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3311 return want_value ? result : NULL_RTX;
3312 }
3313
3314 /* Assignment of a structure component needs special treatment
3315 if the structure component's rtx is not simply a MEM.
3316 Assignment of an array element at a constant index, and assignment of
3317 an array element in an unaligned packed structure field, has the same
3318 problem. */
3319
3320 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3321 || TREE_CODE (to) == ARRAY_REF)
3322 {
3323 enum machine_mode mode1;
3324 int bitsize;
3325 int bitpos;
3326 tree offset;
3327 int unsignedp;
3328 int volatilep = 0;
3329 tree tem;
3330 unsigned int alignment;
3331
3332 push_temp_slots ();
3333 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3334 &unsignedp, &volatilep, &alignment);
3335
3336 /* If we are going to use store_bit_field and extract_bit_field,
3337 make sure to_rtx will be safe for multiple use. */
3338
3339 if (mode1 == VOIDmode && want_value)
3340 tem = stabilize_reference (tem);
3341
3342 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3343 if (offset != 0)
3344 {
3345 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3346
3347 if (GET_CODE (to_rtx) != MEM)
3348 abort ();
3349
3350 if (GET_MODE (offset_rtx) != ptr_mode)
3351 {
3352 #ifdef POINTERS_EXTEND_UNSIGNED
3353 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3354 #else
3355 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3356 #endif
3357 }
3358
3359 /* A constant address in TO_RTX can have VOIDmode; we must not try
3360 to call force_reg for that case, so avoid it. */
3361 if (GET_CODE (to_rtx) == MEM
3362 && GET_MODE (to_rtx) == BLKmode
3363 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3364 && bitsize
3365 && (bitpos % bitsize) == 0
3366 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3367 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3368 {
3369 rtx temp = change_address (to_rtx, mode1,
3370 plus_constant (XEXP (to_rtx, 0),
3371 (bitpos /
3372 BITS_PER_UNIT)));
3373 if (GET_CODE (XEXP (temp, 0)) == REG)
3374 to_rtx = temp;
3375 else
3376 to_rtx = change_address (to_rtx, mode1,
3377 force_reg (GET_MODE (XEXP (temp, 0)),
3378 XEXP (temp, 0)));
3379 bitpos = 0;
3380 }
3381
3382 to_rtx = change_address (to_rtx, VOIDmode,
3383 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3384 force_reg (ptr_mode,
3385 offset_rtx)));
3386 }
3387
3388 if (volatilep)
3389 {
3390 if (GET_CODE (to_rtx) == MEM)
3391 {
3392 /* When the offset is zero, to_rtx is the address of the
3393 structure we are storing into, and hence may be shared.
3394 We must make a new MEM before setting the volatile bit. */
3395 if (offset == 0)
3396 to_rtx = copy_rtx (to_rtx);
3397
3398 MEM_VOLATILE_P (to_rtx) = 1;
3399 }
3400 #if 0 /* This was turned off because, when a field is volatile
3401 in an object which is not volatile, the object may be in a register,
3402 and then we would abort over here. */
3403 else
3404 abort ();
3405 #endif
3406 }
3407
3408 if (TREE_CODE (to) == COMPONENT_REF
3409 && TREE_READONLY (TREE_OPERAND (to, 1)))
3410 {
3411 if (offset == 0)
3412 to_rtx = copy_rtx (to_rtx);
3413
3414 RTX_UNCHANGING_P (to_rtx) = 1;
3415 }
3416
3417 /* Check the access. */
3418 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3419 {
3420 rtx to_addr;
3421 int size;
3422 int best_mode_size;
3423 enum machine_mode best_mode;
3424
3425 best_mode = get_best_mode (bitsize, bitpos,
3426 TYPE_ALIGN (TREE_TYPE (tem)),
3427 mode1, volatilep);
3428 if (best_mode == VOIDmode)
3429 best_mode = QImode;
3430
3431 best_mode_size = GET_MODE_BITSIZE (best_mode);
3432 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3433 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3434 size *= GET_MODE_SIZE (best_mode);
3435
3436 /* Check the access right of the pointer. */
3437 if (size)
3438 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3439 to_addr, Pmode,
3440 GEN_INT (size), TYPE_MODE (sizetype),
3441 GEN_INT (MEMORY_USE_WO),
3442 TYPE_MODE (integer_type_node));
3443 }
3444
3445 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3446 (want_value
3447 /* Spurious cast makes HPUX compiler happy. */
3448 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3449 : VOIDmode),
3450 unsignedp,
3451 /* Required alignment of containing datum. */
3452 alignment,
3453 int_size_in_bytes (TREE_TYPE (tem)),
3454 get_alias_set (to));
3455 preserve_temp_slots (result);
3456 free_temp_slots ();
3457 pop_temp_slots ();
3458
3459 /* If the value is meaningful, convert RESULT to the proper mode.
3460 Otherwise, return nothing. */
3461 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3462 TYPE_MODE (TREE_TYPE (from)),
3463 result,
3464 TREE_UNSIGNED (TREE_TYPE (to)))
3465 : NULL_RTX);
3466 }
3467
3468 /* If the rhs is a function call and its value is not an aggregate,
3469 call the function before we start to compute the lhs.
3470 This is needed for correct code for cases such as
3471 val = setjmp (buf) on machines where reference to val
3472 requires loading up part of an address in a separate insn.
3473
3474 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3475 a promoted variable where the zero- or sign- extension needs to be done.
3476 Handling this in the normal way is safe because no computation is done
3477 before the call. */
3478 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3479 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3480 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3481 {
3482 rtx value;
3483
3484 push_temp_slots ();
3485 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3486 if (to_rtx == 0)
3487 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3488
3489 /* Handle calls that return values in multiple non-contiguous locations.
3490 The Irix 6 ABI has examples of this. */
3491 if (GET_CODE (to_rtx) == PARALLEL)
3492 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3493 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3494 else if (GET_MODE (to_rtx) == BLKmode)
3495 emit_block_move (to_rtx, value, expr_size (from),
3496 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3497 else
3498 {
3499 #ifdef POINTERS_EXTEND_UNSIGNED
3500 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3501 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3502 value = convert_memory_address (GET_MODE (to_rtx), value);
3503 #endif
3504 emit_move_insn (to_rtx, value);
3505 }
3506 preserve_temp_slots (to_rtx);
3507 free_temp_slots ();
3508 pop_temp_slots ();
3509 return want_value ? to_rtx : NULL_RTX;
3510 }
3511
3512 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3513 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3514
3515 if (to_rtx == 0)
3516 {
3517 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3518 if (GET_CODE (to_rtx) == MEM)
3519 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3520 }
3521
3522 /* Don't move directly into a return register. */
3523 if (TREE_CODE (to) == RESULT_DECL
3524 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3525 {
3526 rtx temp;
3527
3528 push_temp_slots ();
3529 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3530
3531 if (GET_CODE (to_rtx) == PARALLEL)
3532 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3533 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3534 else
3535 emit_move_insn (to_rtx, temp);
3536
3537 preserve_temp_slots (to_rtx);
3538 free_temp_slots ();
3539 pop_temp_slots ();
3540 return want_value ? to_rtx : NULL_RTX;
3541 }
3542
3543 /* In case we are returning the contents of an object which overlaps
3544 the place the value is being stored, use a safe function when copying
3545 a value through a pointer into a structure value return block. */
3546 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3547 && current_function_returns_struct
3548 && !current_function_returns_pcc_struct)
3549 {
3550 rtx from_rtx, size;
3551
3552 push_temp_slots ();
3553 size = expr_size (from);
3554 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3555 EXPAND_MEMORY_USE_DONT);
3556
3557 /* Copy the rights of the bitmap. */
3558 if (current_function_check_memory_usage)
3559 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3560 XEXP (to_rtx, 0), Pmode,
3561 XEXP (from_rtx, 0), Pmode,
3562 convert_to_mode (TYPE_MODE (sizetype),
3563 size, TREE_UNSIGNED (sizetype)),
3564 TYPE_MODE (sizetype));
3565
3566 #ifdef TARGET_MEM_FUNCTIONS
3567 emit_library_call (memcpy_libfunc, 0,
3568 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3569 XEXP (from_rtx, 0), Pmode,
3570 convert_to_mode (TYPE_MODE (sizetype),
3571 size, TREE_UNSIGNED (sizetype)),
3572 TYPE_MODE (sizetype));
3573 #else
3574 emit_library_call (bcopy_libfunc, 0,
3575 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3576 XEXP (to_rtx, 0), Pmode,
3577 convert_to_mode (TYPE_MODE (integer_type_node),
3578 size, TREE_UNSIGNED (integer_type_node)),
3579 TYPE_MODE (integer_type_node));
3580 #endif
3581
3582 preserve_temp_slots (to_rtx);
3583 free_temp_slots ();
3584 pop_temp_slots ();
3585 return want_value ? to_rtx : NULL_RTX;
3586 }
3587
3588 /* Compute FROM and store the value in the rtx we got. */
3589
3590 push_temp_slots ();
3591 result = store_expr (from, to_rtx, want_value);
3592 preserve_temp_slots (result);
3593 free_temp_slots ();
3594 pop_temp_slots ();
3595 return want_value ? result : NULL_RTX;
3596 }
3597
3598 /* Generate code for computing expression EXP,
3599 and storing the value into TARGET.
3600 TARGET may contain a QUEUED rtx.
3601
3602 If WANT_VALUE is nonzero, return a copy of the value
3603 not in TARGET, so that we can be sure to use the proper
3604 value in a containing expression even if TARGET has something
3605 else stored in it. If possible, we copy the value through a pseudo
3606 and return that pseudo. Or, if the value is constant, we try to
3607 return the constant. In some cases, we return a pseudo
3608 copied *from* TARGET.
3609
3610 If the mode is BLKmode then we may return TARGET itself.
3611 It turns out that in BLKmode it doesn't cause a problem,
3612 because C has no operators that could combine two different
3613 assignments into the same BLKmode object with different values
3614 with no sequence point. Will other languages need this to
3615 be more thorough?
3616
3617 If WANT_VALUE is 0, we return NULL, to make sure
3618 to catch quickly any cases where the caller uses the value
3619 and fails to set WANT_VALUE. */
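/* Illustrative example: for a chained assignment such as `a = b = c;',
   the inner assignment is expanded with WANT_VALUE nonzero so that a
   reliable copy of the stored value is available to the containing
   expression.  */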
3620
3621 rtx
3622 store_expr (exp, target, want_value)
3623 register tree exp;
3624 register rtx target;
3625 int want_value;
3626 {
3627 register rtx temp;
3628 int dont_return_target = 0;
3629
3630 if (TREE_CODE (exp) == COMPOUND_EXPR)
3631 {
3632 /* Perform first part of compound expression, then assign from second
3633 part. */
3634 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3635 emit_queue ();
3636 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3637 }
3638 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3639 {
3640 /* For conditional expression, get safe form of the target. Then
3641 test the condition, doing the appropriate assignment on either
3642 side. This avoids the creation of unnecessary temporaries.
3643 For non-BLKmode, it is more efficient not to do this. */
3644
3645 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3646
3647 emit_queue ();
3648 target = protect_from_queue (target, 1);
3649
3650 do_pending_stack_adjust ();
3651 NO_DEFER_POP;
3652 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3653 start_cleanup_deferral ();
3654 store_expr (TREE_OPERAND (exp, 1), target, 0);
3655 end_cleanup_deferral ();
3656 emit_queue ();
3657 emit_jump_insn (gen_jump (lab2));
3658 emit_barrier ();
3659 emit_label (lab1);
3660 start_cleanup_deferral ();
3661 store_expr (TREE_OPERAND (exp, 2), target, 0);
3662 end_cleanup_deferral ();
3663 emit_queue ();
3664 emit_label (lab2);
3665 OK_DEFER_POP;
3666
3667 return want_value ? target : NULL_RTX;
3668 }
3669 else if (queued_subexp_p (target))
3670 /* If target contains a postincrement, let's not risk
3671 using it as the place to generate the rhs. */
3672 {
3673 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3674 {
3675 /* Expand EXP into a new pseudo. */
3676 temp = gen_reg_rtx (GET_MODE (target));
3677 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3678 }
3679 else
3680 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3681
3682 /* If target is volatile, ANSI requires accessing the value
3683 *from* the target, if it is accessed. So make that happen.
3684 In no case return the target itself. */
3685 if (! MEM_VOLATILE_P (target) && want_value)
3686 dont_return_target = 1;
3687 }
3688 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3689 && GET_MODE (target) != BLKmode)
3690 /* If target is in memory and caller wants value in a register instead,
3691 arrange that. Pass TARGET as target for expand_expr so that,
3692 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3693 We know expand_expr will not use the target in that case.
3694 Don't do this if TARGET is volatile because we are supposed
3695 to write it and then read it. */
3696 {
3697 temp = expand_expr (exp, target, GET_MODE (target), 0);
3698 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3699 temp = copy_to_reg (temp);
3700 dont_return_target = 1;
3701 }
3702 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3703 /* If this is a scalar in a register that is stored in a wider mode
3704 than the declared mode, compute the result into its declared mode
3705 and then convert to the wider mode. Our value is the computed
3706 expression. */
3707 {
3708 /* If we don't want a value, we can do the conversion inside EXP,
3709 which will often result in some optimizations. Do the conversion
3710 in two steps: first change the signedness, if needed, then
3711 the extend. But don't do this if the type of EXP is a subtype
3712 of something else since then the conversion might involve
3713 more than just converting modes. */
3714 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3715 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3716 {
3717 if (TREE_UNSIGNED (TREE_TYPE (exp))
3718 != SUBREG_PROMOTED_UNSIGNED_P (target))
3719 exp
3720 = convert
3721 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3722 TREE_TYPE (exp)),
3723 exp);
3724
3725 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3726 SUBREG_PROMOTED_UNSIGNED_P (target)),
3727 exp);
3728 }
3729
3730 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3731
3732 /* If TEMP is a volatile MEM and we want a result value, make
3733 the access now so it gets done only once. Likewise if
3734 it contains TARGET. */
3735 if (GET_CODE (temp) == MEM && want_value
3736 && (MEM_VOLATILE_P (temp)
3737 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3738 temp = copy_to_reg (temp);
3739
3740 /* If TEMP is a VOIDmode constant, use convert_modes to make
3741 sure that we properly convert it. */
3742 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3743 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3744 TYPE_MODE (TREE_TYPE (exp)), temp,
3745 SUBREG_PROMOTED_UNSIGNED_P (target));
3746
3747 convert_move (SUBREG_REG (target), temp,
3748 SUBREG_PROMOTED_UNSIGNED_P (target));
3749
3750 /* If we promoted a constant, change the mode back down to match
3751 target. Otherwise, the caller might get confused by a result whose
3752 mode is larger than expected. */
3753
3754 if (want_value && GET_MODE (temp) != GET_MODE (target)
3755 && GET_MODE (temp) != VOIDmode)
3756 {
3757 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3758 SUBREG_PROMOTED_VAR_P (temp) = 1;
3759 SUBREG_PROMOTED_UNSIGNED_P (temp)
3760 = SUBREG_PROMOTED_UNSIGNED_P (target);
3761 }
3762
3763 return want_value ? temp : NULL_RTX;
3764 }
3765 else
3766 {
3767 temp = expand_expr (exp, target, GET_MODE (target), 0);
3768 /* Return TARGET if it's a specified hardware register.
3769 If TARGET is a volatile mem ref, either return TARGET
3770 or return a reg copied *from* TARGET; ANSI requires this.
3771
3772 Otherwise, if TEMP is not TARGET, return TEMP
3773 if it is constant (for efficiency),
3774 or if we really want the correct value. */
3775 if (!(target && GET_CODE (target) == REG
3776 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3777 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3778 && ! rtx_equal_p (temp, target)
3779 && (CONSTANT_P (temp) || want_value))
3780 dont_return_target = 1;
3781 }
3782
3783 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3784 the same as that of TARGET, adjust the constant. This is needed, for
3785 example, in case it is a CONST_DOUBLE and we want only a word-sized
3786 value. */
3787 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3788 && TREE_CODE (exp) != ERROR_MARK
3789 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3790 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3791 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3792
3793 if (current_function_check_memory_usage
3794 && GET_CODE (target) == MEM
3795 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3796 {
3797 if (GET_CODE (temp) == MEM)
3798 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3799 XEXP (target, 0), Pmode,
3800 XEXP (temp, 0), Pmode,
3801 expr_size (exp), TYPE_MODE (sizetype));
3802 else
3803 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3804 XEXP (target, 0), Pmode,
3805 expr_size (exp), TYPE_MODE (sizetype),
3806 GEN_INT (MEMORY_USE_WO),
3807 TYPE_MODE (integer_type_node));
3808 }
3809
3810 /* If value was not generated in the target, store it there.
3811 Convert the value to TARGET's type first if necessary. */
3812 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3813 one or both of them are volatile memory refs, we have to distinguish
3814 two cases:
3815 - expand_expr has used TARGET. In this case, we must not generate
3816 another copy. This can be detected by TEMP and TARGET comparing
3817 equal according to == .
3818 - expand_expr has not used TARGET - that means that the source just
3819 happens to have the same RTX form. Since temp will have been created
3820 by expand_expr, it will compare unequal according to == .
3821 We must generate a copy in this case, to reach the correct number
3822 of volatile memory references. */
3823
3824 if ((! rtx_equal_p (temp, target)
3825 || (temp != target && (side_effects_p (temp)
3826 || side_effects_p (target))))
3827 && TREE_CODE (exp) != ERROR_MARK)
3828 {
3829 target = protect_from_queue (target, 1);
3830 if (GET_MODE (temp) != GET_MODE (target)
3831 && GET_MODE (temp) != VOIDmode)
3832 {
3833 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3834 if (dont_return_target)
3835 {
3836 /* In this case, we will return TEMP,
3837 so make sure it has the proper mode.
3838 But don't forget to store the value into TARGET. */
3839 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3840 emit_move_insn (target, temp);
3841 }
3842 else
3843 convert_move (target, temp, unsignedp);
3844 }
3845
3846 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3847 {
3848 /* Handle copying a string constant into an array.
3849 The string constant may be shorter than the array.
3850 So copy just the string's actual length, and clear the rest. */
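/* Illustrative example: for `char buf[6] = "hi";' the target size is 6
   while TREE_STRING_LENGTH is 3 (including the terminating null), so
   three bytes are block-copied and the remaining three are cleared.  */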
3851 rtx size;
3852 rtx addr;
3853
3854 /* Get the size of the data type of the string,
3855 which is actually the size of the target. */
3856 size = expr_size (exp);
3857 if (GET_CODE (size) == CONST_INT
3858 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3859 emit_block_move (target, temp, size,
3860 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3861 else
3862 {
3863 /* Compute the size of the data to copy from the string. */
3864 tree copy_size
3865 = size_binop (MIN_EXPR,
3866 make_tree (sizetype, size),
3867 size_int (TREE_STRING_LENGTH (exp)));
3868 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3869 VOIDmode, 0);
3870 rtx label = 0;
3871
3872 /* Copy that much. */
3873 emit_block_move (target, temp, copy_size_rtx,
3874 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3875
3876 /* Figure out how much is left in TARGET that we have to clear.
3877 Do all calculations in ptr_mode. */
3878
3879 addr = XEXP (target, 0);
3880 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3881
3882 if (GET_CODE (copy_size_rtx) == CONST_INT)
3883 {
3884 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3885 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3886 }
3887 else
3888 {
3889 addr = force_reg (ptr_mode, addr);
3890 addr = expand_binop (ptr_mode, add_optab, addr,
3891 copy_size_rtx, NULL_RTX, 0,
3892 OPTAB_LIB_WIDEN);
3893
3894 size = expand_binop (ptr_mode, sub_optab, size,
3895 copy_size_rtx, NULL_RTX, 0,
3896 OPTAB_LIB_WIDEN);
3897
3898 label = gen_label_rtx ();
3899 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3900 GET_MODE (size), 0, 0, label);
3901 }
3902
3903 if (size != const0_rtx)
3904 {
3905 /* Be sure we can write on ADDR. */
3906 if (current_function_check_memory_usage)
3907 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3908 addr, Pmode,
3909 size, TYPE_MODE (sizetype),
3910 GEN_INT (MEMORY_USE_WO),
3911 TYPE_MODE (integer_type_node));
3912 #ifdef TARGET_MEM_FUNCTIONS
3913 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3914 addr, ptr_mode,
3915 const0_rtx, TYPE_MODE (integer_type_node),
3916 convert_to_mode (TYPE_MODE (sizetype),
3917 size,
3918 TREE_UNSIGNED (sizetype)),
3919 TYPE_MODE (sizetype));
3920 #else
3921 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3922 addr, ptr_mode,
3923 convert_to_mode (TYPE_MODE (integer_type_node),
3924 size,
3925 TREE_UNSIGNED (integer_type_node)),
3926 TYPE_MODE (integer_type_node));
3927 #endif
3928 }
3929
3930 if (label)
3931 emit_label (label);
3932 }
3933 }
3934 /* Handle calls that return values in multiple non-contiguous locations.
3935 The Irix 6 ABI has examples of this. */
3936 else if (GET_CODE (target) == PARALLEL)
3937 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3938 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3939 else if (GET_MODE (temp) == BLKmode)
3940 emit_block_move (target, temp, expr_size (exp),
3941 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3942 else
3943 emit_move_insn (target, temp);
3944 }
3945
3946 /* If we don't want a value, return NULL_RTX. */
3947 if (! want_value)
3948 return NULL_RTX;
3949
3950 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3951 ??? The latter test doesn't seem to make sense. */
3952 else if (dont_return_target && GET_CODE (temp) != MEM)
3953 return temp;
3954
3955 /* Return TARGET itself if it is a hard register. */
3956 else if (want_value && GET_MODE (target) != BLKmode
3957 && ! (GET_CODE (target) == REG
3958 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3959 return copy_to_reg (target);
3960
3961 else
3962 return target;
3963 }
3964 \f
3965 /* Return 1 if EXP just contains zeros. */
3966
3967 static int
3968 is_zeros_p (exp)
3969 tree exp;
3970 {
3971 tree elt;
3972
3973 switch (TREE_CODE (exp))
3974 {
3975 case CONVERT_EXPR:
3976 case NOP_EXPR:
3977 case NON_LVALUE_EXPR:
3978 return is_zeros_p (TREE_OPERAND (exp, 0));
3979
3980 case INTEGER_CST:
3981 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3982
3983 case COMPLEX_CST:
3984 return
3985 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3986
3987 case REAL_CST:
3988 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3989
3990 case CONSTRUCTOR:
3991 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3992 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3993 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3994 if (! is_zeros_p (TREE_VALUE (elt)))
3995 return 0;
3996
3997 return 1;
3998
3999 default:
4000 return 0;
4001 }
4002 }
4003
4004 /* Return 1 if EXP contains mostly (3/4) zeros. */
4005
4006 static int
4007 mostly_zeros_p (exp)
4008 tree exp;
4009 {
4010 if (TREE_CODE (exp) == CONSTRUCTOR)
4011 {
4012 int elts = 0, zeros = 0;
4013 tree elt = CONSTRUCTOR_ELTS (exp);
4014 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4015 {
4016 /* If there are no ranges of true bits, it is all zero. */
4017 return elt == NULL_TREE;
4018 }
4019 for (; elt; elt = TREE_CHAIN (elt))
4020 {
4021 /* We do not handle the case where the index is a RANGE_EXPR,
4022 so the statistic will be somewhat inaccurate.
4023 We do make a more accurate count in store_constructor itself,
4024 and since this function is only used for nested array elements,
4025 this should be close enough. */
4026 if (mostly_zeros_p (TREE_VALUE (elt)))
4027 zeros++;
4028 elts++;
4029 }
4030
4031 return 4 * zeros >= 3 * elts;
4032 }
4033
4034 return is_zeros_p (exp);
4035 }
4036 \f
4037 /* Helper function for store_constructor.
4038 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4039 TYPE is the type of the CONSTRUCTOR, not the element type.
4040 ALIGN and CLEARED are as for store_constructor.
4041
4042 This provides a recursive shortcut back to store_constructor when it isn't
4043 necessary to go through store_field. This is so that we can pass through
4044 the cleared field to let store_constructor know that we may not have to
4045 clear a substructure if the outer structure has already been cleared. */
4046
4047 static void
4048 store_constructor_field (target, bitsize, bitpos,
4049 mode, exp, type, align, cleared)
4050 rtx target;
4051 int bitsize, bitpos;
4052 enum machine_mode mode;
4053 tree exp, type;
4054 unsigned int align;
4055 int cleared;
4056 {
4057 if (TREE_CODE (exp) == CONSTRUCTOR
4058 && bitpos % BITS_PER_UNIT == 0
4059 /* If we have a non-zero bitpos for a register target, then we just
4060 let store_field do the bitfield handling. This is unlikely to
4061 generate unnecessary clear instructions anyway. */
4062 && (bitpos == 0 || GET_CODE (target) == MEM))
4063 {
4064 if (bitpos != 0)
4065 target
4066 = change_address (target,
4067 GET_MODE (target) == BLKmode
4068 || 0 != (bitpos
4069 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4070 ? BLKmode : VOIDmode,
4071 plus_constant (XEXP (target, 0),
4072 bitpos / BITS_PER_UNIT));
4073 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4074 }
4075 else
4076 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4077 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4078 int_size_in_bytes (type), 0);
4079 }
4080
4081 /* Store the value of constructor EXP into the rtx TARGET.
4082 TARGET is either a REG or a MEM.
4083 ALIGN is the maximum known alignment for TARGET, in bits.
4084 CLEARED is true if TARGET is known to have been zero'd.
4085 SIZE is the number of bytes of TARGET we are allowed to modify: this
4086 may not be the same as the size of EXP if we are assigning to a field
4087 which has been packed to exclude padding bits. */
4088
4089 static void
4090 store_constructor (exp, target, align, cleared, size)
4091 tree exp;
4092 rtx target;
4093 unsigned int align;
4094 int cleared;
4095 int size;
4096 {
4097 tree type = TREE_TYPE (exp);
4098 #ifdef WORD_REGISTER_OPERATIONS
4099 rtx exp_size = expr_size (exp);
4100 #endif
4101
4102 /* We know our target cannot conflict, since safe_from_p has been called. */
4103 #if 0
4104 /* Don't try copying piece by piece into a hard register
4105 since that is vulnerable to being clobbered by EXP.
4106 Instead, construct in a pseudo register and then copy it all. */
4107 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4108 {
4109 rtx temp = gen_reg_rtx (GET_MODE (target));
4110 store_constructor (exp, temp, align, cleared, size);
4111 emit_move_insn (target, temp);
4112 return;
4113 }
4114 #endif
4115
4116 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4117 || TREE_CODE (type) == QUAL_UNION_TYPE)
4118 {
4119 register tree elt;
4120
4121 /* Inform later passes that the whole union value is dead. */
4122 if ((TREE_CODE (type) == UNION_TYPE
4123 || TREE_CODE (type) == QUAL_UNION_TYPE)
4124 && ! cleared)
4125 {
4126 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4127
4128 /* If the constructor is empty, clear the union. */
4129 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4130 clear_storage (target, expr_size (exp),
4131 TYPE_ALIGN (type) / BITS_PER_UNIT);
4132 }
4133
4134 /* If we are building a static constructor into a register,
4135 set the initial value as zero so we can fold the value into
4136 a constant. But if more than one register is involved,
4137 this probably loses. */
4138 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4139 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4140 {
4141 if (! cleared)
4142 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4143
4144 cleared = 1;
4145 }
4146
4147 /* If the constructor has fewer fields than the structure
4148 or if we are initializing the structure to mostly zeros,
4149 clear the whole structure first. */
4150 else if (size > 0
4151 && ((list_length (CONSTRUCTOR_ELTS (exp))
4152 != list_length (TYPE_FIELDS (type)))
4153 || mostly_zeros_p (exp)))
4154 {
4155 if (! cleared)
4156 clear_storage (target, GEN_INT (size),
4157 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4158
4159 cleared = 1;
4160 }
4161 else if (! cleared)
4162 /* Inform later passes that the old value is dead. */
4163 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4164
4165 /* Store each element of the constructor into
4166 the corresponding field of TARGET. */
4167
4168 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4169 {
4170 register tree field = TREE_PURPOSE (elt);
4171 #ifdef WORD_REGISTER_OPERATIONS
4172 tree value = TREE_VALUE (elt);
4173 #endif
4174 register enum machine_mode mode;
4175 int bitsize;
4176 int bitpos = 0;
4177 int unsignedp;
4178 tree pos, constant = 0, offset = 0;
4179 rtx to_rtx = target;
4180
4181 /* Just ignore missing fields.
4182 We cleared the whole structure, above,
4183 if any fields are missing. */
4184 if (field == 0)
4185 continue;
4186
4187 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4188 continue;
4189
4190 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4191 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4192 else
4193 bitsize = -1;
4194
4195 unsignedp = TREE_UNSIGNED (field);
4196 mode = DECL_MODE (field);
4197 if (DECL_BIT_FIELD (field))
4198 mode = VOIDmode;
4199
4200 pos = DECL_FIELD_BITPOS (field);
4201 if (TREE_CODE (pos) == INTEGER_CST)
4202 constant = pos;
4203 else if (TREE_CODE (pos) == PLUS_EXPR
4204 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4205 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4206 else
4207 offset = pos;
4208
4209 if (constant)
4210 bitpos = TREE_INT_CST_LOW (constant);
4211
4212 if (offset)
4213 {
4214 rtx offset_rtx;
4215
4216 if (contains_placeholder_p (offset))
4217 offset = build (WITH_RECORD_EXPR, bitsizetype,
4218 offset, make_tree (TREE_TYPE (exp), target));
4219
4220 offset = size_binop (EXACT_DIV_EXPR, offset,
4221 bitsize_int (BITS_PER_UNIT));
4222 offset = convert (sizetype, offset);
4223
4224 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4225 if (GET_CODE (to_rtx) != MEM)
4226 abort ();
4227
4228 if (GET_MODE (offset_rtx) != ptr_mode)
4229 {
4230 #ifdef POINTERS_EXTEND_UNSIGNED
4231 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4232 #else
4233 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4234 #endif
4235 }
4236
4237 to_rtx
4238 = change_address (to_rtx, VOIDmode,
4239 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4240 force_reg (ptr_mode,
4241 offset_rtx)));
4242 }
4243
4244 if (TREE_READONLY (field))
4245 {
4246 if (GET_CODE (to_rtx) == MEM)
4247 to_rtx = copy_rtx (to_rtx);
4248
4249 RTX_UNCHANGING_P (to_rtx) = 1;
4250 }
4251
4252 #ifdef WORD_REGISTER_OPERATIONS
4253 /* If this initializes a field that is smaller than a word, at the
4254 start of a word, try to widen it to a full word.
4255 This special case allows us to output C++ member function
4256 initializations in a form that the optimizers can understand. */
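/* Illustrative example: on a 32-bit word target, a constant stored into
   a 16-bit field at bit position 0 of a register-held structure is
   widened here to a full word_mode constant (shifted up when
   BYTES_BIG_ENDIAN), so the store becomes an ordinary word move.  */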
4257 if (constant
4258 && GET_CODE (target) == REG
4259 && bitsize < BITS_PER_WORD
4260 && bitpos % BITS_PER_WORD == 0
4261 && GET_MODE_CLASS (mode) == MODE_INT
4262 && TREE_CODE (value) == INTEGER_CST
4263 && GET_CODE (exp_size) == CONST_INT
4264 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4265 {
4266 tree type = TREE_TYPE (value);
4267 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4268 {
4269 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4270 value = convert (type, value);
4271 }
4272 if (BYTES_BIG_ENDIAN)
4273 value
4274 = fold (build (LSHIFT_EXPR, type, value,
4275 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4276 bitsize = BITS_PER_WORD;
4277 mode = word_mode;
4278 }
4279 #endif
4280 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4281 TREE_VALUE (elt), type,
4282 MIN (align,
4283 DECL_ALIGN (TREE_PURPOSE (elt))),
4284 cleared);
4285 }
4286 }
4287 else if (TREE_CODE (type) == ARRAY_TYPE)
4288 {
4289 register tree elt;
4290 register int i;
4291 int need_to_clear;
4292 tree domain = TYPE_DOMAIN (type);
4293 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4294 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4295 tree elttype = TREE_TYPE (type);
4296
4297 /* If the constructor has fewer elements than the array,
4298 clear the whole array first. Similarly if this is
4299 a static constructor of a non-BLKmode object. */
4300 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4301 need_to_clear = 1;
4302 else
4303 {
4304 HOST_WIDE_INT count = 0, zero_count = 0;
4305 need_to_clear = 0;
4306 /* This loop is a more accurate version of the loop in
4307 mostly_zeros_p (it handles RANGE_EXPR in an index).
4308 It is also needed to check for missing elements. */
4309 for (elt = CONSTRUCTOR_ELTS (exp);
4310 elt != NULL_TREE;
4311 elt = TREE_CHAIN (elt))
4312 {
4313 tree index = TREE_PURPOSE (elt);
4314 HOST_WIDE_INT this_node_count;
4315 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4316 {
4317 tree lo_index = TREE_OPERAND (index, 0);
4318 tree hi_index = TREE_OPERAND (index, 1);
4319 if (TREE_CODE (lo_index) != INTEGER_CST
4320 || TREE_CODE (hi_index) != INTEGER_CST)
4321 {
4322 need_to_clear = 1;
4323 break;
4324 }
4325 this_node_count = TREE_INT_CST_LOW (hi_index)
4326 - TREE_INT_CST_LOW (lo_index) + 1;
4327 }
4328 else
4329 this_node_count = 1;
4330 count += this_node_count;
4331 if (mostly_zeros_p (TREE_VALUE (elt)))
4332 zero_count += this_node_count;
4333 }
4334 /* Clear the entire array first if there are any missing elements,
4335 or if the incidence of zero elements is >= 75%. */
4336 if (count < maxelt - minelt + 1
4337 || 4 * zero_count >= 3 * count)
4338 need_to_clear = 1;
4339 }
4340 if (need_to_clear && size > 0)
4341 {
4342 if (! cleared)
4343 clear_storage (target, GEN_INT (size),
4344 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4345 cleared = 1;
4346 }
4347 else
4348 /* Inform later passes that the old value is dead. */
4349 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4350
4351 /* Store each element of the constructor into
4352 the corresponding element of TARGET, determined
4353 by counting the elements. */
4354 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4355 elt;
4356 elt = TREE_CHAIN (elt), i++)
4357 {
4358 register enum machine_mode mode;
4359 int bitsize;
4360 int bitpos;
4361 int unsignedp;
4362 tree value = TREE_VALUE (elt);
4363 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4364 tree index = TREE_PURPOSE (elt);
4365 rtx xtarget = target;
4366
4367 if (cleared && is_zeros_p (value))
4368 continue;
4369
4370 unsignedp = TREE_UNSIGNED (elttype);
4371 mode = TYPE_MODE (elttype);
4372 if (mode == BLKmode)
4373 {
4374 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4375 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4376 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4377 else
4378 bitsize = -1;
4379 }
4380 else
4381 bitsize = GET_MODE_BITSIZE (mode);
4382
4383 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4384 {
4385 tree lo_index = TREE_OPERAND (index, 0);
4386 tree hi_index = TREE_OPERAND (index, 1);
4387 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4388 struct nesting *loop;
4389 HOST_WIDE_INT lo, hi, count;
4390 tree position;
4391
4392 /* If the range is constant and "small", unroll the loop. */
4393 if (TREE_CODE (lo_index) == INTEGER_CST
4394 && TREE_CODE (hi_index) == INTEGER_CST
4395 && (lo = TREE_INT_CST_LOW (lo_index),
4396 hi = TREE_INT_CST_LOW (hi_index),
4397 count = hi - lo + 1,
4398 (GET_CODE (target) != MEM
4399 || count <= 2
4400 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4401 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4402 <= 40 * 8))))
4403 {
4404 lo -= minelt; hi -= minelt;
4405 for (; lo <= hi; lo++)
4406 {
4407 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4408 store_constructor_field (target, bitsize, bitpos, mode,
4409 value, type, align, cleared);
4410 }
4411 }
4412 else
4413 {
4414 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4415 loop_top = gen_label_rtx ();
4416 loop_end = gen_label_rtx ();
4417
4418 unsignedp = TREE_UNSIGNED (domain);
4419
4420 index = build_decl (VAR_DECL, NULL_TREE, domain);
4421
4422 DECL_RTL (index) = index_r
4423 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4424 &unsignedp, 0));
4425
4426 if (TREE_CODE (value) == SAVE_EXPR
4427 && SAVE_EXPR_RTL (value) == 0)
4428 {
4429 /* Make sure value gets expanded once before the
4430 loop. */
4431 expand_expr (value, const0_rtx, VOIDmode, 0);
4432 emit_queue ();
4433 }
4434 store_expr (lo_index, index_r, 0);
4435 loop = expand_start_loop (0);
4436
4437 /* Assign value to element index. */
4438 position
4439 = convert (ssizetype,
4440 fold (build (MINUS_EXPR, TREE_TYPE (index),
4441 index, TYPE_MIN_VALUE (domain))));
4442 position = size_binop (MULT_EXPR, position,
4443 convert (ssizetype,
4444 TYPE_SIZE_UNIT (elttype)));
4445
4446 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4447 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4448 xtarget = change_address (target, mode, addr);
4449 if (TREE_CODE (value) == CONSTRUCTOR)
4450 store_constructor (value, xtarget, align, cleared,
4451 bitsize / BITS_PER_UNIT);
4452 else
4453 store_expr (value, xtarget, 0);
4454
4455 expand_exit_loop_if_false (loop,
4456 build (LT_EXPR, integer_type_node,
4457 index, hi_index));
4458
4459 expand_increment (build (PREINCREMENT_EXPR,
4460 TREE_TYPE (index),
4461 index, integer_one_node), 0, 0);
4462 expand_end_loop ();
4463 emit_label (loop_end);
4464 }
4465 }
4466 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4467 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4468 {
4469 rtx pos_rtx, addr;
4470 tree position;
4471
4472 if (index == 0)
4473 index = ssize_int (i);
4474
4475 if (minelt)
4476 index = convert (ssizetype,
4477 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4478 TYPE_MIN_VALUE (domain))));
4479 position = size_binop (MULT_EXPR, index,
4480 convert (ssizetype,
4481 TYPE_SIZE_UNIT (elttype)));
4482 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4483 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4484 xtarget = change_address (target, mode, addr);
4485 store_expr (value, xtarget, 0);
4486 }
4487 else
4488 {
4489 if (index != 0)
4490 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4491 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4492 else
4493 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4494 store_constructor_field (target, bitsize, bitpos, mode, value,
4495 type, align, cleared);
4496 }
4497 }
4498 }
4499 /* set constructor assignments */
4500 else if (TREE_CODE (type) == SET_TYPE)
4501 {
4502 tree elt = CONSTRUCTOR_ELTS (exp);
4503 int nbytes = int_size_in_bytes (type), nbits;
4504 tree domain = TYPE_DOMAIN (type);
4505 tree domain_min, domain_max, bitlength;
4506
4507 /* The default implementation strategy is to extract the constant
4508 parts of the constructor, use that to initialize the target,
4509 and then "or" in whatever non-constant ranges we need in addition.
4510
4511 If a large set is all zero or all ones, it is
4512 probably better to set it using memset (if available) or bzero.
4513 Also, if a large set has just a single range, it may be
4514 better to first clear the whole set (using
4515 bzero/memset), and then set the bits we want. */
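/* Illustrative example: for a set constructor mixing a constant range
   such as [2..5] with a range whose bounds are only known at run time,
   the constant bits are written word by word (or the target is simply
   cleared), and each remaining range is then set either with memset,
   when its bounds are byte-aligned constants, or with __setbits.  */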
4516
4517 /* Check for all zeros. */
4518 if (elt == NULL_TREE && size > 0)
4519 {
4520 if (!cleared)
4521 clear_storage (target, GEN_INT (size),
4522 TYPE_ALIGN (type) / BITS_PER_UNIT);
4523 return;
4524 }
4525
4526 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4527 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4528 bitlength = size_binop (PLUS_EXPR,
4529 size_diffop (domain_max, domain_min),
4530 ssize_int (1));
4531
4532 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4533 abort ();
4534 nbits = TREE_INT_CST_LOW (bitlength);
4535
4536 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4537 are "complicated" (more than one range), initialize (the
4538 constant parts) by copying from a constant. */
4539 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4540 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4541 {
4542 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4543 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4544 char *bit_buffer = (char *) alloca (nbits);
4545 HOST_WIDE_INT word = 0;
4546 int bit_pos = 0;
4547 int ibit = 0;
4548 int offset = 0; /* In bytes from beginning of set. */
4549 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
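/* Pack the bits from BIT_BUFFER into successive set words, flushing each
   completed word (and the final partial word) into the target; all-zero
   words are skipped when the target has already been cleared.  */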
4550 for (;;)
4551 {
4552 if (bit_buffer[ibit])
4553 {
4554 if (BYTES_BIG_ENDIAN)
4555 word |= (1 << (set_word_size - 1 - bit_pos));
4556 else
4557 word |= 1 << bit_pos;
4558 }
4559 bit_pos++; ibit++;
4560 if (bit_pos >= set_word_size || ibit == nbits)
4561 {
4562 if (word != 0 || ! cleared)
4563 {
4564 rtx datum = GEN_INT (word);
4565 rtx to_rtx;
4566 /* The assumption here is that it is safe to use
4567 XEXP if the set is multi-word, but not if
4568 it's single-word. */
4569 if (GET_CODE (target) == MEM)
4570 {
4571 to_rtx = plus_constant (XEXP (target, 0), offset);
4572 to_rtx = change_address (target, mode, to_rtx);
4573 }
4574 else if (offset == 0)
4575 to_rtx = target;
4576 else
4577 abort ();
4578 emit_move_insn (to_rtx, datum);
4579 }
4580 if (ibit == nbits)
4581 break;
4582 word = 0;
4583 bit_pos = 0;
4584 offset += set_word_size / BITS_PER_UNIT;
4585 }
4586 }
4587 }
4588 else if (!cleared)
4589 {
4590 /* Don't bother clearing storage if the set is all ones. */
4591 if (TREE_CHAIN (elt) != NULL_TREE
4592 || (TREE_PURPOSE (elt) == NULL_TREE
4593 ? nbits != 1
4594 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4595 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4596 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4597 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4598 != nbits))))
4599 clear_storage (target, expr_size (exp),
4600 TYPE_ALIGN (type) / BITS_PER_UNIT);
4601 }
4602
4603 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4604 {
4605 /* start of range of element or NULL */
4606 tree startbit = TREE_PURPOSE (elt);
4607 /* end of range of element, or element value */
4608 tree endbit = TREE_VALUE (elt);
4609 #ifdef TARGET_MEM_FUNCTIONS
4610 HOST_WIDE_INT startb, endb;
4611 #endif
4612 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4613
4614 bitlength_rtx = expand_expr (bitlength,
4615 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4616
4617 /* handle non-range tuple element like [ expr ] */
4618 if (startbit == NULL_TREE)
4619 {
4620 startbit = save_expr (endbit);
4621 endbit = startbit;
4622 }
4623 startbit = convert (sizetype, startbit);
4624 endbit = convert (sizetype, endbit);
4625 if (! integer_zerop (domain_min))
4626 {
4627 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4628 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4629 }
4630 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4631 EXPAND_CONST_ADDRESS);
4632 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4633 EXPAND_CONST_ADDRESS);
4634
4635 if (REG_P (target))
4636 {
4637 targetx = assign_stack_temp (GET_MODE (target),
4638 GET_MODE_SIZE (GET_MODE (target)),
4639 0);
4640 emit_move_insn (targetx, target);
4641 }
4642 else if (GET_CODE (target) == MEM)
4643 targetx = target;
4644 else
4645 abort ();
4646
4647 #ifdef TARGET_MEM_FUNCTIONS
4648 /* Optimization: If startbit and endbit are
4649 constants divisible by BITS_PER_UNIT,
4650 call memset instead. */
4651 if (TREE_CODE (startbit) == INTEGER_CST
4652 && TREE_CODE (endbit) == INTEGER_CST
4653 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4654 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4655 {
4656 emit_library_call (memset_libfunc, 0,
4657 VOIDmode, 3,
4658 plus_constant (XEXP (targetx, 0),
4659 startb / BITS_PER_UNIT),
4660 Pmode,
4661 constm1_rtx, TYPE_MODE (integer_type_node),
4662 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4663 TYPE_MODE (sizetype));
4664 }
4665 else
4666 #endif
4667 {
4668 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4669 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4670 bitlength_rtx, TYPE_MODE (sizetype),
4671 startbit_rtx, TYPE_MODE (sizetype),
4672 endbit_rtx, TYPE_MODE (sizetype));
4673 }
4674 if (REG_P (target))
4675 emit_move_insn (target, targetx);
4676 }
4677 }
4678
4679 else
4680 abort ();
4681 }
4682
4683 /* Store the value of EXP (an expression tree)
4684 into a subfield of TARGET which has mode MODE and occupies
4685 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4686 If MODE is VOIDmode, it means that we are storing into a bit-field.
4687
4688 If VALUE_MODE is VOIDmode, return nothing in particular.
4689 UNSIGNEDP is not used in this case.
4690
4691 Otherwise, return an rtx for the value stored. This rtx
4692 has mode VALUE_MODE if that is convenient to do.
4693 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4694
4695 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4696 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4697
4698 ALIAS_SET is the alias set for the destination. This value will
4699 (in general) be different from that for TARGET, since TARGET is a
4700 reference to the containing structure. */
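/* Illustrative example: an assignment to a 5-bit bit-field placed 3 bits
   into its containing object reaches this function with BITSIZE 5,
   BITPOS 3 and MODE VOIDmode, which selects the bit-field path below.  */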
4701
4702 static rtx
4703 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4704 unsignedp, align, total_size, alias_set)
4705 rtx target;
4706 int bitsize, bitpos;
4707 enum machine_mode mode;
4708 tree exp;
4709 enum machine_mode value_mode;
4710 int unsignedp;
4711 unsigned int align;
4712 int total_size;
4713 int alias_set;
4714 {
4715 HOST_WIDE_INT width_mask = 0;
4716
4717 if (TREE_CODE (exp) == ERROR_MARK)
4718 return const0_rtx;
4719
4720 if (bitsize < HOST_BITS_PER_WIDE_INT)
4721 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4722
4723 /* If we are storing into an unaligned field of an aligned union that is
4724 in a register, we may have the mode of TARGET being an integer mode but
4725 MODE == BLKmode. In that case, get an aligned object whose size and
4726 alignment are the same as TARGET and store TARGET into it (we can avoid
4727 the store if the field being stored is the entire width of TARGET). Then
4728 call ourselves recursively to store the field into a BLKmode version of
4729 that object. Finally, load from the object into TARGET. This is not
4730 very efficient in general, but should only be slightly more expensive
4731 than the otherwise-required unaligned accesses. Perhaps this can be
4732 cleaned up later. */
4733
4734 if (mode == BLKmode
4735 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4736 {
4737 rtx object = assign_stack_temp (GET_MODE (target),
4738 GET_MODE_SIZE (GET_MODE (target)), 0);
4739 rtx blk_object = copy_rtx (object);
4740
4741 MEM_SET_IN_STRUCT_P (object, 1);
4742 MEM_SET_IN_STRUCT_P (blk_object, 1);
4743 PUT_MODE (blk_object, BLKmode);
4744
4745 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4746 emit_move_insn (object, target);
4747
4748 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4749 align, total_size, alias_set);
4750
4751 /* Even though we aren't returning target, we need to
4752 give it the updated value. */
4753 emit_move_insn (target, object);
4754
4755 return blk_object;
4756 }
4757
4758 /* If the structure is in a register or if the component
4759 is a bit field, we cannot use addressing to access it.
4760 Use bit-field techniques or SUBREG to store in it. */
4761
4762 if (mode == VOIDmode
4763 || (mode != BLKmode && ! direct_store[(int) mode]
4764 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4765 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4766 || GET_CODE (target) == REG
4767 || GET_CODE (target) == SUBREG
4768 /* If the field isn't aligned enough to store as an ordinary memref,
4769 store it as a bit field. */
4770 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4771 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4772 || bitpos % GET_MODE_ALIGNMENT (mode)))
4773 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4774 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4775 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4776 /* If the RHS and field are a constant size and the size of the
4777 RHS isn't the same size as the bitfield, we must use bitfield
4778 operations. */
4779 || ((bitsize >= 0
4780 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4781 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4782 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4783 {
4784 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4785
4786 /* If BITSIZE is narrower than the size of the type of EXP
4787 we will be narrowing TEMP. Normally, what's wanted are the
4788 low-order bits. However, if EXP's type is a record and this is
4789 big-endian machine, we want the upper BITSIZE bits. */
4790 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4791 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4792 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4793 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4794 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4795 - bitsize),
4796 temp, 1);
4797
4798 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4799 MODE. */
4800 if (mode != VOIDmode && mode != BLKmode
4801 && mode != TYPE_MODE (TREE_TYPE (exp)))
4802 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4803
4804 /* If the modes of TARGET and TEMP are both BLKmode, both
4805 must be in memory and BITPOS must be aligned on a byte
4806 boundary. If so, we simply do a block copy. */
4807 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4808 {
4809 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4810
4811 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4812 || bitpos % BITS_PER_UNIT != 0)
4813 abort ();
4814
4815 target = change_address (target, VOIDmode,
4816 plus_constant (XEXP (target, 0),
4817 bitpos / BITS_PER_UNIT));
4818
4819 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4820 align = MIN (exp_align, align);
4821
4822 /* Find an alignment that is consistent with the bit position. */
4823 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4824 align >>= 1;
4825
4826 emit_block_move (target, temp,
4827 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4828 / BITS_PER_UNIT),
4829 align);
4830
4831 return value_mode == VOIDmode ? const0_rtx : target;
4832 }
4833
4834 /* Store the value in the bitfield. */
4835 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4836 if (value_mode != VOIDmode)
4837 {
4838 /* The caller wants an rtx for the value. */
4839 /* If possible, avoid refetching from the bitfield itself. */
4840 if (width_mask != 0
4841 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4842 {
4843 tree count;
4844 enum machine_mode tmode;
4845
4846 if (unsignedp)
4847 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4848 tmode = GET_MODE (temp);
4849 if (tmode == VOIDmode)
4850 tmode = value_mode;
4851 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4852 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4853 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4854 }
4855 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4856 NULL_RTX, value_mode, 0, align,
4857 total_size);
4858 }
4859 return const0_rtx;
4860 }
4861 else
4862 {
4863 rtx addr = XEXP (target, 0);
4864 rtx to_rtx;
4865
4866 /* If a value is wanted, it must be the lhs;
4867 so make the address stable for multiple use. */
4868
4869 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4870 && ! CONSTANT_ADDRESS_P (addr)
4871 /* A frame-pointer reference is already stable. */
4872 && ! (GET_CODE (addr) == PLUS
4873 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4874 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4875 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4876 addr = copy_to_reg (addr);
4877
4878 /* Now build a reference to just the desired component. */
4879
4880 to_rtx = copy_rtx (change_address (target, mode,
4881 plus_constant (addr,
4882 (bitpos
4883 / BITS_PER_UNIT))));
4884 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4885 MEM_ALIAS_SET (to_rtx) = alias_set;
4886
4887 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4888 }
4889 }
4890 \f
4891 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4892 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4893 ARRAY_REFs and find the ultimate containing object, which we return.
4894
4895 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4896 bit position, and *PUNSIGNEDP to the signedness of the field.
4897 If the position of the field is variable, we store a tree
4898 giving the variable offset (in units) in *POFFSET.
4899 This offset is in addition to the bit position.
4900 If the position is not variable, we store 0 in *POFFSET.
4901 We set *PALIGNMENT to the alignment in bytes of the address that will be
4902 computed. This is the alignment of the thing we return if *POFFSET
4903 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4904
4905 If any of the extraction expressions is volatile,
4906 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4907
4908 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4909 is a mode that can be used to access the field. In that case, *PBITSIZE
4910 is redundant.
4911
4912 If the field describes a variable-sized object, *PMODE is set to
4913 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4914 this case, but the address of the object can be found. */
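/* Illustrative example: for `s.a.b' with constant field positions this
   returns the tree for S and accumulates the bit offset of B in
   *PBITPOS; for `a[i].f' with a variable index I, the variable part of
   the displacement is returned in *POFFSET instead.  */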
4915
4916 tree
4917 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4918 punsignedp, pvolatilep, palignment)
4919 tree exp;
4920 int *pbitsize;
4921 int *pbitpos;
4922 tree *poffset;
4923 enum machine_mode *pmode;
4924 int *punsignedp;
4925 int *pvolatilep;
4926 unsigned int *palignment;
4927 {
4928 tree orig_exp = exp;
4929 tree size_tree = 0;
4930 enum machine_mode mode = VOIDmode;
4931 tree offset = size_zero_node;
4932 unsigned int alignment = BIGGEST_ALIGNMENT;
4933
4934 if (TREE_CODE (exp) == COMPONENT_REF)
4935 {
4936 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4937 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4938 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4939 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4940 }
4941 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4942 {
4943 size_tree = TREE_OPERAND (exp, 1);
4944 *punsignedp = TREE_UNSIGNED (exp);
4945 }
4946 else
4947 {
4948 mode = TYPE_MODE (TREE_TYPE (exp));
4949 if (mode == BLKmode)
4950 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4951
4952 *pbitsize = GET_MODE_BITSIZE (mode);
4953 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4954 }
4955
4956 if (size_tree)
4957 {
4958 if (TREE_CODE (size_tree) != INTEGER_CST)
4959 mode = BLKmode, *pbitsize = -1;
4960 else
4961 *pbitsize = TREE_INT_CST_LOW (size_tree);
4962 }
4963
4964 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4965 and find the ultimate containing object. */
4966
4967 *pbitpos = 0;
4968
4969 while (1)
4970 {
4971 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4972 {
4973 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4974 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4975 : TREE_OPERAND (exp, 2));
4976 tree constant = bitsize_int (0), var = pos;
4977
4978 /* If this field hasn't been filled in yet, don't go
4979 past it. This should only happen when folding expressions
4980 made during type construction. */
4981 if (pos == 0)
4982 break;
4983
4984 /* Assume here that the offset is a multiple of a unit.
4985 If not, there should be an explicitly added constant. */
4986 if (TREE_CODE (pos) == PLUS_EXPR
4987 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4988 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4989 else if (TREE_CODE (pos) == INTEGER_CST)
4990 constant = pos, var = bitsize_int (0);
4991
4992 *pbitpos += TREE_INT_CST_LOW (constant);
4993 offset
4994 = size_binop (PLUS_EXPR, offset,
4995 convert (sizetype,
4996 size_binop (EXACT_DIV_EXPR, var,
4997 bitsize_int (BITS_PER_UNIT))));
4998 }
4999
5000 else if (TREE_CODE (exp) == ARRAY_REF)
5001 {
5002 /* This code is based on the code in case ARRAY_REF in expand_expr
5003 below. We assume here that the size of an array element is
5004 always an integral multiple of BITS_PER_UNIT. */
5005
5006 tree index = TREE_OPERAND (exp, 1);
5007 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5008 tree low_bound
5009 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5010 tree index_type = TREE_TYPE (index);
5011 tree xindex;
5012
5013 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5014 {
5015 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5016 index);
5017 index_type = TREE_TYPE (index);
5018 }
5019
5020 /* Optimize the special-case of a zero lower bound.
5021
5022 We convert the low_bound to sizetype to avoid some problems
5023 with constant folding. (E.g. suppose the lower bound is 1,
5024 and its mode is QI. Without the conversion, (ARRAY
5025 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5026 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5027
5028 But sizetype isn't quite right either (especially if
5029 the lowbound is negative). FIXME */
5030
5031 if (! integer_zerop (low_bound))
5032 index = fold (build (MINUS_EXPR, index_type, index,
5033 convert (sizetype, low_bound)));
5034
5035 if (TREE_CODE (index) == INTEGER_CST)
5036 {
5037 index = convert (sbitsizetype, index);
5038 index_type = TREE_TYPE (index);
5039 }
5040
5041 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5042 convert (sbitsizetype,
5043 TYPE_SIZE (TREE_TYPE (exp)))));
5044
5045 if (TREE_CODE (xindex) == INTEGER_CST
5046 && TREE_INT_CST_HIGH (xindex) == 0)
5047 *pbitpos += TREE_INT_CST_LOW (xindex);
5048 else
5049 {
5050 /* Either the bit offset calculated above is not constant, or
5051 it overflowed. In either case, redo the multiplication
5052 against the size in units. This is especially important
5053 in the non-constant case to avoid a division at runtime. */
5054 xindex
5055 = fold (build (MULT_EXPR, ssizetype, index,
5056 convert (ssizetype,
5057 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5058
5059 if (contains_placeholder_p (xindex))
5060 xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
5061
5062 offset
5063 = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
5064 }
5065 }
5066 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5067 && ! ((TREE_CODE (exp) == NOP_EXPR
5068 || TREE_CODE (exp) == CONVERT_EXPR)
5069 && (TYPE_MODE (TREE_TYPE (exp))
5070 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5071 break;
5072
5073 /* If any reference in the chain is volatile, the effect is volatile. */
5074 if (TREE_THIS_VOLATILE (exp))
5075 *pvolatilep = 1;
5076
5077 /* If the offset is non-constant already, then we can't assume any
5078 alignment more than the alignment here. */
5079 if (! integer_zerop (offset))
5080 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5081
5082 exp = TREE_OPERAND (exp, 0);
5083 }
5084
5085 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5086 alignment = MIN (alignment, DECL_ALIGN (exp));
5087 else if (TREE_TYPE (exp) != 0)
5088 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5089
5090 if (integer_zerop (offset))
5091 offset = 0;
5092
5093 if (offset != 0 && contains_placeholder_p (offset))
5094 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5095
5096 *pmode = mode;
5097 *poffset = offset;
5098 *palignment = alignment / BITS_PER_UNIT;
5099 return exp;
5100 }
5101
5102 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5103 static enum memory_use_mode
5104 get_memory_usage_from_modifier (modifier)
5105 enum expand_modifier modifier;
5106 {
5107 switch (modifier)
5108 {
5109 case EXPAND_NORMAL:
5110 case EXPAND_SUM:
5111 return MEMORY_USE_RO;
5112 break;
5113 case EXPAND_MEMORY_USE_WO:
5114 return MEMORY_USE_WO;
5115 break;
5116 case EXPAND_MEMORY_USE_RW:
5117 return MEMORY_USE_RW;
5118 break;
5119 case EXPAND_MEMORY_USE_DONT:
5120 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5121 MEMORY_USE_DONT, because they are modifiers to a call of
5122 expand_expr in the ADDR_EXPR case of expand_expr. */
5123 case EXPAND_CONST_ADDRESS:
5124 case EXPAND_INITIALIZER:
5125 return MEMORY_USE_DONT;
5126 case EXPAND_MEMORY_USE_BAD:
5127 default:
5128 abort ();
5129 }
5130 }
5131 \f
5132 /* Given an rtx VALUE that may contain additions and multiplications,
5133 return an equivalent value that just refers to a register or memory.
5134 This is done by generating instructions to perform the arithmetic
5135 and returning a pseudo-register containing the value.
5136
5137 The returned value may be a REG, SUBREG, MEM or constant. */
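/* Illustrative example: given (plus (mult (reg A) (const_int 4)) (reg B)),
   force_operand emits the arithmetic (the multiply may become a shift)
   and returns a register holding the sum, so the caller sees a simple
   operand.  */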
5138
5139 rtx
5140 force_operand (value, target)
5141 rtx value, target;
5142 {
5143 register optab binoptab = 0;
5144 /* Use a temporary to force order of execution of calls to
5145 `force_operand'. */
5146 rtx tmp;
5147 register rtx op2;
5148 /* Use subtarget as the target for operand 0 of a binary operation. */
5149 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5150
5151 /* Check for a PIC address load. */
5152 if (flag_pic
5153 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5154 && XEXP (value, 0) == pic_offset_table_rtx
5155 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5156 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5157 || GET_CODE (XEXP (value, 1)) == CONST))
5158 {
5159 if (!subtarget)
5160 subtarget = gen_reg_rtx (GET_MODE (value));
5161 emit_move_insn (subtarget, value);
5162 return subtarget;
5163 }
5164
5165 if (GET_CODE (value) == PLUS)
5166 binoptab = add_optab;
5167 else if (GET_CODE (value) == MINUS)
5168 binoptab = sub_optab;
5169 else if (GET_CODE (value) == MULT)
5170 {
5171 op2 = XEXP (value, 1);
5172 if (!CONSTANT_P (op2)
5173 && !(GET_CODE (op2) == REG && op2 != subtarget))
5174 subtarget = 0;
5175 tmp = force_operand (XEXP (value, 0), subtarget);
5176 return expand_mult (GET_MODE (value), tmp,
5177 force_operand (op2, NULL_RTX),
5178 target, 0);
5179 }
5180
5181 if (binoptab)
5182 {
5183 op2 = XEXP (value, 1);
5184 if (!CONSTANT_P (op2)
5185 && !(GET_CODE (op2) == REG && op2 != subtarget))
5186 subtarget = 0;
5187 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5188 {
5189 binoptab = add_optab;
5190 op2 = negate_rtx (GET_MODE (value), op2);
5191 }
5192
5193 /* Check for an addition with OP2 a constant integer and our first
5194 operand a PLUS of a virtual register and something else. In that
5195 case, we want to emit the sum of the virtual register and the
5196 constant first and then add the other value. This allows virtual
5197 register instantiation to simply modify the constant rather than
5198 creating another one around this addition. */
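/* For example, (plus (plus (virtual-stack-vars) (reg X)) (const_int 8))
   is computed as TEMP = virtual-stack-vars + 8 followed by TEMP + X, so
   instantiation can fold the constant into a single frame offset.  */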
5199 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5200 && GET_CODE (XEXP (value, 0)) == PLUS
5201 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5202 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5203 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5204 {
5205 rtx temp = expand_binop (GET_MODE (value), binoptab,
5206 XEXP (XEXP (value, 0), 0), op2,
5207 subtarget, 0, OPTAB_LIB_WIDEN);
5208 return expand_binop (GET_MODE (value), binoptab, temp,
5209 force_operand (XEXP (XEXP (value, 0), 1), 0),
5210 target, 0, OPTAB_LIB_WIDEN);
5211 }
5212
5213 tmp = force_operand (XEXP (value, 0), subtarget);
5214 return expand_binop (GET_MODE (value), binoptab, tmp,
5215 force_operand (op2, NULL_RTX),
5216 target, 0, OPTAB_LIB_WIDEN);
5217 /* We give UNSIGNEDP = 0 to expand_binop
5218 because the only operations we are expanding here are signed ones. */
5219 }
5220 return value;
5221 }
5222 \f
5223 /* Subroutine of expand_expr:
5224 save the non-copied parts (LIST) of an expr (LHS), and return a list
5225 which can restore these values to their previous values,
5226 should something modify their storage. */
5227
5228 static tree
5229 save_noncopied_parts (lhs, list)
5230 tree lhs;
5231 tree list;
5232 {
5233 tree tail;
5234 tree parts = 0;
5235
5236 for (tail = list; tail; tail = TREE_CHAIN (tail))
5237 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5238 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5239 else
5240 {
5241 tree part = TREE_VALUE (tail);
5242 tree part_type = TREE_TYPE (part);
5243 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5244 rtx target = assign_temp (part_type, 0, 1, 1);
5245 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5246 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5247 parts = tree_cons (to_be_saved,
5248 build (RTL_EXPR, part_type, NULL_TREE,
5249 (tree) target),
5250 parts);
5251 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5252 }
5253 return parts;
5254 }
5255
5256 /* Subroutine of expand_expr:
5257 record the non-copied parts (LIST) of an expr (LHS), and return a list
5258 which specifies the initial values of these parts. */
5259
5260 static tree
5261 init_noncopied_parts (lhs, list)
5262 tree lhs;
5263 tree list;
5264 {
5265 tree tail;
5266 tree parts = 0;
5267
5268 for (tail = list; tail; tail = TREE_CHAIN (tail))
5269 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5270 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5271 else if (TREE_PURPOSE (tail))
5272 {
5273 tree part = TREE_VALUE (tail);
5274 tree part_type = TREE_TYPE (part);
5275 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5276 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5277 }
5278 return parts;
5279 }
5280
5281 /* Subroutine of expand_expr: return nonzero iff there is no way that
5282 EXP can reference X, which is being modified. TOP_P is nonzero if this
5283 call is going to be used to determine whether we need a temporary
5284 for EXP, as opposed to a recursive call to this function.
5285
5286 It is always safe for this routine to return zero since it merely
5287 searches for optimization opportunities. */
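/* For instance, when expanding an assignment the expander may reuse the
   destination's rtx as the target for one operand only if safe_from_p
   says the remaining operands cannot reference that rtx; returning zero
   merely forces a separate temporary to be used.  */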
5288
5289 static int
5290 safe_from_p (x, exp, top_p)
5291 rtx x;
5292 tree exp;
5293 int top_p;
5294 {
5295 rtx exp_rtl = 0;
5296 int i, nops;
5297 static int save_expr_count;
5298 static int save_expr_size = 0;
5299 static tree *save_expr_rewritten;
5300 static tree save_expr_trees[256];
5301
5302 if (x == 0
5303 /* If EXP has varying size, we MUST use a target since we currently
5304 have no way of allocating temporaries of variable size
5305 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5306 So we assume here that something at a higher level has prevented a
5307 clash. This is somewhat bogus, but the best we can do. Only
5308 do this when X is BLKmode and when we are at the top level. */
5309 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5310 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5311 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5312 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5313 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5314 != INTEGER_CST)
5315 && GET_MODE (x) == BLKmode))
5316 return 1;
5317
5318 if (top_p && save_expr_size == 0)
5319 {
5320 int rtn;
5321
5322 save_expr_count = 0;
5323 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5324 save_expr_rewritten = &save_expr_trees[0];
5325
5326 rtn = safe_from_p (x, exp, 1);
5327
5328 for (i = 0; i < save_expr_count; ++i)
5329 {
5330 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5331 abort ();
5332 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5333 }
5334
5335 save_expr_size = 0;
5336
5337 return rtn;
5338 }
5339
5340 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5341 find the underlying pseudo. */
5342 if (GET_CODE (x) == SUBREG)
5343 {
5344 x = SUBREG_REG (x);
5345 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5346 return 0;
5347 }
5348
5349 /* If X is a location in the outgoing argument area, it is always safe. */
5350 if (GET_CODE (x) == MEM
5351 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5352 || (GET_CODE (XEXP (x, 0)) == PLUS
5353 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5354 return 1;
5355
5356 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5357 {
5358 case 'd':
5359 exp_rtl = DECL_RTL (exp);
5360 break;
5361
5362 case 'c':
5363 return 1;
5364
5365 case 'x':
5366 if (TREE_CODE (exp) == TREE_LIST)
5367 return ((TREE_VALUE (exp) == 0
5368 || safe_from_p (x, TREE_VALUE (exp), 0))
5369 && (TREE_CHAIN (exp) == 0
5370 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5371 else if (TREE_CODE (exp) == ERROR_MARK)
5372 return 1; /* An already-visited SAVE_EXPR? */
5373 else
5374 return 0;
5375
5376 case '1':
5377 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5378
5379 case '2':
5380 case '<':
5381 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5382 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5383
5384 case 'e':
5385 case 'r':
5386 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5387 the expression. If it is set, we conflict iff we are that rtx or
5388 both are in memory. Otherwise, we check all operands of the
5389 expression recursively. */
5390
5391 switch (TREE_CODE (exp))
5392 {
5393 case ADDR_EXPR:
5394 return (staticp (TREE_OPERAND (exp, 0))
5395 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5396 || TREE_STATIC (exp));
5397
5398 case INDIRECT_REF:
5399 if (GET_CODE (x) == MEM)
5400 return 0;
5401 break;
5402
5403 case CALL_EXPR:
5404 exp_rtl = CALL_EXPR_RTL (exp);
5405 if (exp_rtl == 0)
5406 {
5407 /* Assume that the call will clobber all hard registers and
5408 all of memory. */
5409 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5410 || GET_CODE (x) == MEM)
5411 return 0;
5412 }
5413
5414 break;
5415
5416 case RTL_EXPR:
5417 /* If a sequence exists, we would have to scan every instruction
5418 in the sequence to see if it was safe. This is probably not
5419 worthwhile. */
5420 if (RTL_EXPR_SEQUENCE (exp))
5421 return 0;
5422
5423 exp_rtl = RTL_EXPR_RTL (exp);
5424 break;
5425
5426 case WITH_CLEANUP_EXPR:
5427 exp_rtl = RTL_EXPR_RTL (exp);
5428 break;
5429
5430 case CLEANUP_POINT_EXPR:
5431 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5432
5433 case SAVE_EXPR:
5434 exp_rtl = SAVE_EXPR_RTL (exp);
5435 if (exp_rtl)
5436 break;
5437
5438 /* This SAVE_EXPR might appear many times in the top-level
5439 safe_from_p() expression, and if it has a complex
5440 subexpression, examining it multiple times could result
5441 in a combinatorial explosion. E.g. on an Alpha
5442 running at least 200MHz, a Fortran test case compiled with
5443 optimization took about 28 minutes to compile -- even though
5444 it was only a few lines long, and the complicated line causing
5445 so much time to be spent in the earlier version of safe_from_p()
5446 had only 293 or so unique nodes.
5447
5448 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5449 where it is so we can turn it back in the top-level safe_from_p()
5450 when we're done. */
5451
5452 /* For now, don't bother re-sizing the array. */
5453 if (save_expr_count >= save_expr_size)
5454 return 0;
5455 save_expr_rewritten[save_expr_count++] = exp;
5456
5457 nops = tree_code_length[(int) SAVE_EXPR];
5458 for (i = 0; i < nops; i++)
5459 {
5460 tree operand = TREE_OPERAND (exp, i);
5461 if (operand == NULL_TREE)
5462 continue;
5463 TREE_SET_CODE (exp, ERROR_MARK);
5464 if (!safe_from_p (x, operand, 0))
5465 return 0;
5466 TREE_SET_CODE (exp, SAVE_EXPR);
5467 }
5468 TREE_SET_CODE (exp, ERROR_MARK);
5469 return 1;
5470
5471 case BIND_EXPR:
5472 /* The only operand we look at is operand 1. The rest aren't
5473 part of the expression. */
5474 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5475
5476 case METHOD_CALL_EXPR:
5477 /* This takes an rtx argument, but shouldn't appear here. */
5478 abort ();
5479
5480 default:
5481 break;
5482 }
5483
5484 /* If we have an rtx, we do not need to scan our operands. */
5485 if (exp_rtl)
5486 break;
5487
5488 nops = tree_code_length[(int) TREE_CODE (exp)];
5489 for (i = 0; i < nops; i++)
5490 if (TREE_OPERAND (exp, i) != 0
5491 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5492 return 0;
5493 }
5494
5495 /* If we have an rtl, find any enclosed object. Then see if we conflict
5496 with it. */
5497 if (exp_rtl)
5498 {
5499 if (GET_CODE (exp_rtl) == SUBREG)
5500 {
5501 exp_rtl = SUBREG_REG (exp_rtl);
5502 if (GET_CODE (exp_rtl) == REG
5503 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5504 return 0;
5505 }
5506
5507 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5508 are memory and EXP is not readonly. */
5509 return ! (rtx_equal_p (x, exp_rtl)
5510 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5511 && ! TREE_READONLY (exp)));
5512 }
5513
5514 /* If we reach here, it is safe. */
5515 return 1;
5516 }
5517
5518 /* Subroutine of expand_expr: return nonzero iff EXP is an
5519 expression whose type is statically determinable. */
5520
5521 static int
5522 fixed_type_p (exp)
5523 tree exp;
5524 {
5525 if (TREE_CODE (exp) == PARM_DECL
5526 || TREE_CODE (exp) == VAR_DECL
5527 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5528 || TREE_CODE (exp) == COMPONENT_REF
5529 || TREE_CODE (exp) == ARRAY_REF)
5530 return 1;
5531 return 0;
5532 }
5533
5534 /* Subroutine of expand_expr: return rtx if EXP is a
5535 variable or parameter; else return 0. */
5536
5537 static rtx
5538 var_rtx (exp)
5539 tree exp;
5540 {
5541 STRIP_NOPS (exp);
5542 switch (TREE_CODE (exp))
5543 {
5544 case PARM_DECL:
5545 case VAR_DECL:
5546 return DECL_RTL (exp);
5547 default:
5548 return 0;
5549 }
5550 }
5551
5552 #ifdef MAX_INTEGER_COMPUTATION_MODE
5553 void
5554 check_max_integer_computation_mode (exp)
5555 tree exp;
5556 {
5557 enum tree_code code;
5558 enum machine_mode mode;
5559
5560 /* Strip any NOPs that don't change the mode. */
5561 STRIP_NOPS (exp);
5562 code = TREE_CODE (exp);
5563
5564 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5565 if (code == NOP_EXPR
5566 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5567 return;
5568
5569 /* First check the type of the overall operation. We need only look at
5570 unary, binary and relational operations. */
5571 if (TREE_CODE_CLASS (code) == '1'
5572 || TREE_CODE_CLASS (code) == '2'
5573 || TREE_CODE_CLASS (code) == '<')
5574 {
5575 mode = TYPE_MODE (TREE_TYPE (exp));
5576 if (GET_MODE_CLASS (mode) == MODE_INT
5577 && mode > MAX_INTEGER_COMPUTATION_MODE)
5578 fatal ("unsupported wide integer operation");
5579 }
5580
5581 /* Check operand of a unary op. */
5582 if (TREE_CODE_CLASS (code) == '1')
5583 {
5584 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5585 if (GET_MODE_CLASS (mode) == MODE_INT
5586 && mode > MAX_INTEGER_COMPUTATION_MODE)
5587 fatal ("unsupported wide integer operation");
5588 }
5589
5590 /* Check operands of a binary/comparison op. */
5591 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5592 {
5593 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5594 if (GET_MODE_CLASS (mode) == MODE_INT
5595 && mode > MAX_INTEGER_COMPUTATION_MODE)
5596 fatal ("unsupported wide integer operation");
5597
5598 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5599 if (GET_MODE_CLASS (mode) == MODE_INT
5600 && mode > MAX_INTEGER_COMPUTATION_MODE)
5601 fatal ("unsupported wide integer operation");
5602 }
5603 }
5604 #endif
5605
5606 \f
5607 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5608 has any readonly fields. If any of the fields have types that
5609 contain readonly fields, return true as well. */
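/* E.g., given "struct a { const int i; };" and "struct b { struct a m; };"
   this returns nonzero for both record types, since struct b reaches a
   readonly field through its struct a member.  */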
5610
5611 static int
5612 readonly_fields_p (type)
5613 tree type;
5614 {
5615 tree field;
5616
5617 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5618 if (TREE_CODE (field) == FIELD_DECL
5619 && (TREE_READONLY (field)
5620 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5621 && readonly_fields_p (TREE_TYPE (field)))))
5622 return 1;
5623
5624 return 0;
5625 }
5626 \f
5627 /* expand_expr: generate code for computing expression EXP.
5628 An rtx for the computed value is returned. The value is never null.
5629 In the case of a void EXP, const0_rtx is returned.
5630
5631 The value may be stored in TARGET if TARGET is nonzero.
5632 TARGET is just a suggestion; callers must assume that
5633 the rtx returned may not be the same as TARGET.
5634
5635 If TARGET is CONST0_RTX, it means that the value will be ignored.
5636
5637 If TMODE is not VOIDmode, it suggests generating the
5638 result in mode TMODE. But this is done only when convenient.
5639 Otherwise, TMODE is ignored and the value generated in its natural mode.
5640 TMODE is just a suggestion; callers must assume that
5641 the rtx returned may not have mode TMODE.
5642
5643 Note that TARGET may have neither TMODE nor MODE. In that case, it
5644 probably will not be used.
5645
5646 If MODIFIER is EXPAND_SUM then when EXP is an addition
5647 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5648 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5649 products as above, or REG or MEM, or constant.
5650 Ordinarily in such cases we would output mul or add instructions
5651 and then return a pseudo reg containing the sum.
5652
5653 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5654 it also marks a label as absolutely required (it can't be dead).
5655 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5656 This is used for outputting expressions used in initializers.
5657
5658 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5659 with a constant address even if that address is not normally legitimate.
5660 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5661
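/* A minimal caller sketch (purely illustrative): to force the value of
   an expression into a particular pseudo, one typically writes

	temp = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
	if (temp != target)
	  emit_move_insn (target, temp);

   since TARGET is only a suggestion and the returned rtx is what must
   actually be used.  */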
5662 rtx
5663 expand_expr (exp, target, tmode, modifier)
5664 register tree exp;
5665 rtx target;
5666 enum machine_mode tmode;
5667 enum expand_modifier modifier;
5668 {
5669 register rtx op0, op1, temp;
5670 tree type = TREE_TYPE (exp);
5671 int unsignedp = TREE_UNSIGNED (type);
5672 register enum machine_mode mode;
5673 register enum tree_code code = TREE_CODE (exp);
5674 optab this_optab;
5675 rtx subtarget, original_target;
5676 int ignore;
5677 tree context;
5678 /* Used by check-memory-usage to make modifier read only. */
5679 enum expand_modifier ro_modifier;
5680
5681 /* Handle ERROR_MARK before anybody tries to access its type. */
5682 if (TREE_CODE (exp) == ERROR_MARK)
5683 {
5684 op0 = CONST0_RTX (tmode);
5685 if (op0 != 0)
5686 return op0;
5687 return const0_rtx;
5688 }
5689
5690 mode = TYPE_MODE (type);
5691 /* Use subtarget as the target for operand 0 of a binary operation. */
5692 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5693 original_target = target;
5694 ignore = (target == const0_rtx
5695 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5696 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5697 || code == COND_EXPR)
5698 && TREE_CODE (type) == VOID_TYPE));
5699
5700 /* Make a read-only version of the modifier. */
5701 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5702 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5703 ro_modifier = modifier;
5704 else
5705 ro_modifier = EXPAND_NORMAL;
5706
5707 /* Don't use hard regs as subtargets, because the combiner
5708 can only handle pseudo regs. */
5709 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5710 subtarget = 0;
5711 /* Avoid subtargets inside loops,
5712 since they hide some invariant expressions. */
5713 if (preserve_subexpressions_p ())
5714 subtarget = 0;
5715
5716 /* If we are going to ignore this result, we need only do something
5717 if there is a side-effect somewhere in the expression. If there
5718 is, short-circuit the most common cases here. Note that we must
5719 not call expand_expr with anything but const0_rtx in case this
5720 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5721
5722 if (ignore)
5723 {
5724 if (! TREE_SIDE_EFFECTS (exp))
5725 return const0_rtx;
5726
5727 /* Ensure we reference a volatile object even if value is ignored, but
5728 don't do this if all we are doing is taking its address. */
5729 if (TREE_THIS_VOLATILE (exp)
5730 && TREE_CODE (exp) != FUNCTION_DECL
5731 && mode != VOIDmode && mode != BLKmode
5732 && modifier != EXPAND_CONST_ADDRESS)
5733 {
5734 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5735 if (GET_CODE (temp) == MEM)
5736 temp = copy_to_reg (temp);
5737 return const0_rtx;
5738 }
5739
5740 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5741 || code == INDIRECT_REF || code == BUFFER_REF)
5742 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5743 VOIDmode, ro_modifier);
5744 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5745 || code == ARRAY_REF)
5746 {
5747 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5748 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5749 return const0_rtx;
5750 }
5751 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5752 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5753 /* If the second operand has no side effects, just evaluate
5754 the first. */
5755 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5756 VOIDmode, ro_modifier);
5757 else if (code == BIT_FIELD_REF)
5758 {
5759 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5760 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5761 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5762 return const0_rtx;
5763 }
5764 ;
5765 target = 0;
5766 }
5767
5768 #ifdef MAX_INTEGER_COMPUTATION_MODE
5769 /* Only check stuff here if the mode we want is different from the mode
5770 of the expression; if it's the same, check_max_integer_computation_mode
5771 will handle it. Do we really need to check this stuff at all? */
5772
5773 if (target
5774 && GET_MODE (target) != mode
5775 && TREE_CODE (exp) != INTEGER_CST
5776 && TREE_CODE (exp) != PARM_DECL
5777 && TREE_CODE (exp) != ARRAY_REF
5778 && TREE_CODE (exp) != COMPONENT_REF
5779 && TREE_CODE (exp) != BIT_FIELD_REF
5780 && TREE_CODE (exp) != INDIRECT_REF
5781 && TREE_CODE (exp) != CALL_EXPR
5782 && TREE_CODE (exp) != VAR_DECL
5783 && TREE_CODE (exp) != RTL_EXPR)
5784 {
5785 enum machine_mode mode = GET_MODE (target);
5786
5787 if (GET_MODE_CLASS (mode) == MODE_INT
5788 && mode > MAX_INTEGER_COMPUTATION_MODE)
5789 fatal ("unsupported wide integer operation");
5790 }
5791
5792 if (tmode != mode
5793 && TREE_CODE (exp) != INTEGER_CST
5794 && TREE_CODE (exp) != PARM_DECL
5795 && TREE_CODE (exp) != ARRAY_REF
5796 && TREE_CODE (exp) != COMPONENT_REF
5797 && TREE_CODE (exp) != BIT_FIELD_REF
5798 && TREE_CODE (exp) != INDIRECT_REF
5799 && TREE_CODE (exp) != VAR_DECL
5800 && TREE_CODE (exp) != CALL_EXPR
5801 && TREE_CODE (exp) != RTL_EXPR
5802 && GET_MODE_CLASS (tmode) == MODE_INT
5803 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5804 fatal ("unsupported wide integer operation");
5805
5806 check_max_integer_computation_mode (exp);
5807 #endif
5808
5809 /* If will do cse, generate all results into pseudo registers
5810 since 1) that allows cse to find more things
5811 and 2) otherwise cse could produce an insn the machine
5812 cannot support. */
5813
5814 if (! cse_not_expected && mode != BLKmode && target
5815 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5816 target = subtarget;
5817
5818 switch (code)
5819 {
5820 case LABEL_DECL:
5821 {
5822 tree function = decl_function_context (exp);
5823 /* Handle using a label in a containing function. */
5824 if (function != current_function_decl
5825 && function != inline_function_decl && function != 0)
5826 {
5827 struct function *p = find_function_data (function);
5828 /* Allocate in the memory associated with the function
5829 that the label is in. */
5830 push_obstacks (p->function_obstack,
5831 p->function_maybepermanent_obstack);
5832
5833 p->expr->x_forced_labels
5834 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5835 p->expr->x_forced_labels);
5836 pop_obstacks ();
5837 }
5838 else
5839 {
5840 if (modifier == EXPAND_INITIALIZER)
5841 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5842 label_rtx (exp),
5843 forced_labels);
5844 }
5845
5846 temp = gen_rtx_MEM (FUNCTION_MODE,
5847 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5848 if (function != current_function_decl
5849 && function != inline_function_decl && function != 0)
5850 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5851 return temp;
5852 }
5853
5854 case PARM_DECL:
5855 if (DECL_RTL (exp) == 0)
5856 {
5857 error_with_decl (exp, "prior parameter's size depends on `%s'");
5858 return CONST0_RTX (mode);
5859 }
5860
5861 /* ... fall through ... */
5862
5863 case VAR_DECL:
5864 /* If a static var's type was incomplete when the decl was written,
5865 but the type is complete now, lay out the decl now. */
5866 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5867 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5868 {
5869 push_obstacks_nochange ();
5870 end_temporary_allocation ();
5871 layout_decl (exp, 0);
5872 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5873 pop_obstacks ();
5874 }
5875
5876 /* Although static-storage variables start off initialized, according to
5877 ANSI C, a memcpy could overwrite them with uninitialized values. So
5878 we check them too. This also lets us check for read-only variables
5879 accessed via a non-const declaration, in case it won't be detected
5880 any other way (e.g., in an embedded system or OS kernel without
5881 memory protection).
5882
5883 Aggregates are not checked here; they're handled elsewhere. */
5884 if (cfun && current_function_check_memory_usage
5885 && code == VAR_DECL
5886 && GET_CODE (DECL_RTL (exp)) == MEM
5887 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5888 {
5889 enum memory_use_mode memory_usage;
5890 memory_usage = get_memory_usage_from_modifier (modifier);
5891
5892 if (memory_usage != MEMORY_USE_DONT)
5893 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5894 XEXP (DECL_RTL (exp), 0), Pmode,
5895 GEN_INT (int_size_in_bytes (type)),
5896 TYPE_MODE (sizetype),
5897 GEN_INT (memory_usage),
5898 TYPE_MODE (integer_type_node));
5899 }
5900
5901 /* ... fall through ... */
5902
5903 case FUNCTION_DECL:
5904 case RESULT_DECL:
5905 if (DECL_RTL (exp) == 0)
5906 abort ();
5907
5908 /* Ensure the variable is marked as used even if it doesn't go through
5909 a parser. If it hasn't been used yet, write out an external
5910 definition. */
5911 if (! TREE_USED (exp))
5912 {
5913 assemble_external (exp);
5914 TREE_USED (exp) = 1;
5915 }
5916
5917 /* Show we haven't gotten RTL for this yet. */
5918 temp = 0;
5919
5920 /* Handle variables inherited from containing functions. */
5921 context = decl_function_context (exp);
5922
5923 /* We treat inline_function_decl as an alias for the current function
5924 because that is the inline function whose vars, types, etc.
5925 are being merged into the current function.
5926 See expand_inline_function. */
5927
5928 if (context != 0 && context != current_function_decl
5929 && context != inline_function_decl
5930 /* If var is static, we don't need a static chain to access it. */
5931 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5932 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5933 {
5934 rtx addr;
5935
5936 /* Mark as non-local and addressable. */
5937 DECL_NONLOCAL (exp) = 1;
5938 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5939 abort ();
5940 mark_addressable (exp);
5941 if (GET_CODE (DECL_RTL (exp)) != MEM)
5942 abort ();
5943 addr = XEXP (DECL_RTL (exp), 0);
5944 if (GET_CODE (addr) == MEM)
5945 addr = gen_rtx_MEM (Pmode,
5946 fix_lexical_addr (XEXP (addr, 0), exp));
5947 else
5948 addr = fix_lexical_addr (addr, exp);
5949 temp = change_address (DECL_RTL (exp), mode, addr);
5950 }
5951
5952 /* This is the case of an array whose size is to be determined
5953 from its initializer, while the initializer is still being parsed.
5954 See expand_decl. */
5955
5956 else if (GET_CODE (DECL_RTL (exp)) == MEM
5957 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5958 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5959 XEXP (DECL_RTL (exp), 0));
5960
5961 /* If DECL_RTL is memory, we are in the normal case: if either
5962 the address is not valid, or it is not a register and -fforce-addr
5963 is specified, get the address into a register. */
5964
5965 else if (GET_CODE (DECL_RTL (exp)) == MEM
5966 && modifier != EXPAND_CONST_ADDRESS
5967 && modifier != EXPAND_SUM
5968 && modifier != EXPAND_INITIALIZER
5969 && (! memory_address_p (DECL_MODE (exp),
5970 XEXP (DECL_RTL (exp), 0))
5971 || (flag_force_addr
5972 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5973 temp = change_address (DECL_RTL (exp), VOIDmode,
5974 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5975
5976 /* If we got something, return it. But first, set the alignment
5977 if the address is a register. */
5978 if (temp != 0)
5979 {
5980 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5981 mark_reg_pointer (XEXP (temp, 0),
5982 DECL_ALIGN (exp) / BITS_PER_UNIT);
5983
5984 return temp;
5985 }
5986
5987 /* If the mode of DECL_RTL does not match that of the decl, it
5988 must be a promoted value. We return a SUBREG of the wanted mode,
5989 but mark it so that we know that it was already extended. */
5990
5991 if (GET_CODE (DECL_RTL (exp)) == REG
5992 && GET_MODE (DECL_RTL (exp)) != mode)
5993 {
5994 /* Get the signedness used for this variable. Ensure we get the
5995 same mode we got when the variable was declared. */
5996 if (GET_MODE (DECL_RTL (exp))
5997 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5998 abort ();
5999
6000 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6001 SUBREG_PROMOTED_VAR_P (temp) = 1;
6002 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6003 return temp;
6004 }
6005
6006 return DECL_RTL (exp);
6007
6008 case INTEGER_CST:
6009 return immed_double_const (TREE_INT_CST_LOW (exp),
6010 TREE_INT_CST_HIGH (exp),
6011 mode);
6012
6013 case CONST_DECL:
6014 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6015 EXPAND_MEMORY_USE_BAD);
6016
6017 case REAL_CST:
6018 /* If optimized, generate immediate CONST_DOUBLE
6019 which will be turned into memory by reload if necessary.
6020
6021 We used to force a register so that loop.c could see it. But
6022 this does not allow gen_* patterns to perform optimizations with
6023 the constants. It also produces two insns in cases like "x = 1.0;".
6024 On most machines, floating-point constants are not permitted in
6025 many insns, so we'd end up copying it to a register in any case.
6026
6027 Now, we do the copying in expand_binop, if appropriate. */
6028 return immed_real_const (exp);
6029
6030 case COMPLEX_CST:
6031 case STRING_CST:
6032 if (! TREE_CST_RTL (exp))
6033 output_constant_def (exp);
6034
6035 /* TREE_CST_RTL probably contains a constant address.
6036 On RISC machines where a constant address isn't valid,
6037 make some insns to get that address into a register. */
6038 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6039 && modifier != EXPAND_CONST_ADDRESS
6040 && modifier != EXPAND_INITIALIZER
6041 && modifier != EXPAND_SUM
6042 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6043 || (flag_force_addr
6044 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6045 return change_address (TREE_CST_RTL (exp), VOIDmode,
6046 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6047 return TREE_CST_RTL (exp);
6048
6049 case EXPR_WITH_FILE_LOCATION:
6050 {
6051 rtx to_return;
6052 char *saved_input_filename = input_filename;
6053 int saved_lineno = lineno;
6054 input_filename = EXPR_WFL_FILENAME (exp);
6055 lineno = EXPR_WFL_LINENO (exp);
6056 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6057 emit_line_note (input_filename, lineno);
6058 /* Possibly avoid switching back and forth here. */
6059 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6060 input_filename = saved_input_filename;
6061 lineno = saved_lineno;
6062 return to_return;
6063 }
6064
6065 case SAVE_EXPR:
6066 context = decl_function_context (exp);
6067
6068 /* If this SAVE_EXPR was at global context, assume we are an
6069 initialization function and move it into our context. */
6070 if (context == 0)
6071 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6072
6073 /* We treat inline_function_decl as an alias for the current function
6074 because that is the inline function whose vars, types, etc.
6075 are being merged into the current function.
6076 See expand_inline_function. */
6077 if (context == current_function_decl || context == inline_function_decl)
6078 context = 0;
6079
6080 /* If this is non-local, handle it. */
6081 if (context)
6082 {
6083 /* The following call just exists to abort if the context is
6084 not of a containing function. */
6085 find_function_data (context);
6086
6087 temp = SAVE_EXPR_RTL (exp);
6088 if (temp && GET_CODE (temp) == REG)
6089 {
6090 put_var_into_stack (exp);
6091 temp = SAVE_EXPR_RTL (exp);
6092 }
6093 if (temp == 0 || GET_CODE (temp) != MEM)
6094 abort ();
6095 return change_address (temp, mode,
6096 fix_lexical_addr (XEXP (temp, 0), exp));
6097 }
6098 if (SAVE_EXPR_RTL (exp) == 0)
6099 {
6100 if (mode == VOIDmode)
6101 temp = const0_rtx;
6102 else
6103 temp = assign_temp (type, 3, 0, 0);
6104
6105 SAVE_EXPR_RTL (exp) = temp;
6106 if (!optimize && GET_CODE (temp) == REG)
6107 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6108 save_expr_regs);
6109
6110 /* If the mode of TEMP does not match that of the expression, it
6111 must be a promoted value. We pass store_expr a SUBREG of the
6112 wanted mode but mark it so that we know that it was already
6113 extended. Note that `unsignedp' was modified above in
6114 this case. */
6115
6116 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6117 {
6118 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6119 SUBREG_PROMOTED_VAR_P (temp) = 1;
6120 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6121 }
6122
6123 if (temp == const0_rtx)
6124 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6125 EXPAND_MEMORY_USE_BAD);
6126 else
6127 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6128
6129 TREE_USED (exp) = 1;
6130 }
6131
6132 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6133 must be a promoted value. We return a SUBREG of the wanted mode,
6134 but mark it so that we know that it was already extended. */
6135
6136 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6137 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6138 {
6139 /* Compute the signedness and make the proper SUBREG. */
6140 promote_mode (type, mode, &unsignedp, 0);
6141 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6142 SUBREG_PROMOTED_VAR_P (temp) = 1;
6143 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6144 return temp;
6145 }
6146
6147 return SAVE_EXPR_RTL (exp);
6148
6149 case UNSAVE_EXPR:
6150 {
6151 rtx temp;
6152 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6153 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6154 return temp;
6155 }
6156
6157 case PLACEHOLDER_EXPR:
6158 {
6159 tree placeholder_expr;
6160
6161 /* If there is an object on the head of the placeholder list,
6162 see if some object in it is of type TYPE or a pointer to it. For
6163 further information, see tree.def. */
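	 /* (Such placeholders arise, for example, when a type's size
	    refers to a field of the object itself, as with Ada
	    discriminated records; the enclosing WITH_RECORD_EXPR pushed
	    onto this list the object against which such a size should be
	    evaluated.)  */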
6164 for (placeholder_expr = placeholder_list;
6165 placeholder_expr != 0;
6166 placeholder_expr = TREE_CHAIN (placeholder_expr))
6167 {
6168 tree need_type = TYPE_MAIN_VARIANT (type);
6169 tree object = 0;
6170 tree old_list = placeholder_list;
6171 tree elt;
6172
6173 /* Find the outermost reference that is of the type we want.
6174 If none, see if any object has a type that is a pointer to
6175 the type we want. */
6176 for (elt = TREE_PURPOSE (placeholder_expr);
6177 elt != 0 && object == 0;
6178 elt
6179 = ((TREE_CODE (elt) == COMPOUND_EXPR
6180 || TREE_CODE (elt) == COND_EXPR)
6181 ? TREE_OPERAND (elt, 1)
6182 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6183 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6184 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6186 ? TREE_OPERAND (elt, 0) : 0))
6187 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6188 object = elt;
6189
6190 for (elt = TREE_PURPOSE (placeholder_expr);
6191 elt != 0 && object == 0;
6192 elt
6193 = ((TREE_CODE (elt) == COMPOUND_EXPR
6194 || TREE_CODE (elt) == COND_EXPR)
6195 ? TREE_OPERAND (elt, 1)
6196 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6197 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6198 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6200 ? TREE_OPERAND (elt, 0) : 0))
6201 if (POINTER_TYPE_P (TREE_TYPE (elt))
6202 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6203 == need_type))
6204 object = build1 (INDIRECT_REF, need_type, elt);
6205
6206 if (object != 0)
6207 {
6208 /* Expand this object skipping the list entries before
6209 it was found in case it is also a PLACEHOLDER_EXPR.
6210 In that case, we want to translate it using subsequent
6211 entries. */
6212 placeholder_list = TREE_CHAIN (placeholder_expr);
6213 temp = expand_expr (object, original_target, tmode,
6214 ro_modifier);
6215 placeholder_list = old_list;
6216 return temp;
6217 }
6218 }
6219 }
6220
6221 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6222 abort ();
6223
6224 case WITH_RECORD_EXPR:
6225 /* Put the object on the placeholder list, expand our first operand,
6226 and pop the list. */
6227 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6228 placeholder_list);
6229 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6230 tmode, ro_modifier);
6231 placeholder_list = TREE_CHAIN (placeholder_list);
6232 return target;
6233
6234 case GOTO_EXPR:
6235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6236 expand_goto (TREE_OPERAND (exp, 0));
6237 else
6238 expand_computed_goto (TREE_OPERAND (exp, 0));
6239 return const0_rtx;
6240
6241 case EXIT_EXPR:
6242 expand_exit_loop_if_false (NULL_PTR,
6243 invert_truthvalue (TREE_OPERAND (exp, 0)));
6244 return const0_rtx;
6245
6246 case LABELED_BLOCK_EXPR:
6247 if (LABELED_BLOCK_BODY (exp))
6248 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6249 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6250 return const0_rtx;
6251
6252 case EXIT_BLOCK_EXPR:
6253 if (EXIT_BLOCK_RETURN (exp))
6254 sorry ("returned value in block_exit_expr");
6255 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6256 return const0_rtx;
6257
6258 case LOOP_EXPR:
6259 push_temp_slots ();
6260 expand_start_loop (1);
6261 expand_expr_stmt (TREE_OPERAND (exp, 0));
6262 expand_end_loop ();
6263 pop_temp_slots ();
6264
6265 return const0_rtx;
6266
6267 case BIND_EXPR:
6268 {
6269 tree vars = TREE_OPERAND (exp, 0);
6270 int vars_need_expansion = 0;
6271
6272 /* Need to open a binding contour here because
6273 if there are any cleanups they must be contained here. */
6274 expand_start_bindings (2);
6275
6276 /* Mark the corresponding BLOCK for output in its proper place. */
6277 if (TREE_OPERAND (exp, 2) != 0
6278 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6279 insert_block (TREE_OPERAND (exp, 2));
6280
6281 /* If VARS have not yet been expanded, expand them now. */
6282 while (vars)
6283 {
6284 if (DECL_RTL (vars) == 0)
6285 {
6286 vars_need_expansion = 1;
6287 expand_decl (vars);
6288 }
6289 expand_decl_init (vars);
6290 vars = TREE_CHAIN (vars);
6291 }
6292
6293 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6294
6295 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6296
6297 return temp;
6298 }
6299
6300 case RTL_EXPR:
6301 if (RTL_EXPR_SEQUENCE (exp))
6302 {
6303 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6304 abort ();
6305 emit_insns (RTL_EXPR_SEQUENCE (exp));
6306 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6307 }
6308 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6309 free_temps_for_rtl_expr (exp);
6310 return RTL_EXPR_RTL (exp);
6311
6312 case CONSTRUCTOR:
6313 /* If we don't need the result, just ensure we evaluate any
6314 subexpressions. */
6315 if (ignore)
6316 {
6317 tree elt;
6318 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6319 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6320 EXPAND_MEMORY_USE_BAD);
6321 return const0_rtx;
6322 }
6323
6324 /* All elts simple constants => refer to a constant in memory. But
6325 if this is a non-BLKmode mode, let it store a field at a time
6326 since that should make a CONST_INT or CONST_DOUBLE when we
6327 fold. Likewise, if we have a target we can use, it is best to
6328 store directly into the target unless the type is large enough
6329 that memcpy will be used. If we are making an initializer and
6330 all operands are constant, put it in memory as well. */
6331 else if ((TREE_STATIC (exp)
6332 && ((mode == BLKmode
6333 && ! (target != 0 && safe_from_p (target, exp, 1)))
6334 || TREE_ADDRESSABLE (exp)
6335 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6336 && (!MOVE_BY_PIECES_P
6337 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6338 TYPE_ALIGN (type) / BITS_PER_UNIT))
6339 && ! mostly_zeros_p (exp))))
6340 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6341 {
6342 rtx constructor = output_constant_def (exp);
6343 if (modifier != EXPAND_CONST_ADDRESS
6344 && modifier != EXPAND_INITIALIZER
6345 && modifier != EXPAND_SUM
6346 && (! memory_address_p (GET_MODE (constructor),
6347 XEXP (constructor, 0))
6348 || (flag_force_addr
6349 && GET_CODE (XEXP (constructor, 0)) != REG)))
6350 constructor = change_address (constructor, VOIDmode,
6351 XEXP (constructor, 0));
6352 return constructor;
6353 }
6354
6355 else
6356 {
6357 /* Handle calls that pass values in multiple non-contiguous
6358 locations. The Irix 6 ABI has examples of this. */
6359 if (target == 0 || ! safe_from_p (target, exp, 1)
6360 || GET_CODE (target) == PARALLEL)
6361 {
6362 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6363 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6364 else
6365 target = assign_temp (type, 0, 1, 1);
6366 }
6367
6368 if (TREE_READONLY (exp))
6369 {
6370 if (GET_CODE (target) == MEM)
6371 target = copy_rtx (target);
6372
6373 RTX_UNCHANGING_P (target) = 1;
6374 }
6375
6376 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6377 int_size_in_bytes (TREE_TYPE (exp)));
6378 return target;
6379 }
6380
6381 case INDIRECT_REF:
6382 {
6383 tree exp1 = TREE_OPERAND (exp, 0);
6384 tree exp2;
6385 tree index;
6386 tree string = string_constant (exp1, &index);
6387 int i;
6388
6389 /* Try to optimize reads from const strings. */
6390 if (string
6391 && TREE_CODE (string) == STRING_CST
6392 && TREE_CODE (index) == INTEGER_CST
6393 && !TREE_INT_CST_HIGH (index)
6394 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6395 && GET_MODE_CLASS (mode) == MODE_INT
6396 && GET_MODE_SIZE (mode) == 1
6397 && modifier != EXPAND_MEMORY_USE_WO)
6398 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6399
6400 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6401 op0 = memory_address (mode, op0);
6402
6403 if (cfun && current_function_check_memory_usage
6404 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6405 {
6406 enum memory_use_mode memory_usage;
6407 memory_usage = get_memory_usage_from_modifier (modifier);
6408
6409 if (memory_usage != MEMORY_USE_DONT)
6410 {
6411 in_check_memory_usage = 1;
6412 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6413 op0, Pmode,
6414 GEN_INT (int_size_in_bytes (type)),
6415 TYPE_MODE (sizetype),
6416 GEN_INT (memory_usage),
6417 TYPE_MODE (integer_type_node));
6418 in_check_memory_usage = 0;
6419 }
6420 }
6421
6422 temp = gen_rtx_MEM (mode, op0);
6423 /* If address was computed by addition,
6424 mark this as an element of an aggregate. */
6425 if (TREE_CODE (exp1) == PLUS_EXPR
6426 || (TREE_CODE (exp1) == SAVE_EXPR
6427 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6428 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6429 || (TREE_CODE (exp1) == ADDR_EXPR
6430 && (exp2 = TREE_OPERAND (exp1, 0))
6431 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6432 MEM_SET_IN_STRUCT_P (temp, 1);
6433
6434 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6435 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6436
6437 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6438 here, because, in C and C++, the fact that a location is accessed
6439 through a pointer to const does not mean that the value there can
6440 never change. Languages where it can never change should
6441 also set TREE_STATIC. */
6442 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6443
6444 /* If we are writing to this object and its type is a record with
6445 readonly fields, we must mark it as readonly so it will
6446 conflict with readonly references to those fields. */
6447 if (modifier == EXPAND_MEMORY_USE_WO
6448 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6449 RTX_UNCHANGING_P (temp) = 1;
6450
6451 return temp;
6452 }
6453
6454 case ARRAY_REF:
6455 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6456 abort ();
6457
6458 {
6459 tree array = TREE_OPERAND (exp, 0);
6460 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6461 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6462 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6463 HOST_WIDE_INT i;
6464
6465 /* Optimize the special case of a zero lower bound.
6466
6467 We convert the low_bound to sizetype to avoid some problems
6468 with constant folding. (E.g. suppose the lower bound is 1,
6469 and its mode is QI. Without the conversion, (ARRAY
6470 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6471 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6472
6473 if (! integer_zerop (low_bound))
6474 index = size_diffop (index, convert (sizetype, low_bound));
6475
6476 /* Fold an expression like: "foo"[2].
6477 This is not done in fold so it won't happen inside &.
6478 Don't fold if this is for wide characters since it's too
6479 difficult to do correctly and this is a very rare case. */
6480
6481 if (TREE_CODE (array) == STRING_CST
6482 && TREE_CODE (index) == INTEGER_CST
6483 && !TREE_INT_CST_HIGH (index)
6484 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6485 && GET_MODE_CLASS (mode) == MODE_INT
6486 && GET_MODE_SIZE (mode) == 1)
6487 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6488
6489 /* If this is a constant index into a constant array,
6490 just get the value from the array. Handle both the cases when
6491 we have an explicit constructor and when our operand is a variable
6492 that was declared const. */
6493
6494 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6495 {
6496 if (TREE_CODE (index) == INTEGER_CST
6497 && TREE_INT_CST_HIGH (index) == 0)
6498 {
6499 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6500
6501 i = TREE_INT_CST_LOW (index);
6502 while (elem && i--)
6503 elem = TREE_CHAIN (elem);
6504 if (elem)
6505 return expand_expr (fold (TREE_VALUE (elem)), target,
6506 tmode, ro_modifier);
6507 }
6508 }
6509
6510 else if (optimize >= 1
6511 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6512 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6513 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6514 {
6515 if (TREE_CODE (index) == INTEGER_CST)
6516 {
6517 tree init = DECL_INITIAL (array);
6518
6519 i = TREE_INT_CST_LOW (index);
6520 if (TREE_CODE (init) == CONSTRUCTOR)
6521 {
6522 tree elem = CONSTRUCTOR_ELTS (init);
6523
6524 while (elem
6525 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6526 elem = TREE_CHAIN (elem);
6527 if (elem)
6528 return expand_expr (fold (TREE_VALUE (elem)), target,
6529 tmode, ro_modifier);
6530 }
6531 else if (TREE_CODE (init) == STRING_CST
6532 && TREE_INT_CST_HIGH (index) == 0
6533 && (TREE_INT_CST_LOW (index)
6534 < TREE_STRING_LENGTH (init)))
6535 return (GEN_INT
6536 (TREE_STRING_POINTER
6537 (init)[TREE_INT_CST_LOW (index)]));
6538 }
6539 }
6540 }
6541
6542 /* ... fall through ... */
6543
6544 case COMPONENT_REF:
6545 case BIT_FIELD_REF:
6546 /* If the operand is a CONSTRUCTOR, we can just extract the
6547 appropriate field if it is present. Don't do this if we have
6548 already written the data since we want to refer to that copy
6549 and varasm.c assumes that's what we'll do. */
6550 if (code != ARRAY_REF
6551 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6552 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6553 {
6554 tree elt;
6555
6556 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6557 elt = TREE_CHAIN (elt))
6558 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6559 /* We can normally use the value of the field in the
6560 CONSTRUCTOR. However, if this is a bitfield in
6561 an integral mode that we can fit in a HOST_WIDE_INT,
6562 we must mask only the number of bits in the bitfield,
6563 since this is done implicitly by the constructor. If
6564 the bitfield does not meet either of those conditions,
6565 we can't do this optimization. */
6566 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6567 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6568 == MODE_INT)
6569 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6570 <= HOST_BITS_PER_WIDE_INT))))
6571 {
6572 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6573 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6574 {
6575 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6576
6577 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6578 {
6579 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6580 op0 = expand_and (op0, op1, target);
6581 }
6582 else
6583 {
6584 enum machine_mode imode
6585 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6586 tree count
6587 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6588 0);
6589
6590 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6591 target, 0);
6592 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6593 target, 0);
6594 }
6595 }
6596
6597 return op0;
6598 }
6599 }
6600
6601 {
6602 enum machine_mode mode1;
6603 int bitsize;
6604 int bitpos;
6605 tree offset;
6606 int volatilep = 0;
6607 unsigned int alignment;
6608 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6609 &mode1, &unsignedp, &volatilep,
6610 &alignment);
6611
6612 /* If we got back the original object, something is wrong. Perhaps
6613 we are evaluating an expression too early. In any event, don't
6614 infinitely recurse. */
6615 if (tem == exp)
6616 abort ();
6617
6618 /* If TEM's type is a union of variable size, pass TARGET to the inner
6619 computation, since it will need a temporary and TARGET is known
6620 to have to do. This occurs in unchecked conversion in Ada. */
6621
6622 op0 = expand_expr (tem,
6623 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6624 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6625 != INTEGER_CST)
6626 ? target : NULL_RTX),
6627 VOIDmode,
6628 (modifier == EXPAND_INITIALIZER
6629 || modifier == EXPAND_CONST_ADDRESS)
6630 ? modifier : EXPAND_NORMAL);
6631
6632 /* If this is a constant, put it into a register if it is a
6633 legitimate constant and OFFSET is 0 and memory if it isn't. */
6634 if (CONSTANT_P (op0))
6635 {
6636 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6637 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6638 && offset == 0)
6639 op0 = force_reg (mode, op0);
6640 else
6641 op0 = validize_mem (force_const_mem (mode, op0));
6642 }
6643
6644 if (offset != 0)
6645 {
6646 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6647
6648 /* If this object is in memory, put it into a register.
6649 This case can't occur in C, but can in Ada if we have
6650 unchecked conversion of an expression from a scalar type to
6651 an array or record type. */
6652 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6653 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6654 {
6655 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6656
6657 mark_temp_addr_taken (memloc);
6658 emit_move_insn (memloc, op0);
6659 op0 = memloc;
6660 }
6661
6662 if (GET_CODE (op0) != MEM)
6663 abort ();
6664
6665 if (GET_MODE (offset_rtx) != ptr_mode)
6666 {
6667 #ifdef POINTERS_EXTEND_UNSIGNED
6668 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6669 #else
6670 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6671 #endif
6672 }
6673
6674 /* A constant address in OP0 can have VOIDmode; we must not
6675 call force_reg in that case, so avoid it. */
6676 if (GET_CODE (op0) == MEM
6677 && GET_MODE (op0) == BLKmode
6678 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6679 && bitsize != 0
6680 && (bitpos % bitsize) == 0
6681 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6682 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6683 {
6684 rtx temp = change_address (op0, mode1,
6685 plus_constant (XEXP (op0, 0),
6686 (bitpos /
6687 BITS_PER_UNIT)));
6688 if (GET_CODE (XEXP (temp, 0)) == REG)
6689 op0 = temp;
6690 else
6691 op0 = change_address (op0, mode1,
6692 force_reg (GET_MODE (XEXP (temp, 0)),
6693 XEXP (temp, 0)));
6694 bitpos = 0;
6695 }
6696
6697
6698 op0 = change_address (op0, VOIDmode,
6699 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6700 force_reg (ptr_mode,
6701 offset_rtx)));
6702 }
6703
6704 /* Don't forget about volatility even if this is a bitfield. */
6705 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6706 {
6707 op0 = copy_rtx (op0);
6708 MEM_VOLATILE_P (op0) = 1;
6709 }
6710
6711 /* Check the access. */
6712 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6713 {
6714 enum memory_use_mode memory_usage;
6715 memory_usage = get_memory_usage_from_modifier (modifier);
6716
6717 if (memory_usage != MEMORY_USE_DONT)
6718 {
6719 rtx to;
6720 int size;
6721
6722 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6723 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6724
6725 /* Check the access right of the pointer. */
6726 if (size > BITS_PER_UNIT)
6727 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6728 to, Pmode,
6729 GEN_INT (size / BITS_PER_UNIT),
6730 TYPE_MODE (sizetype),
6731 GEN_INT (memory_usage),
6732 TYPE_MODE (integer_type_node));
6733 }
6734 }
6735
6736 /* In cases where an aligned union has an unaligned object
6737 as a field, we might be extracting a BLKmode value from
6738 an integer-mode (e.g., SImode) object. Handle this case
6739 by doing the extract into an object as wide as the field
6740 (which we know to be the width of a basic mode), then
6741 storing into memory, and changing the mode to BLKmode.
6742 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6743 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6744 if (mode1 == VOIDmode
6745 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6746 || (modifier != EXPAND_CONST_ADDRESS
6747 && modifier != EXPAND_INITIALIZER
6748 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6749 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6750 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6751 /* If the field isn't aligned enough to fetch as a memref,
6752 fetch it as a bit field. */
6753 || (mode1 != BLKmode
6754 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6755 && ((TYPE_ALIGN (TREE_TYPE (tem))
6756 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6757 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6758 || (modifier != EXPAND_CONST_ADDRESS
6759 && modifier != EXPAND_INITIALIZER
6760 && mode == BLKmode
6761 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6762 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6763 || bitpos % TYPE_ALIGN (type) != 0)))
6764 {
6765 enum machine_mode ext_mode = mode;
6766
6767 if (ext_mode == BLKmode
6768 && ! (target != 0 && GET_CODE (op0) == MEM
6769 && GET_CODE (target) == MEM
6770 && bitpos % BITS_PER_UNIT == 0))
6771 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6772
6773 if (ext_mode == BLKmode)
6774 {
6775 /* In this case, BITPOS must start at a byte boundary and
6776 TARGET, if specified, must be a MEM. */
6777 if (GET_CODE (op0) != MEM
6778 || (target != 0 && GET_CODE (target) != MEM)
6779 || bitpos % BITS_PER_UNIT != 0)
6780 abort ();
6781
6782 op0 = change_address (op0, VOIDmode,
6783 plus_constant (XEXP (op0, 0),
6784 bitpos / BITS_PER_UNIT));
6785 if (target == 0)
6786 target = assign_temp (type, 0, 1, 1);
6787
6788 emit_block_move (target, op0,
6789 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6790 / BITS_PER_UNIT),
6791 1);
6792
6793 return target;
6794 }
6795
6796 op0 = validize_mem (op0);
6797
6798 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6799 mark_reg_pointer (XEXP (op0, 0), alignment);
6800
6801 op0 = extract_bit_field (op0, bitsize, bitpos,
6802 unsignedp, target, ext_mode, ext_mode,
6803 alignment,
6804 int_size_in_bytes (TREE_TYPE (tem)));
6805
6806 /* If the result is a record type and BITSIZE is narrower than
6807 the mode of OP0, an integral mode, and this is a big endian
6808 machine, we must put the field into the high-order bits. */
6809 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6810 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6811 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6812 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6813 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6814 - bitsize),
6815 op0, 1);
6816
6817 if (mode == BLKmode)
6818 {
6819 rtx new = assign_stack_temp (ext_mode,
6820 bitsize / BITS_PER_UNIT, 0);
6821
6822 emit_move_insn (new, op0);
6823 op0 = copy_rtx (new);
6824 PUT_MODE (op0, BLKmode);
6825 MEM_SET_IN_STRUCT_P (op0, 1);
6826 }
6827
6828 return op0;
6829 }
6830
6831 /* If the result is BLKmode, use that to access the object
6832 now as well. */
6833 if (mode == BLKmode)
6834 mode1 = BLKmode;
6835
6836 /* Get a reference to just this component. */
6837 if (modifier == EXPAND_CONST_ADDRESS
6838 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6839 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6840 (bitpos / BITS_PER_UNIT)));
6841 else
6842 op0 = change_address (op0, mode1,
6843 plus_constant (XEXP (op0, 0),
6844 (bitpos / BITS_PER_UNIT)));
6845
6846 if (GET_CODE (op0) == MEM)
6847 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6848
6849 if (GET_CODE (XEXP (op0, 0)) == REG)
6850 mark_reg_pointer (XEXP (op0, 0), alignment);
6851
6852 MEM_SET_IN_STRUCT_P (op0, 1);
6853 MEM_VOLATILE_P (op0) |= volatilep;
6854 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6855 || modifier == EXPAND_CONST_ADDRESS
6856 || modifier == EXPAND_INITIALIZER)
6857 return op0;
6858 else if (target == 0)
6859 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6860
6861 convert_move (target, op0, unsignedp);
6862 return target;
6863 }
6864
6865 /* Intended for a reference to a buffer of a file-object in Pascal.
6866 But it's not certain that a special tree code will really be
6867 necessary for these. INDIRECT_REF might work for them. */
6868 case BUFFER_REF:
6869 abort ();
6870
6871 case IN_EXPR:
6872 {
6873 /* Pascal set IN expression.
6874
6875 Algorithm:
6876 rlo = set_low - (set_low%bits_per_word);
6877 the_word = set [ (index - rlo)/bits_per_word ];
6878 bit_index = index % bits_per_word;
6879 bitmask = 1 << bit_index;
6880 return !!(the_word & bitmask); */
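	   /* Purely illustrative numbers: with bits_per_word == 8,
	      set_low == 3 and index == 13, this gives rlo == 0,
	      the_word == set[1], bit_index == 5 and bitmask == 0x20,
	      i.e. the test reads bit 5 of the set's second byte.  */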
6881
6882 tree set = TREE_OPERAND (exp, 0);
6883 tree index = TREE_OPERAND (exp, 1);
6884 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6885 tree set_type = TREE_TYPE (set);
6886 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6887 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6888 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6889 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6890 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6891 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6892 rtx setaddr = XEXP (setval, 0);
6893 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6894 rtx rlow;
6895 rtx diff, quo, rem, addr, bit, result;
6896
6897 preexpand_calls (exp);
6898
6899 /* If domain is empty, answer is no. Likewise if index is constant
6900 and out of bounds. */
6901 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6902 && TREE_CODE (set_low_bound) == INTEGER_CST
6903 && tree_int_cst_lt (set_high_bound, set_low_bound))
6904 || (TREE_CODE (index) == INTEGER_CST
6905 && TREE_CODE (set_low_bound) == INTEGER_CST
6906 && tree_int_cst_lt (index, set_low_bound))
6907 || (TREE_CODE (set_high_bound) == INTEGER_CST
6908 && TREE_CODE (index) == INTEGER_CST
6909 && tree_int_cst_lt (set_high_bound, index))))
6910 return const0_rtx;
6911
6912 if (target == 0)
6913 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6914
6915 /* If we get here, we have to generate the code for both cases
6916 (in range and out of range). */
6917
6918 op0 = gen_label_rtx ();
6919 op1 = gen_label_rtx ();
6920
6921 if (! (GET_CODE (index_val) == CONST_INT
6922 && GET_CODE (lo_r) == CONST_INT))
6923 {
6924 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6925 GET_MODE (index_val), iunsignedp, 0, op1);
6926 }
6927
6928 if (! (GET_CODE (index_val) == CONST_INT
6929 && GET_CODE (hi_r) == CONST_INT))
6930 {
6931 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6932 GET_MODE (index_val), iunsignedp, 0, op1);
6933 }
6934
6935 /* Calculate the element number of bit zero in the first word
6936 of the set. */
6937 if (GET_CODE (lo_r) == CONST_INT)
6938 rlow = GEN_INT (INTVAL (lo_r)
6939 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6940 else
6941 rlow = expand_binop (index_mode, and_optab, lo_r,
6942 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6943 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6944
6945 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6946 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6947
6948 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6949 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6950 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6951 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6952
6953 addr = memory_address (byte_mode,
6954 expand_binop (index_mode, add_optab, diff,
6955 setaddr, NULL_RTX, iunsignedp,
6956 OPTAB_LIB_WIDEN));
6957
6958 /* Extract the bit we want to examine. */
6959 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6960 gen_rtx_MEM (byte_mode, addr),
6961 make_tree (TREE_TYPE (index), rem),
6962 NULL_RTX, 1);
6963 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6964 GET_MODE (target) == byte_mode ? target : 0,
6965 1, OPTAB_LIB_WIDEN);
6966
6967 if (result != target)
6968 convert_move (target, result, 1);
6969
6970 /* Output the code to handle the out-of-range case. */
6971 emit_jump (op0);
6972 emit_label (op1);
6973 emit_move_insn (target, const0_rtx);
6974 emit_label (op0);
6975 return target;
6976 }
6977
6978 case WITH_CLEANUP_EXPR:
6979 if (RTL_EXPR_RTL (exp) == 0)
6980 {
6981 RTL_EXPR_RTL (exp)
6982 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6983 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6984
6985 /* That's it for this cleanup. */
6986 TREE_OPERAND (exp, 2) = 0;
6987 }
6988 return RTL_EXPR_RTL (exp);
6989
6990 case CLEANUP_POINT_EXPR:
6991 {
6992 /* Start a new binding layer that will keep track of all cleanup
6993 actions to be performed. */
6994 expand_start_bindings (2);
6995
6996 target_temp_slot_level = temp_slot_level;
6997
6998 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6999 /* If we're going to use this value, load it up now. */
7000 if (! ignore)
7001 op0 = force_not_mem (op0);
7002 preserve_temp_slots (op0);
7003 expand_end_bindings (NULL_TREE, 0, 0);
7004 }
7005 return op0;
7006
7007 case CALL_EXPR:
7008 /* Check for a built-in function. */
7009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7010 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7011 == FUNCTION_DECL)
7012 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7013 return expand_builtin (exp, target, subtarget, tmode, ignore);
7014
7015 /* If this call was expanded already by preexpand_calls,
7016 just return the result we got. */
7017 if (CALL_EXPR_RTL (exp) != 0)
7018 return CALL_EXPR_RTL (exp);
7019
7020 return expand_call (exp, target, ignore);
7021
7022 case NON_LVALUE_EXPR:
7023 case NOP_EXPR:
7024 case CONVERT_EXPR:
7025 case REFERENCE_EXPR:
7026 if (TREE_CODE (type) == UNION_TYPE)
7027 {
7028 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7029
7030 /* If both input and output are BLKmode, this conversion
7031 isn't actually doing anything unless we need to make the
7032 alignment stricter. */
7033 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7034 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7035 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7036 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7037 modifier);
7038
7039 if (target == 0)
7040 {
7041 if (mode != BLKmode)
7042 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7043 else
7044 target = assign_temp (type, 0, 1, 1);
7045 }
7046
7047 if (GET_CODE (target) == MEM)
7048 /* Store data into beginning of memory target. */
7049 store_expr (TREE_OPERAND (exp, 0),
7050 change_address (target, TYPE_MODE (valtype), 0), 0);
7051
7052 else if (GET_CODE (target) == REG)
7053 /* Store this field into a union of the proper type. */
7054 store_field (target,
7055 MIN ((int_size_in_bytes (TREE_TYPE
7056 (TREE_OPERAND (exp, 0)))
7057 * BITS_PER_UNIT),
7058 GET_MODE_BITSIZE (mode)),
7059 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7060 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7061 else
7062 abort ();
7063
7064 /* Return the entire union. */
7065 return target;
7066 }
7067
7068 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7069 {
7070 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7071 ro_modifier);
7072
7073 /* If the signedness of the conversion differs and OP0 is
7074 a promoted SUBREG, clear that indication since we now
7075 have to do the proper extension. */
7076 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7077 && GET_CODE (op0) == SUBREG)
7078 SUBREG_PROMOTED_VAR_P (op0) = 0;
7079
7080 return op0;
7081 }
7082
7083 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7084 if (GET_MODE (op0) == mode)
7085 return op0;
7086
7087 /* If OP0 is a constant, just convert it into the proper mode. */
7088 if (CONSTANT_P (op0))
7089 return
7090 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7091 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7092
7093 if (modifier == EXPAND_INITIALIZER)
7094 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7095
7096 if (target == 0)
7097 return
7098 convert_to_mode (mode, op0,
7099 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7100 else
7101 convert_move (target, op0,
7102 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 return target;
7104
7105 case PLUS_EXPR:
7106 /* We come here from MINUS_EXPR when the second operand is a
7107 constant. */
7108 plus_expr:
7109 this_optab = add_optab;
7110
7111 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7112 something else, make sure we add the register to the constant and
7113 then to the other thing. This case can occur during strength
7114 reduction and doing it this way will produce better code if the
7115 frame pointer or argument pointer is eliminated.
7116
7117 fold-const.c will ensure that the constant is always in the inner
7118 PLUS_EXPR, so the only case we need to do anything about is if
7119 sp, ap, or fp is our second argument, in which case we must swap
7120 the innermost first argument and our second argument. */
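/* For illustration: given a tree such as (PLUS_EXPR (PLUS_EXPR x 4) fp),
   where fp stands for an RTL_EXPR holding the frame pointer, the swap
   below would leave (PLUS_EXPR (PLUS_EXPR fp 4) x), so the register and
   the constant are combined first and can usually fold into a single
   address once the frame pointer is eliminated.  */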
7121
7122 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7123 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7124 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7125 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7126 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7127 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7128 {
7129 tree t = TREE_OPERAND (exp, 1);
7130
7131 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7132 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7133 }
7134
7135 /* If the result is to be ptr_mode and we are adding an integer to
7136 something, we might be forming a constant. So try to use
7137 plus_constant. If it produces a sum and we can't accept it,
7138 use force_operand. This allows P = &ARR[const] to generate
7139 efficient code on machines where a SYMBOL_REF is not a valid
7140 address.
7141
7142 If this is an EXPAND_SUM call, always return the sum. */
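/* For illustration: for P = &ARR[3] with 4-byte elements, plus_constant
   can fold the address into something like
   (plus (symbol_ref ARR) (const_int 12)); if that sum is not a legitimate
   address on the target and the caller cannot accept a bare sum,
   force_operand copies it into a register instead.  */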
7143 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7144 || mode == ptr_mode)
7145 {
7146 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7147 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7148 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7149 {
7150 rtx constant_part;
7151
7152 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7153 EXPAND_SUM);
7154 /* Use immed_double_const to ensure that the constant is
7155 truncated according to the mode of OP1, then sign extended
7156 to a HOST_WIDE_INT. Using the constant directly can result
7157 in non-canonical RTL in a 64x32 cross compile. */
7158 constant_part
7159 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7160 (HOST_WIDE_INT) 0,
7161 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7162 op1 = plus_constant (op1, INTVAL (constant_part));
7163 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7164 op1 = force_operand (op1, target);
7165 return op1;
7166 }
7167
7168 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7169 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7170 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7171 {
7172 rtx constant_part;
7173
7174 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7175 EXPAND_SUM);
7176 if (! CONSTANT_P (op0))
7177 {
7178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7179 VOIDmode, modifier);
7180 /* Don't go to both_summands if modifier
7181 says it's not right to return a PLUS. */
7182 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7183 goto binop2;
7184 goto both_summands;
7185 }
7186 /* Use immed_double_const to ensure that the constant is
7187 truncated according to the mode of OP0, then sign extended
7188 to a HOST_WIDE_INT. Using the constant directly can result
7189 in non-canonical RTL in a 64x32 cross compile. */
7190 constant_part
7191 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7192 (HOST_WIDE_INT) 0,
7193 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7194 op0 = plus_constant (op0, INTVAL (constant_part));
7195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7196 op0 = force_operand (op0, target);
7197 return op0;
7198 }
7199 }
7200
7201 /* No sense saving up arithmetic to be done
7202 if it's all in the wrong mode to form part of an address.
7203 And force_operand won't know whether to sign-extend or
7204 zero-extend. */
7205 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7206 || mode != ptr_mode)
7207 goto binop;
7208
7209 preexpand_calls (exp);
7210 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7211 subtarget = 0;
7212
7213 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7214 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7215
7216 both_summands:
7217 /* Make sure any term that's a sum with a constant comes last. */
7218 if (GET_CODE (op0) == PLUS
7219 && CONSTANT_P (XEXP (op0, 1)))
7220 {
7221 temp = op0;
7222 op0 = op1;
7223 op1 = temp;
7224 }
7225 /* If adding to a sum including a constant,
7226 associate it to put the constant outside. */
7227 if (GET_CODE (op1) == PLUS
7228 && CONSTANT_P (XEXP (op1, 1)))
7229 {
7230 rtx constant_term = const0_rtx;
7231
7232 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7233 if (temp != 0)
7234 op0 = temp;
7235 /* Ensure that MULT comes first if there is one. */
7236 else if (GET_CODE (op0) == MULT)
7237 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7238 else
7239 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7240
7241 /* Let's also eliminate constants from op0 if possible. */
7242 op0 = eliminate_constant_term (op0, &constant_term);
7243
7244 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7245 their sum should be a constant. Form it into OP1, since the
7246 result we want will then be OP0 + OP1. */
7247
7248 temp = simplify_binary_operation (PLUS, mode, constant_term,
7249 XEXP (op1, 1));
7250 if (temp != 0)
7251 op1 = temp;
7252 else
7253 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7254 }
7255
7256 /* Put a constant term last and put a multiplication first. */
7257 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7258 temp = op1, op1 = op0, op0 = temp;
7259
7260 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7261 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7262
7263 case MINUS_EXPR:
7264 /* For initializers, we are allowed to return a MINUS of two
7265 symbolic constants. Here we handle all cases when both operands
7266 are constant. */
7267 /* Handle difference of two symbolic constants,
7268 for the sake of an initializer. */
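/* For illustration: an initializer such as
   static long d = (char *) &x - (char *) &y;
   has two symbolic constant operands, so under EXPAND_INITIALIZER this
   can simply return (minus (symbol_ref x) (symbol_ref y)) without
   emitting any insns.  */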
7269 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7270 && really_constant_p (TREE_OPERAND (exp, 0))
7271 && really_constant_p (TREE_OPERAND (exp, 1)))
7272 {
7273 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7274 VOIDmode, ro_modifier);
7275 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7276 VOIDmode, ro_modifier);
7277
7278 /* If the last operand is a CONST_INT, use plus_constant of
7279 the negated constant. Else make the MINUS. */
7280 if (GET_CODE (op1) == CONST_INT)
7281 return plus_constant (op0, - INTVAL (op1));
7282 else
7283 return gen_rtx_MINUS (mode, op0, op1);
7284 }
7285 /* Convert A - const to A + (-const). */
7286 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7287 {
7288 tree negated = fold (build1 (NEGATE_EXPR, type,
7289 TREE_OPERAND (exp, 1)));
7290
7291 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7292 /* If we can't negate the constant in TYPE, leave it alone and
7293 expand_binop will negate it for us. We used to try to do it
7294 here in the signed version of TYPE, but that doesn't work
7295 on POINTER_TYPEs. */;
7296 else
7297 {
7298 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7299 goto plus_expr;
7300 }
7301 }
7302 this_optab = sub_optab;
7303 goto binop;
7304
7305 case MULT_EXPR:
7306 preexpand_calls (exp);
7307 /* If first operand is constant, swap them.
7308 Thus the following special case checks need only
7309 check the second operand. */
7310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7311 {
7312 register tree t1 = TREE_OPERAND (exp, 0);
7313 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7314 TREE_OPERAND (exp, 1) = t1;
7315 }
7316
7317 /* Attempt to return something suitable for generating an
7318 indexed address, for machines that support that. */
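/* For illustration: when the address of a[i] is expanded with EXPAND_SUM,
   the i * 4 part can be returned as (mult (reg i) (const_int 4)); and if
   i itself expanded to (plus (reg j) (const_int 2)), the distributive law
   below would give (plus (mult (reg j) (const_int 4)) (const_int 8)),
   a shape that suits base + index * scale + displacement addressing.  */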
7319
7320 if (modifier == EXPAND_SUM && mode == ptr_mode
7321 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7322 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7323 {
7324 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7325 EXPAND_SUM);
7326
7327 /* Apply distributive law if OP0 is x+c. */
7328 if (GET_CODE (op0) == PLUS
7329 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7330 return
7331 gen_rtx_PLUS
7332 (mode,
7333 gen_rtx_MULT
7334 (mode, XEXP (op0, 0),
7335 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7336 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7337 * INTVAL (XEXP (op0, 1))));
7338
7339 if (GET_CODE (op0) != REG)
7340 op0 = force_operand (op0, NULL_RTX);
7341 if (GET_CODE (op0) != REG)
7342 op0 = copy_to_mode_reg (mode, op0);
7343
7344 return
7345 gen_rtx_MULT (mode, op0,
7346 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7347 }
7348
7349 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7350 subtarget = 0;
7351
7352 /* Check for multiplying things that have been extended
7353 from a narrower type. If this machine supports multiplying
7354 in that narrower type with a result in the desired type,
7355 do it that way, and avoid the explicit type-conversion. */
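/* For illustration: for (int) sa * (int) sb, where sa and sb are shorts,
   this can use a widening multiply (a mulhisi3-style pattern taking
   HImode operands and producing an SImode result) instead of extending
   both operands and doing a full SImode multiply.  The pattern name here
   is only illustrative; what is available depends on the target's optabs.  */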
7356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7357 && TREE_CODE (type) == INTEGER_TYPE
7358 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7359 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7360 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7361 && int_fits_type_p (TREE_OPERAND (exp, 1),
7362 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7363 /* Don't use a widening multiply if a shift will do. */
7364 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7365 > HOST_BITS_PER_WIDE_INT)
7366 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7367 ||
7368 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7370 ==
7371 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7372 /* If both operands are extended, they must either both
7373 be zero-extended or both be sign-extended. */
7374 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7375 ==
7376 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7377 {
7378 enum machine_mode innermode
7379 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7380 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7381 ? smul_widen_optab : umul_widen_optab);
7382 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7383 ? umul_widen_optab : smul_widen_optab);
7384 if (mode == GET_MODE_WIDER_MODE (innermode))
7385 {
7386 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7387 {
7388 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7389 NULL_RTX, VOIDmode, 0);
7390 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7392 VOIDmode, 0);
7393 else
7394 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7395 NULL_RTX, VOIDmode, 0);
7396 goto binop2;
7397 }
7398 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7399 && innermode == word_mode)
7400 {
7401 rtx htem;
7402 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7403 NULL_RTX, VOIDmode, 0);
7404 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7406 VOIDmode, 0);
7407 else
7408 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7409 NULL_RTX, VOIDmode, 0);
7410 temp = expand_binop (mode, other_optab, op0, op1, target,
7411 unsignedp, OPTAB_LIB_WIDEN);
7412 htem = expand_mult_highpart_adjust (innermode,
7413 gen_highpart (innermode, temp),
7414 op0, op1,
7415 gen_highpart (innermode, temp),
7416 unsignedp);
7417 emit_move_insn (gen_highpart (innermode, temp), htem);
7418 return temp;
7419 }
7420 }
7421 }
7422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7424 return expand_mult (mode, op0, op1, target, unsignedp);
7425
7426 case TRUNC_DIV_EXPR:
7427 case FLOOR_DIV_EXPR:
7428 case CEIL_DIV_EXPR:
7429 case ROUND_DIV_EXPR:
7430 case EXACT_DIV_EXPR:
7431 preexpand_calls (exp);
7432 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7433 subtarget = 0;
7434 /* Possible optimization: compute the dividend with EXPAND_SUM
7435 then if the divisor is constant we can optimize the case
7436 where some terms of the dividend have coeffs divisible by it. */
7437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7438 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7439 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7440
7441 case RDIV_EXPR:
7442 this_optab = flodiv_optab;
7443 goto binop;
7444
7445 case TRUNC_MOD_EXPR:
7446 case FLOOR_MOD_EXPR:
7447 case CEIL_MOD_EXPR:
7448 case ROUND_MOD_EXPR:
7449 preexpand_calls (exp);
7450 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7451 subtarget = 0;
7452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7453 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7454 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7455
7456 case FIX_ROUND_EXPR:
7457 case FIX_FLOOR_EXPR:
7458 case FIX_CEIL_EXPR:
7459 abort (); /* Not used for C. */
7460
7461 case FIX_TRUNC_EXPR:
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7463 if (target == 0)
7464 target = gen_reg_rtx (mode);
7465 expand_fix (target, op0, unsignedp);
7466 return target;
7467
7468 case FLOAT_EXPR:
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7470 if (target == 0)
7471 target = gen_reg_rtx (mode);
7472 /* expand_float can't figure out what to do if FROM has VOIDmode.
7473 So give it the correct mode. With -O, cse will optimize this. */
7474 if (GET_MODE (op0) == VOIDmode)
7475 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7476 op0);
7477 expand_float (target, op0,
7478 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7479 return target;
7480
7481 case NEGATE_EXPR:
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7483 temp = expand_unop (mode, neg_optab, op0, target, 0);
7484 if (temp == 0)
7485 abort ();
7486 return temp;
7487
7488 case ABS_EXPR:
7489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7490
7491 /* Handle complex values specially. */
7492 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7493 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7494 return expand_complex_abs (mode, op0, target, unsignedp);
7495
7496 /* Unsigned abs is simply the operand. Testing here means we don't
7497 risk generating incorrect code below. */
7498 if (TREE_UNSIGNED (type))
7499 return op0;
7500
7501 return expand_abs (mode, op0, target,
7502 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7503
7504 case MAX_EXPR:
7505 case MIN_EXPR:
7506 target = original_target;
7507 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7508 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7509 || GET_MODE (target) != mode
7510 || (GET_CODE (target) == REG
7511 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7512 target = gen_reg_rtx (mode);
7513 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7515
7516 /* First try to do it with a special MIN or MAX instruction.
7517 If that does not win, use a conditional jump to select the proper
7518 value. */
7519 this_optab = (TREE_UNSIGNED (type)
7520 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7521 : (code == MIN_EXPR ? smin_optab : smax_optab));
7522
7523 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7524 OPTAB_WIDEN);
7525 if (temp != 0)
7526 return temp;
7527
7528 /* At this point, a MEM target is no longer useful; we will get better
7529 code without it. */
7530
7531 if (GET_CODE (target) == MEM)
7532 target = gen_reg_rtx (mode);
7533
7534 if (target != op0)
7535 emit_move_insn (target, op0);
7536
7537 op0 = gen_label_rtx ();
7538
7539 /* If this mode is an integer too wide to compare properly,
7540 compare word by word. Rely on cse to optimize constant cases. */
7541 if (GET_MODE_CLASS (mode) == MODE_INT
7542 && ! can_compare_p (GE, mode, ccp_jump))
7543 {
7544 if (code == MAX_EXPR)
7545 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7546 target, op1, NULL_RTX, op0);
7547 else
7548 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7549 op1, target, NULL_RTX, op0);
7550 }
7551 else
7552 {
7553 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7554 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7555 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7556 op0);
7557 }
7558 emit_move_insn (target, op1);
7559 emit_label (op0);
7560 return target;
7561
7562 case BIT_NOT_EXPR:
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7564 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7565 if (temp == 0)
7566 abort ();
7567 return temp;
7568
7569 case FFS_EXPR:
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7571 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7572 if (temp == 0)
7573 abort ();
7574 return temp;
7575
7576 /* ??? Can optimize bitwise operations with one arg constant.
7577 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7578 and (a bitwise1 b) bitwise2 b (etc)
7579 but that is probably not worthwhile. */
7580
7581 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7582 boolean values when we want in all cases to compute both of them. In
7583 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7584 as actual zero-or-1 values and then bitwise anding. In cases where
7585 there cannot be any side effects, better code would be made by
7586 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7587 how to recognize those cases. */
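/* For illustration: for (a != 0) & (b != 0), both comparisons are
   evaluated to 0 or 1 and the results are bitwise anded, whereas
   a && b (TRUTH_ANDIF_EXPR) would skip evaluating b when a is 0.
   Treating the former like the latter is only valid when b has no side
   effects, which is the recognition problem mentioned above.  */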
7588
7589 case TRUTH_AND_EXPR:
7590 case BIT_AND_EXPR:
7591 this_optab = and_optab;
7592 goto binop;
7593
7594 case TRUTH_OR_EXPR:
7595 case BIT_IOR_EXPR:
7596 this_optab = ior_optab;
7597 goto binop;
7598
7599 case TRUTH_XOR_EXPR:
7600 case BIT_XOR_EXPR:
7601 this_optab = xor_optab;
7602 goto binop;
7603
7604 case LSHIFT_EXPR:
7605 case RSHIFT_EXPR:
7606 case LROTATE_EXPR:
7607 case RROTATE_EXPR:
7608 preexpand_calls (exp);
7609 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7610 subtarget = 0;
7611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7612 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7613 unsignedp);
7614
7615 /* Could determine the answer when only additive constants differ. Also,
7616 the addition of one can be handled by changing the condition. */
7617 case LT_EXPR:
7618 case LE_EXPR:
7619 case GT_EXPR:
7620 case GE_EXPR:
7621 case EQ_EXPR:
7622 case NE_EXPR:
7623 case UNORDERED_EXPR:
7624 case ORDERED_EXPR:
7625 case UNLT_EXPR:
7626 case UNLE_EXPR:
7627 case UNGT_EXPR:
7628 case UNGE_EXPR:
7629 case UNEQ_EXPR:
7630 preexpand_calls (exp);
7631 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7632 if (temp != 0)
7633 return temp;
7634
7635 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
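/* For illustration, the rough shape of the insns emitted below for
   r = (foo != 0) when r is a suitable register: load foo into r; compare
   r with 0; jump-if-equal over the next insn; move 1 into r; label.
   So r ends up 0 when foo is zero and 1 otherwise.  */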
7636 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7637 && original_target
7638 && GET_CODE (original_target) == REG
7639 && (GET_MODE (original_target)
7640 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7641 {
7642 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7643 VOIDmode, 0);
7644
7645 if (temp != original_target)
7646 temp = copy_to_reg (temp);
7647
7648 op1 = gen_label_rtx ();
7649 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7650 GET_MODE (temp), unsignedp, 0, op1);
7651 emit_move_insn (temp, const1_rtx);
7652 emit_label (op1);
7653 return temp;
7654 }
7655
7656 /* If no set-flag instruction, must generate a conditional
7657 store into a temporary variable. Drop through
7658 and handle this like && and ||. */
7659
7660 case TRUTH_ANDIF_EXPR:
7661 case TRUTH_ORIF_EXPR:
7662 if (! ignore
7663 && (target == 0 || ! safe_from_p (target, exp, 1)
7664 /* Make sure we don't have a hard reg (such as function's return
7665 value) live across basic blocks, if not optimizing. */
7666 || (!optimize && GET_CODE (target) == REG
7667 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7668 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7669
7670 if (target)
7671 emit_clr_insn (target);
7672
7673 op1 = gen_label_rtx ();
7674 jumpifnot (exp, op1);
7675
7676 if (target)
7677 emit_0_to_1_insn (target);
7678
7679 emit_label (op1);
7680 return ignore ? const0_rtx : target;
7681
7682 case TRUTH_NOT_EXPR:
7683 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7684 /* The parser is careful to generate TRUTH_NOT_EXPR
7685 only with operands that are always zero or one. */
7686 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7687 target, 1, OPTAB_LIB_WIDEN);
7688 if (temp == 0)
7689 abort ();
7690 return temp;
7691
7692 case COMPOUND_EXPR:
7693 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7694 emit_queue ();
7695 return expand_expr (TREE_OPERAND (exp, 1),
7696 (ignore ? const0_rtx : target),
7697 VOIDmode, 0);
7698
7699 case COND_EXPR:
7700 /* If we would have a "singleton" (see below) were it not for a
7701 conversion in each arm, bring that conversion back out. */
7702 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7703 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7704 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7705 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7706 {
7707 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7708 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7709
7710 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7711 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7712 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7713 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7714 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7715 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7716 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7717 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7718 return expand_expr (build1 (NOP_EXPR, type,
7719 build (COND_EXPR, TREE_TYPE (true),
7720 TREE_OPERAND (exp, 0),
7721 true, false)),
7722 target, tmode, modifier);
7723 }
7724
7725 {
7726 /* Note that COND_EXPRs whose type is a structure or union
7727 are required to be constructed to contain assignments of
7728 a temporary variable, so that we can evaluate them here
7729 for side effect only. If type is void, we must do likewise. */
7730
7731 /* If an arm of the branch requires a cleanup,
7732 only that cleanup is performed. */
7733
7734 tree singleton = 0;
7735 tree binary_op = 0, unary_op = 0;
7736
7737 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7738 convert it to our mode, if necessary. */
7739 if (integer_onep (TREE_OPERAND (exp, 1))
7740 && integer_zerop (TREE_OPERAND (exp, 2))
7741 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7742 {
7743 if (ignore)
7744 {
7745 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7746 ro_modifier);
7747 return const0_rtx;
7748 }
7749
7750 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7751 if (GET_MODE (op0) == mode)
7752 return op0;
7753
7754 if (target == 0)
7755 target = gen_reg_rtx (mode);
7756 convert_move (target, op0, unsignedp);
7757 return target;
7758 }
7759
7760 /* Check for X ? A + B : A. If we have this, we can copy A to the
7761 output and conditionally add B. Similarly for unary operations.
7762 Don't do this if X has side-effects because those side effects
7763 might affect A or B and the "?" operation is a sequence point in
7764 ANSI. (operand_equal_p tests for side effects.) */
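/* For illustration: for x ? a + 3 : a, with x free of side effects, the
   code below copies a to the output and conditionally adds 3, rather
   than emitting two arms with a jump around one of them.  Here the
   "singleton" is a and the "binary_op" is the a + 3 arm.  */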
7765
7766 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7767 && operand_equal_p (TREE_OPERAND (exp, 2),
7768 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7769 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7770 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7771 && operand_equal_p (TREE_OPERAND (exp, 1),
7772 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7773 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7774 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7775 && operand_equal_p (TREE_OPERAND (exp, 2),
7776 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7777 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7778 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7779 && operand_equal_p (TREE_OPERAND (exp, 1),
7780 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7781 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7782
7783 /* If we are not to produce a result, we have no target. Otherwise,
7784 if a target was specified use it; it will not be used as an
7785 intermediate target unless it is safe. If no target, use a
7786 temporary. */
7787
7788 if (ignore)
7789 temp = 0;
7790 else if (original_target
7791 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7792 || (singleton && GET_CODE (original_target) == REG
7793 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7794 && original_target == var_rtx (singleton)))
7795 && GET_MODE (original_target) == mode
7796 #ifdef HAVE_conditional_move
7797 && (! can_conditionally_move_p (mode)
7798 || GET_CODE (original_target) == REG
7799 || TREE_ADDRESSABLE (type))
7800 #endif
7801 && ! (GET_CODE (original_target) == MEM
7802 && MEM_VOLATILE_P (original_target)))
7803 temp = original_target;
7804 else if (TREE_ADDRESSABLE (type))
7805 abort ();
7806 else
7807 temp = assign_temp (type, 0, 0, 1);
7808
7809 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7810 do the test of X as a store-flag operation, do this as
7811 A + ((X != 0) << log C). Similarly for other simple binary
7812 operators. Only do for C == 1 if BRANCH_COST is low. */
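/* For illustration: y ? a + 4 : a can become a + ((y != 0) << 2) when
   the target can compute y != 0 with a store-flag (scc-style)
   instruction, so no branch is emitted at all.  */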
7813 if (temp && singleton && binary_op
7814 && (TREE_CODE (binary_op) == PLUS_EXPR
7815 || TREE_CODE (binary_op) == MINUS_EXPR
7816 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7817 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7818 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7819 : integer_onep (TREE_OPERAND (binary_op, 1)))
7820 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7821 {
7822 rtx result;
7823 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7824 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7825 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7826 : xor_optab);
7827
7828 /* If we had X ? A : A + 1, do this as A + (X == 0).
7829
7830 We have to invert the truth value here and then put it
7831 back later if do_store_flag fails. We cannot simply copy
7832 TREE_OPERAND (exp, 0) to another variable and modify that
7833 because invert_truthvalue can modify the tree pointed to
7834 by its argument. */
7835 if (singleton == TREE_OPERAND (exp, 1))
7836 TREE_OPERAND (exp, 0)
7837 = invert_truthvalue (TREE_OPERAND (exp, 0));
7838
7839 result = do_store_flag (TREE_OPERAND (exp, 0),
7840 (safe_from_p (temp, singleton, 1)
7841 ? temp : NULL_RTX),
7842 mode, BRANCH_COST <= 1);
7843
7844 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7845 result = expand_shift (LSHIFT_EXPR, mode, result,
7846 build_int_2 (tree_log2
7847 (TREE_OPERAND
7848 (binary_op, 1)),
7849 0),
7850 (safe_from_p (temp, singleton, 1)
7851 ? temp : NULL_RTX), 0);
7852
7853 if (result)
7854 {
7855 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7856 return expand_binop (mode, boptab, op1, result, temp,
7857 unsignedp, OPTAB_LIB_WIDEN);
7858 }
7859 else if (singleton == TREE_OPERAND (exp, 1))
7860 TREE_OPERAND (exp, 0)
7861 = invert_truthvalue (TREE_OPERAND (exp, 0));
7862 }
7863
7864 do_pending_stack_adjust ();
7865 NO_DEFER_POP;
7866 op0 = gen_label_rtx ();
7867
7868 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7869 {
7870 if (temp != 0)
7871 {
7872 /* If the target conflicts with the other operand of the
7873 binary op, we can't use it. Also, we can't use the target
7874 if it is a hard register, because evaluating the condition
7875 might clobber it. */
7876 if ((binary_op
7877 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7878 || (GET_CODE (temp) == REG
7879 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7880 temp = gen_reg_rtx (mode);
7881 store_expr (singleton, temp, 0);
7882 }
7883 else
7884 expand_expr (singleton,
7885 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7886 if (singleton == TREE_OPERAND (exp, 1))
7887 jumpif (TREE_OPERAND (exp, 0), op0);
7888 else
7889 jumpifnot (TREE_OPERAND (exp, 0), op0);
7890
7891 start_cleanup_deferral ();
7892 if (binary_op && temp == 0)
7893 /* Just touch the other operand. */
7894 expand_expr (TREE_OPERAND (binary_op, 1),
7895 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7896 else if (binary_op)
7897 store_expr (build (TREE_CODE (binary_op), type,
7898 make_tree (type, temp),
7899 TREE_OPERAND (binary_op, 1)),
7900 temp, 0);
7901 else
7902 store_expr (build1 (TREE_CODE (unary_op), type,
7903 make_tree (type, temp)),
7904 temp, 0);
7905 op1 = op0;
7906 }
7907 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7908 comparison operator. If we have one of these cases, set the
7909 output to A, branch on A (cse will merge these two references),
7910 then set the output to FOO. */
7911 else if (temp
7912 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7913 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7914 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7915 TREE_OPERAND (exp, 1), 0)
7916 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7917 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7918 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7919 {
7920 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7921 temp = gen_reg_rtx (mode);
7922 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7923 jumpif (TREE_OPERAND (exp, 0), op0);
7924
7925 start_cleanup_deferral ();
7926 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7927 op1 = op0;
7928 }
7929 else if (temp
7930 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7931 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7932 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7933 TREE_OPERAND (exp, 2), 0)
7934 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7935 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7936 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7937 {
7938 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7939 temp = gen_reg_rtx (mode);
7940 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7941 jumpifnot (TREE_OPERAND (exp, 0), op0);
7942
7943 start_cleanup_deferral ();
7944 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7945 op1 = op0;
7946 }
7947 else
7948 {
7949 op1 = gen_label_rtx ();
7950 jumpifnot (TREE_OPERAND (exp, 0), op0);
7951
7952 start_cleanup_deferral ();
7953
7954 /* One branch of the cond can be void, if it never returns. For
7955 example A ? throw : E.  */
7956 if (temp != 0
7957 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7958 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7959 else
7960 expand_expr (TREE_OPERAND (exp, 1),
7961 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7962 end_cleanup_deferral ();
7963 emit_queue ();
7964 emit_jump_insn (gen_jump (op1));
7965 emit_barrier ();
7966 emit_label (op0);
7967 start_cleanup_deferral ();
7968 if (temp != 0
7969 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7970 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7971 else
7972 expand_expr (TREE_OPERAND (exp, 2),
7973 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7974 }
7975
7976 end_cleanup_deferral ();
7977
7978 emit_queue ();
7979 emit_label (op1);
7980 OK_DEFER_POP;
7981
7982 return temp;
7983 }
7984
7985 case TARGET_EXPR:
7986 {
7987 /* Something needs to be initialized, but we didn't know
7988 where that thing was when building the tree. For example,
7989 it could be the return value of a function, or a parameter
7990 to a function which is laid down on the stack, or a temporary
7991 variable which must be passed by reference.
7992
7993 We guarantee that the expression will either be constructed
7994 or copied into our original target. */
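/* For illustration: in C++, "struct S s = f ();" can be represented by a
   TARGET_EXPR whose slot is s itself, so the result of f is constructed
   directly in s instead of in a temporary that would then be copied.  */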
7995
7996 tree slot = TREE_OPERAND (exp, 0);
7997 tree cleanups = NULL_TREE;
7998 tree exp1;
7999
8000 if (TREE_CODE (slot) != VAR_DECL)
8001 abort ();
8002
8003 if (! ignore)
8004 target = original_target;
8005
8006 /* Set this here so that if we get a target that refers to a
8007 register variable that's already been used, put_reg_into_stack
8008 knows that it should fix up those uses. */
8009 TREE_USED (slot) = 1;
8010
8011 if (target == 0)
8012 {
8013 if (DECL_RTL (slot) != 0)
8014 {
8015 target = DECL_RTL (slot);
8016 /* If we have already expanded the slot, don't do
8017 it again. (mrs) */
8018 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8019 return target;
8020 }
8021 else
8022 {
8023 target = assign_temp (type, 2, 0, 1);
8024 /* All temp slots at this level must not conflict. */
8025 preserve_temp_slots (target);
8026 DECL_RTL (slot) = target;
8027 if (TREE_ADDRESSABLE (slot))
8028 {
8029 TREE_ADDRESSABLE (slot) = 0;
8030 mark_addressable (slot);
8031 }
8032
8033 /* Since SLOT is not known to the called function
8034 to belong to its stack frame, we must build an explicit
8035 cleanup. This case occurs when we must build up a reference
8036 to pass the reference as an argument. In this case,
8037 it is very likely that such a reference need not be
8038 built here. */
8039
8040 if (TREE_OPERAND (exp, 2) == 0)
8041 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8042 cleanups = TREE_OPERAND (exp, 2);
8043 }
8044 }
8045 else
8046 {
8047 /* This case does occur when expanding a parameter which
8048 needs to be constructed on the stack. The target
8049 is the actual stack address that we want to initialize.
8050 The function we call will perform the cleanup in this case. */
8051
8052 /* If we have already assigned it space, use that space,
8053 not the target that we were passed in, as our target
8054 parameter is only a hint. */
8055 if (DECL_RTL (slot) != 0)
8056 {
8057 target = DECL_RTL (slot);
8058 /* If we have already expanded the slot, don't do
8059 it again. (mrs) */
8060 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8061 return target;
8062 }
8063 else
8064 {
8065 DECL_RTL (slot) = target;
8066 /* If we must have an addressable slot, then make sure that
8067 the RTL that we just stored in slot is OK. */
8068 if (TREE_ADDRESSABLE (slot))
8069 {
8070 TREE_ADDRESSABLE (slot) = 0;
8071 mark_addressable (slot);
8072 }
8073 }
8074 }
8075
8076 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8077 /* Mark it as expanded. */
8078 TREE_OPERAND (exp, 1) = NULL_TREE;
8079
8080 store_expr (exp1, target, 0);
8081
8082 expand_decl_cleanup (NULL_TREE, cleanups);
8083
8084 return target;
8085 }
8086
8087 case INIT_EXPR:
8088 {
8089 tree lhs = TREE_OPERAND (exp, 0);
8090 tree rhs = TREE_OPERAND (exp, 1);
8091 tree noncopied_parts = 0;
8092 tree lhs_type = TREE_TYPE (lhs);
8093
8094 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8095 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8096 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8097 TYPE_NONCOPIED_PARTS (lhs_type));
8098 while (noncopied_parts != 0)
8099 {
8100 expand_assignment (TREE_VALUE (noncopied_parts),
8101 TREE_PURPOSE (noncopied_parts), 0, 0);
8102 noncopied_parts = TREE_CHAIN (noncopied_parts);
8103 }
8104 return temp;
8105 }
8106
8107 case MODIFY_EXPR:
8108 {
8109 /* If lhs is complex, expand calls in rhs before computing it.
8110 That's so we don't compute a pointer and save it over a call.
8111 If lhs is simple, compute it first so we can give it as a
8112 target if the rhs is just a call. This avoids an extra temp and copy
8113 and that prevents a partial-subsumption which makes bad code.
8114 Actually we could treat component_ref's of vars like vars. */
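/* For illustration: for "*p = f ();" (a non-trivial lhs) the call is
   pre-expanded, so the pointer value is not computed and then kept live
   across the call; for "x = f ();" (a simple lhs) x can be handed to the
   call expansion as its target, avoiding a temporary and an extra copy.  */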
8115
8116 tree lhs = TREE_OPERAND (exp, 0);
8117 tree rhs = TREE_OPERAND (exp, 1);
8118 tree noncopied_parts = 0;
8119 tree lhs_type = TREE_TYPE (lhs);
8120
8121 temp = 0;
8122
8123 if (TREE_CODE (lhs) != VAR_DECL
8124 && TREE_CODE (lhs) != RESULT_DECL
8125 && TREE_CODE (lhs) != PARM_DECL
8126 && ! (TREE_CODE (lhs) == INDIRECT_REF
8127 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8128 preexpand_calls (exp);
8129
8130 /* Check for |= or &= of a bitfield of size one into another bitfield
8131 of size 1. In this case, (unless we need the result of the
8132 assignment) we can do this more efficiently with a
8133 test followed by an assignment, if necessary.
8134
8135 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8136 things change so we do, this code should be enhanced to
8137 support it. */
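/* For illustration: with one-bit fields and the result unused,
   "s.a |= t.b;" is emitted below roughly as "if (t.b) s.a = 1;" and
   "s.a &= t.b;" roughly as "if (!t.b) s.a = 0;", that is, a test and a
   conditional store rather than a read-modify-write of s.a.  */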
8138 if (ignore
8139 && TREE_CODE (lhs) == COMPONENT_REF
8140 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8141 || TREE_CODE (rhs) == BIT_AND_EXPR)
8142 && TREE_OPERAND (rhs, 0) == lhs
8143 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8144 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8145 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8146 {
8147 rtx label = gen_label_rtx ();
8148
8149 do_jump (TREE_OPERAND (rhs, 1),
8150 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8151 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8152 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8153 (TREE_CODE (rhs) == BIT_IOR_EXPR
8154 ? integer_one_node
8155 : integer_zero_node)),
8156 0, 0);
8157 do_pending_stack_adjust ();
8158 emit_label (label);
8159 return const0_rtx;
8160 }
8161
8162 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8163 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8164 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8165 TYPE_NONCOPIED_PARTS (lhs_type));
8166
8167 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8168 while (noncopied_parts != 0)
8169 {
8170 expand_assignment (TREE_PURPOSE (noncopied_parts),
8171 TREE_VALUE (noncopied_parts), 0, 0);
8172 noncopied_parts = TREE_CHAIN (noncopied_parts);
8173 }
8174 return temp;
8175 }
8176
8177 case RETURN_EXPR:
8178 if (!TREE_OPERAND (exp, 0))
8179 expand_null_return ();
8180 else
8181 expand_return (TREE_OPERAND (exp, 0));
8182 return const0_rtx;
8183
8184 case PREINCREMENT_EXPR:
8185 case PREDECREMENT_EXPR:
8186 return expand_increment (exp, 0, ignore);
8187
8188 case POSTINCREMENT_EXPR:
8189 case POSTDECREMENT_EXPR:
8190 /* Faster to treat as pre-increment if result is not used. */
8191 return expand_increment (exp, ! ignore, ignore);
8192
8193 case ADDR_EXPR:
8194 /* If nonzero, TEMP will be set to the address of something that might
8195 be a MEM corresponding to a stack slot. */
8196 temp = 0;
8197
8198 /* Are we taking the address of a nested function? */
8199 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8200 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8201 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8202 && ! TREE_STATIC (exp))
8203 {
8204 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8205 op0 = force_operand (op0, target);
8206 }
8207 /* If we are taking the address of something erroneous, just
8208 return a zero. */
8209 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8210 return const0_rtx;
8211 else
8212 {
8213 /* We make sure to pass const0_rtx down if we came in with
8214 ignore set, to avoid doing the cleanups twice for something. */
8215 op0 = expand_expr (TREE_OPERAND (exp, 0),
8216 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8217 (modifier == EXPAND_INITIALIZER
8218 ? modifier : EXPAND_CONST_ADDRESS));
8219
8220 /* If we are going to ignore the result, OP0 will have been set
8221 to const0_rtx, so just return it. Don't get confused and
8222 think we are taking the address of the constant. */
8223 if (ignore)
8224 return op0;
8225
8226 op0 = protect_from_queue (op0, 0);
8227
8228 /* We would like the object in memory. If it is a constant, we can
8229 have it be statically allocated into memory. For a non-constant,
8230 we need to allocate some memory and store the value into it. */
8231
8232 if (CONSTANT_P (op0))
8233 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8234 op0);
8235 else if (GET_CODE (op0) == MEM)
8236 {
8237 mark_temp_addr_taken (op0);
8238 temp = XEXP (op0, 0);
8239 }
8240
8241 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8242 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8243 {
8244 /* If this object is in a register, it must not
8245 be BLKmode. */
8246 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8247 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8248
8249 mark_temp_addr_taken (memloc);
8250 emit_move_insn (memloc, op0);
8251 op0 = memloc;
8252 }
8253
8254 if (GET_CODE (op0) != MEM)
8255 abort ();
8256
8257 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8258 {
8259 temp = XEXP (op0, 0);
8260 #ifdef POINTERS_EXTEND_UNSIGNED
8261 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8262 && mode == ptr_mode)
8263 temp = convert_memory_address (ptr_mode, temp);
8264 #endif
8265 return temp;
8266 }
8267
8268 op0 = force_operand (XEXP (op0, 0), target);
8269 }
8270
8271 if (flag_force_addr && GET_CODE (op0) != REG)
8272 op0 = force_reg (Pmode, op0);
8273
8274 if (GET_CODE (op0) == REG
8275 && ! REG_USERVAR_P (op0))
8276 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8277
8278 /* If we might have had a temp slot, add an equivalent address
8279 for it. */
8280 if (temp != 0)
8281 update_temp_slot_address (temp, op0);
8282
8283 #ifdef POINTERS_EXTEND_UNSIGNED
8284 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8285 && mode == ptr_mode)
8286 op0 = convert_memory_address (ptr_mode, op0);
8287 #endif
8288
8289 return op0;
8290
8291 case ENTRY_VALUE_EXPR:
8292 abort ();
8293
8294 /* COMPLEX type for Extended Pascal & Fortran */
8295 case COMPLEX_EXPR:
8296 {
8297 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8298 rtx insns;
8299
8300 /* Get the rtx code of the operands. */
8301 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8302 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8303
8304 if (! target)
8305 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8306
8307 start_sequence ();
8308
8309 /* Move the real (op0) and imaginary (op1) parts to their location. */
8310 emit_move_insn (gen_realpart (mode, target), op0);
8311 emit_move_insn (gen_imagpart (mode, target), op1);
8312
8313 insns = get_insns ();
8314 end_sequence ();
8315
8316 /* Complex construction should appear as a single unit. */
8317 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8318 each with a separate pseudo as destination.
8319 It's not correct for flow to treat them as a unit. */
8320 if (GET_CODE (target) != CONCAT)
8321 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8322 else
8323 emit_insns (insns);
8324
8325 return target;
8326 }
8327
8328 case REALPART_EXPR:
8329 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8330 return gen_realpart (mode, op0);
8331
8332 case IMAGPART_EXPR:
8333 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8334 return gen_imagpart (mode, op0);
8335
8336 case CONJ_EXPR:
8337 {
8338 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8339 rtx imag_t;
8340 rtx insns;
8341
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8343
8344 if (! target)
8345 target = gen_reg_rtx (mode);
8346
8347 start_sequence ();
8348
8349 /* Store the realpart and the negated imagpart to target. */
8350 emit_move_insn (gen_realpart (partmode, target),
8351 gen_realpart (partmode, op0));
8352
8353 imag_t = gen_imagpart (partmode, target);
8354 temp = expand_unop (partmode, neg_optab,
8355 gen_imagpart (partmode, op0), imag_t, 0);
8356 if (temp != imag_t)
8357 emit_move_insn (imag_t, temp);
8358
8359 insns = get_insns ();
8360 end_sequence ();
8361
8362 /* Conjugate should appear as a single unit.
8363 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8364 each with a separate pseudo as destination.
8365 It's not correct for flow to treat them as a unit. */
8366 if (GET_CODE (target) != CONCAT)
8367 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8368 else
8369 emit_insns (insns);
8370
8371 return target;
8372 }
8373
8374 case TRY_CATCH_EXPR:
8375 {
8376 tree handler = TREE_OPERAND (exp, 1);
8377
8378 expand_eh_region_start ();
8379
8380 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8381
8382 expand_eh_region_end (handler);
8383
8384 return op0;
8385 }
8386
8387 case TRY_FINALLY_EXPR:
8388 {
8389 tree try_block = TREE_OPERAND (exp, 0);
8390 tree finally_block = TREE_OPERAND (exp, 1);
8391 rtx finally_label = gen_label_rtx ();
8392 rtx done_label = gen_label_rtx ();
8393 rtx return_link = gen_reg_rtx (Pmode);
8394 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8395 (tree) finally_label, (tree) return_link);
8396 TREE_SIDE_EFFECTS (cleanup) = 1;
8397
8398 /* Start a new binding layer that will keep track of all cleanup
8399 actions to be performed. */
8400 expand_start_bindings (2);
8401
8402 target_temp_slot_level = temp_slot_level;
8403
8404 expand_decl_cleanup (NULL_TREE, cleanup);
8405 op0 = expand_expr (try_block, target, tmode, modifier);
8406
8407 preserve_temp_slots (op0);
8408 expand_end_bindings (NULL_TREE, 0, 0);
8409 emit_jump (done_label);
8410 emit_label (finally_label);
8411 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8412 emit_indirect_jump (return_link);
8413 emit_label (done_label);
8414 return op0;
8415 }
8416
8417 case GOTO_SUBROUTINE_EXPR:
8418 {
8419 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8420 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8421 rtx return_address = gen_label_rtx ();
8422 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8423 emit_jump (subr);
8424 emit_label (return_address);
8425 return const0_rtx;
8426 }
8427
8428 case POPDCC_EXPR:
8429 {
8430 rtx dcc = get_dynamic_cleanup_chain ();
8431 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8432 return const0_rtx;
8433 }
8434
8435 case POPDHC_EXPR:
8436 {
8437 rtx dhc = get_dynamic_handler_chain ();
8438 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8439 return const0_rtx;
8440 }
8441
8442 case VA_ARG_EXPR:
8443 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8444
8445 default:
8446 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8447 }
8448
8449 /* Here to do an ordinary binary operator, generating an instruction
8450 from the optab already placed in `this_optab'. */
8451 binop:
8452 preexpand_calls (exp);
8453 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8454 subtarget = 0;
8455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8457 binop2:
8458 temp = expand_binop (mode, this_optab, op0, op1, target,
8459 unsignedp, OPTAB_LIB_WIDEN);
8460 if (temp == 0)
8461 abort ();
8462 return temp;
8463 }
8464 \f
8465 /* Similar to expand_expr, except that we don't specify a target, target
8466 mode, or modifier and we return the alignment of the inner type. This is
8467 used in cases where it is not necessary to align the result to the
8468 alignment of its type as long as we know the alignment of the result, for
8469 example for comparisons of BLKmode values. */
8470
8471 static rtx
8472 expand_expr_unaligned (exp, palign)
8473 register tree exp;
8474 unsigned int *palign;
8475 {
8476 register rtx op0;
8477 tree type = TREE_TYPE (exp);
8478 register enum machine_mode mode = TYPE_MODE (type);
8479
8480 /* Default the alignment we return to that of the type. */
8481 *palign = TYPE_ALIGN (type);
8482
8483 /* The only cases in which we do anything special is if the resulting mode
8484 is BLKmode. */
8485 if (mode != BLKmode)
8486 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8487
8488 switch (TREE_CODE (exp))
8489 {
8490 case CONVERT_EXPR:
8491 case NOP_EXPR:
8492 case NON_LVALUE_EXPR:
8493 /* Conversions between BLKmode values don't change the underlying
8494 alignment or value. */
8495 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8496 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8497 break;
8498
8499 case ARRAY_REF:
8500 /* Much of the code for this case is copied directly from expand_expr.
8501 We need to duplicate it here because we will do something different
8502 in the fall-through case, so we need to handle the same exceptions
8503 it does. */
8504 {
8505 tree array = TREE_OPERAND (exp, 0);
8506 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8507 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8508 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8509 HOST_WIDE_INT i;
8510
8511 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8512 abort ();
8513
8514 /* Optimize the special-case of a zero lower bound.
8515
8516 We convert the low_bound to sizetype to avoid some problems
8517 with constant folding. (E.g. suppose the lower bound is 1,
8518 and its mode is QI. Without the conversion, (ARRAY
8519 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8520 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8521
8522 if (! integer_zerop (low_bound))
8523 index = size_diffop (index, convert (sizetype, low_bound));
8524
8525 /* If this is a constant index into a constant array,
8526 just get the value from the array. Handle both the cases when
8527 we have an explicit constructor and when our operand is a variable
8528 that was declared const. */
8529
8530 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8531 {
8532 if (TREE_CODE (index) == INTEGER_CST
8533 && TREE_INT_CST_HIGH (index) == 0)
8534 {
8535 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8536
8537 i = TREE_INT_CST_LOW (index);
8538 while (elem && i--)
8539 elem = TREE_CHAIN (elem);
8540 if (elem)
8541 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8542 palign);
8543 }
8544 }
8545
8546 else if (optimize >= 1
8547 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8548 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8549 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8550 {
8551 if (TREE_CODE (index) == INTEGER_CST)
8552 {
8553 tree init = DECL_INITIAL (array);
8554
8555 i = TREE_INT_CST_LOW (index);
8556 if (TREE_CODE (init) == CONSTRUCTOR)
8557 {
8558 tree elem = CONSTRUCTOR_ELTS (init);
8559
8560 while (elem
8561 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8562 elem = TREE_CHAIN (elem);
8563 if (elem)
8564 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8565 palign);
8566 }
8567 }
8568 }
8569 }
8570
8571 /* ... fall through ... */
8572
8573 case COMPONENT_REF:
8574 case BIT_FIELD_REF:
8575 /* If the operand is a CONSTRUCTOR, we can just extract the
8576 appropriate field if it is present. Don't do this if we have
8577 already written the data since we want to refer to that copy
8578 and varasm.c assumes that's what we'll do. */
8579 if (TREE_CODE (exp) != ARRAY_REF
8580 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8581 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8582 {
8583 tree elt;
8584
8585 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8586 elt = TREE_CHAIN (elt))
8587 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8588 /* Note that unlike the case in expand_expr, we know this is
8589 BLKmode and hence not an integer. */
8590 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8591 }
8592
8593 {
8594 enum machine_mode mode1;
8595 int bitsize;
8596 int bitpos;
8597 tree offset;
8598 int volatilep = 0;
8599 unsigned int alignment;
8600 int unsignedp;
8601 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8602 &mode1, &unsignedp, &volatilep,
8603 &alignment);
8604
8605 /* If we got back the original object, something is wrong. Perhaps
8606 we are evaluating an expression too early. In any event, don't
8607 infinitely recurse. */
8608 if (tem == exp)
8609 abort ();
8610
8611 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8612
8613 /* If this is a constant, put it into a register if it is a
8614 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8615 if (CONSTANT_P (op0))
8616 {
8617 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8618
8619 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8620 && offset == 0)
8621 op0 = force_reg (inner_mode, op0);
8622 else
8623 op0 = validize_mem (force_const_mem (inner_mode, op0));
8624 }
8625
8626 if (offset != 0)
8627 {
8628 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8629
8630 /* If this object is in a register, put it into memory.
8631 This case can't occur in C, but can in Ada if we have
8632 unchecked conversion of an expression from a scalar type to
8633 an array or record type. */
8634 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8635 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8636 {
8637 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8638
8639 mark_temp_addr_taken (memloc);
8640 emit_move_insn (memloc, op0);
8641 op0 = memloc;
8642 }
8643
8644 if (GET_CODE (op0) != MEM)
8645 abort ();
8646
8647 if (GET_MODE (offset_rtx) != ptr_mode)
8648 {
8649 #ifdef POINTERS_EXTEND_UNSIGNED
8650 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8651 #else
8652 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8653 #endif
8654 }
8655
8656 op0 = change_address (op0, VOIDmode,
8657 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8658 force_reg (ptr_mode,
8659 offset_rtx)));
8660 }
8661
8662 /* Don't forget about volatility even if this is a bitfield. */
8663 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8664 {
8665 op0 = copy_rtx (op0);
8666 MEM_VOLATILE_P (op0) = 1;
8667 }
8668
8669 /* Check the access. */
8670 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8671 {
8672 rtx to;
8673 int size;
8674
8675 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8676 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8677
8678 /* Check the access rights of the pointer. */
8679 if (size > BITS_PER_UNIT)
8680 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8681 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8682 TYPE_MODE (sizetype),
8683 GEN_INT (MEMORY_USE_RO),
8684 TYPE_MODE (integer_type_node));
8685 }
8686
8687 /* In cases where an aligned union has an unaligned object
8688 as a field, we might be extracting a BLKmode value from
8689 an integer-mode (e.g., SImode) object. Handle this case
8690 by doing the extract into an object as wide as the field
8691 (which we know to be the width of a basic mode), then
8692 storing into memory, and changing the mode to BLKmode.
8693 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8694 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8695 if (mode1 == VOIDmode
8696 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8697 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8698 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8699 || bitpos % TYPE_ALIGN (type) != 0)))
8700 {
8701 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8702
8703 if (ext_mode == BLKmode)
8704 {
8705 /* In this case, BITPOS must start at a byte boundary. */
8706 if (GET_CODE (op0) != MEM
8707 || bitpos % BITS_PER_UNIT != 0)
8708 abort ();
8709
8710 op0 = change_address (op0, VOIDmode,
8711 plus_constant (XEXP (op0, 0),
8712 bitpos / BITS_PER_UNIT));
8713 }
8714 else
8715 {
8716 rtx new = assign_stack_temp (ext_mode,
8717 bitsize / BITS_PER_UNIT, 0);
8718
8719 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8720 unsignedp, NULL_RTX, ext_mode,
8721 ext_mode, alignment,
8722 int_size_in_bytes (TREE_TYPE (tem)));
8723
8724 /* If the result is a record type and BITSIZE is narrower than
8725 the mode of OP0, an integral mode, and this is a big endian
8726 machine, we must put the field into the high-order bits. */
8727 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8728 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8729 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8730 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8731 size_int (GET_MODE_BITSIZE
8732 (GET_MODE (op0))
8733 - bitsize),
8734 op0, 1);
8735
8736
8737 emit_move_insn (new, op0);
8738 op0 = copy_rtx (new);
8739 PUT_MODE (op0, BLKmode);
8740 }
8741 }
8742 else
8743 /* Get a reference to just this component. */
8744 op0 = change_address (op0, mode1,
8745 plus_constant (XEXP (op0, 0),
8746 (bitpos / BITS_PER_UNIT)));
8747
8748 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8749
8750 /* Adjust the alignment in case the bit position is not
8751 a multiple of the alignment of the inner object. */
8752 while (bitpos % alignment != 0)
8753 alignment >>= 1;
8754
8755 if (GET_CODE (XEXP (op0, 0)) == REG)
8756 mark_reg_pointer (XEXP (op0, 0), alignment);
8757
8758 MEM_IN_STRUCT_P (op0) = 1;
8759 MEM_VOLATILE_P (op0) |= volatilep;
8760
8761 *palign = alignment;
8762 return op0;
8763 }
8764
8765 default:
8766 break;
8767
8768 }
8769
8770 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8771 }
8772 \f
8773 /* Return the tree node if ARG corresponds to a string constant or zero
8774 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8775 in bytes within the string that ARG is accessing. The type of the
8776 offset will be `sizetype'. */
8777
8778 tree
8779 string_constant (arg, ptr_offset)
8780 tree arg;
8781 tree *ptr_offset;
8782 {
8783 STRIP_NOPS (arg);
8784
8785 if (TREE_CODE (arg) == ADDR_EXPR
8786 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8787 {
8788 *ptr_offset = size_zero_node;
8789 return TREE_OPERAND (arg, 0);
8790 }
8791 else if (TREE_CODE (arg) == PLUS_EXPR)
8792 {
8793 tree arg0 = TREE_OPERAND (arg, 0);
8794 tree arg1 = TREE_OPERAND (arg, 1);
8795
8796 STRIP_NOPS (arg0);
8797 STRIP_NOPS (arg1);
8798
8799 if (TREE_CODE (arg0) == ADDR_EXPR
8800 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8801 {
8802 *ptr_offset = convert (sizetype, arg1);
8803 return TREE_OPERAND (arg0, 0);
8804 }
8805 else if (TREE_CODE (arg1) == ADDR_EXPR
8806 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8807 {
8808 *ptr_offset = convert (sizetype, arg0);
8809 return TREE_OPERAND (arg1, 0);
8810 }
8811 }
8812
8813 return 0;
8814 }
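/* A minimal usage sketch of string_constant (the caller shown is
   hypothetical; string_constant and TREE_STRING_POINTER are the only
   real names used):

     tree offset;
     tree str = string_constant (arg, &offset);

   For an ARG representing "hello" + 2, STR is the STRING_CST for
   "hello" and OFFSET is the sizetype constant 2, so the byte being
   addressed is TREE_STRING_POINTER (str)[2], i.e. 'l'.  */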
8815 \f
8816 /* Expand code for a post- or pre- increment or decrement
8817 and return the RTX for the result.
8818 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8819
8820 static rtx
8821 expand_increment (exp, post, ignore)
8822 register tree exp;
8823 int post, ignore;
8824 {
8825 register rtx op0, op1;
8826 register rtx temp, value;
8827 register tree incremented = TREE_OPERAND (exp, 0);
8828 optab this_optab = add_optab;
8829 int icode;
8830 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8831 int op0_is_copy = 0;
8832 int single_insn = 0;
8833 /* 1 means we can't store into OP0 directly,
8834 because it is a subreg narrower than a word,
8835 and we don't dare clobber the rest of the word. */
8836 int bad_subreg = 0;
8837
8838 /* Stabilize any component ref that might need to be
8839 evaluated more than once below. */
8840 if (!post
8841 || TREE_CODE (incremented) == BIT_FIELD_REF
8842 || (TREE_CODE (incremented) == COMPONENT_REF
8843 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8844 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8845 incremented = stabilize_reference (incremented);
8846 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8847 ones into save exprs so that they don't accidentally get evaluated
8848 more than once by the code below. */
8849 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8850 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8851 incremented = save_expr (incremented);
8852
8853 /* Compute the operands as RTX.
8854 Note whether OP0 is the actual lvalue or a copy of it:
8855 I believe it is a copy iff it is a register or subreg
8856 and insns were generated in computing it. */
8857
8858 temp = get_last_insn ();
8859 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8860
8861 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8862 in place but instead must do sign- or zero-extension during assignment,
8863 so we copy it into a new register and let the code below use it as
8864 a copy.
8865
8866 Note that we can safely modify this SUBREG since it is known not to be
8867 shared (it was made by the expand_expr call above). */
8868
8869 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8870 {
8871 if (post)
8872 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8873 else
8874 bad_subreg = 1;
8875 }
8876 else if (GET_CODE (op0) == SUBREG
8877 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8878 {
8879 /* We cannot increment this SUBREG in place. If we are
8880 post-incrementing, get a copy of the old value. Otherwise,
8881 just mark that we cannot increment in place. */
8882 if (post)
8883 op0 = copy_to_reg (op0);
8884 else
8885 bad_subreg = 1;
8886 }
8887
8888 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8889 && temp != get_last_insn ());
8890 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8891 EXPAND_MEMORY_USE_BAD);
8892
8893 /* Decide whether incrementing or decrementing. */
8894 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8895 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8896 this_optab = sub_optab;
8897
8898 /* Convert decrement by a constant into a negative increment. */
8899 if (this_optab == sub_optab
8900 && GET_CODE (op1) == CONST_INT)
8901 {
8902 op1 = GEN_INT (- INTVAL (op1));
8903 this_optab = add_optab;
8904 }
8905
8906 /* For a preincrement, see if we can do this with a single instruction. */
8907 if (!post)
8908 {
8909 icode = (int) this_optab->handlers[(int) mode].insn_code;
8910 if (icode != (int) CODE_FOR_nothing
8911 /* Make sure that OP0 is valid for operands 0 and 1
8912 of the insn we want to queue. */
8913 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8914 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8915 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8916 single_insn = 1;
8917 }
8918
8919 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8920 then we cannot just increment OP0. We must therefore contrive to
8921 increment the original value. Then, for postincrement, we can return
8922 OP0 since it is a copy of the old value. For preincrement, expand here
8923 unless we can do it with a single insn.
8924
8925 Likewise if storing directly into OP0 would clobber high bits
8926 we need to preserve (bad_subreg). */
8927 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8928 {
8929 /* This is the easiest way to increment the value wherever it is.
8930 Problems with multiple evaluation of INCREMENTED are prevented
8931 because either (1) it is a component_ref or preincrement,
8932 in which case it was stabilized above, or (2) it is an array_ref
8933 with constant index in an array in a register, which is
8934 safe to reevaluate. */
8935 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8936 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8937 ? MINUS_EXPR : PLUS_EXPR),
8938 TREE_TYPE (exp),
8939 incremented,
8940 TREE_OPERAND (exp, 1));
8941
8942 while (TREE_CODE (incremented) == NOP_EXPR
8943 || TREE_CODE (incremented) == CONVERT_EXPR)
8944 {
8945 newexp = convert (TREE_TYPE (incremented), newexp);
8946 incremented = TREE_OPERAND (incremented, 0);
8947 }
8948
8949 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8950 return post ? op0 : temp;
8951 }
8952
8953 if (post)
8954 {
8955 /* We have a true reference to the value in OP0.
8956 If there is an insn to add or subtract in this mode, queue it.
8957 Queueing the increment insn avoids the register shuffling
8958 that often results if we must increment now and first save
8959 the old value for subsequent use. */
8960
8961 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8962 op0 = stabilize (op0);
8963 #endif
8964
8965 icode = (int) this_optab->handlers[(int) mode].insn_code;
8966 if (icode != (int) CODE_FOR_nothing
8967 /* Make sure that OP0 is valid for operands 0 and 1
8968 of the insn we want to queue. */
8969 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8970 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8971 {
8972 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8973 op1 = force_reg (mode, op1);
8974
8975 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8976 }
8977 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8978 {
8979 rtx addr = (general_operand (XEXP (op0, 0), mode)
8980 ? force_reg (Pmode, XEXP (op0, 0))
8981 : copy_to_reg (XEXP (op0, 0)));
8982 rtx temp, result;
8983
8984 op0 = change_address (op0, VOIDmode, addr);
8985 temp = force_reg (GET_MODE (op0), op0);
8986 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8987 op1 = force_reg (mode, op1);
8988
8989 /* The increment queue is LIFO, so we have to `queue'
8990 the instructions in reverse order. */
8991 enqueue_insn (op0, gen_move_insn (op0, temp));
8992 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8993 return result;
8994 }
8995 }
8996
8997 /* Preincrement, or we can't increment with one simple insn. */
8998 if (post)
8999 /* Save a copy of the value before inc or dec, to return it later. */
9000 temp = value = copy_to_reg (op0);
9001 else
9002 /* Arrange to return the incremented value. */
9003 /* Copy the rtx because expand_binop will protect from the queue,
9004 and the results of that would be invalid for us to return
9005 if our caller does emit_queue before using our result. */
9006 temp = copy_rtx (value = op0);
9007
9008 /* Increment however we can. */
9009 op1 = expand_binop (mode, this_optab, value, op1,
9010 current_function_check_memory_usage ? NULL_RTX : op0,
9011 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9012 /* Make sure the value is stored into OP0. */
9013 if (op1 != op0)
9014 emit_move_insn (op0, op1);
9015
9016 return temp;
9017 }
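/* A rough illustration of the queueing done above (purely for
   exposition): for a post-increment such as

     y = x++;

   where X lives in a register and the target has a suitable add insn,
   the value returned to the caller represents the old value of X,
   while an insn equivalent to "x = x + 1" is only queued and is not
   emitted until the next emit_queue call, after the old value has been
   used.  */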
9018 \f
9019 /* Expand all function calls contained within EXP, innermost ones first.
9020 But don't look within expressions that have sequence points.
9021 For each CALL_EXPR, record the rtx for its value
9022 in the CALL_EXPR_RTL field. */
9023
9024 static void
9025 preexpand_calls (exp)
9026 tree exp;
9027 {
9028 register int nops, i;
9029 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9030
9031 if (! do_preexpand_calls)
9032 return;
9033
9034 /* Only expressions and references can contain calls. */
9035
9036 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9037 return;
9038
9039 switch (TREE_CODE (exp))
9040 {
9041 case CALL_EXPR:
9042 /* Do nothing if already expanded. */
9043 if (CALL_EXPR_RTL (exp) != 0
9044 /* Do nothing if the call returns a variable-sized object. */
9045 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9046 /* Do nothing to built-in functions. */
9047 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9048 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9049 == FUNCTION_DECL)
9050 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9051 return;
9052
9053 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9054 return;
9055
9056 case COMPOUND_EXPR:
9057 case COND_EXPR:
9058 case TRUTH_ANDIF_EXPR:
9059 case TRUTH_ORIF_EXPR:
9060 /* If we find one of these, then we can be sure
9061 the adjust will be done for it (since it makes jumps).
9062 Do it now, so that if this is inside an argument
9063 of a function, we don't get the stack adjustment
9064 after some other args have already been pushed. */
9065 do_pending_stack_adjust ();
9066 return;
9067
9068 case BLOCK:
9069 case RTL_EXPR:
9070 case WITH_CLEANUP_EXPR:
9071 case CLEANUP_POINT_EXPR:
9072 case TRY_CATCH_EXPR:
9073 return;
9074
9075 case SAVE_EXPR:
9076 if (SAVE_EXPR_RTL (exp) != 0)
9077 return;
9078
9079 default:
9080 break;
9081 }
9082
9083 nops = tree_code_length[(int) TREE_CODE (exp)];
9084 for (i = 0; i < nops; i++)
9085 if (TREE_OPERAND (exp, i) != 0)
9086 {
9087 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9088 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9089 It doesn't happen before the call is made. */
9090 ;
9091 else
9092 {
9093 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9094 if (type == 'e' || type == '<' || type == '1' || type == '2'
9095 || type == 'r')
9096 preexpand_calls (TREE_OPERAND (exp, i));
9097 }
9098 }
9099 }
9100 \f
9101 /* At the start of a function, record that we have no previously-pushed
9102 arguments waiting to be popped. */
9103
9104 void
9105 init_pending_stack_adjust ()
9106 {
9107 pending_stack_adjust = 0;
9108 }
9109
9110 /* When exiting from a function, if safe, clear out any pending stack adjust
9111 so the adjustment won't get done.
9112
9113 Note, if the current function calls alloca, then it must have a
9114 frame pointer regardless of the value of flag_omit_frame_pointer. */
9115
9116 void
9117 clear_pending_stack_adjust ()
9118 {
9119 #ifdef EXIT_IGNORE_STACK
9120 if (optimize > 0
9121 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9122 && EXIT_IGNORE_STACK
9123 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9124 && ! flag_inline_functions)
9125 pending_stack_adjust = 0;
9126 #endif
9127 }
9128
9129 /* Pop any previously-pushed arguments that have not been popped yet. */
9130
9131 void
9132 do_pending_stack_adjust ()
9133 {
9134 if (inhibit_defer_pop == 0)
9135 {
9136 if (pending_stack_adjust != 0)
9137 adjust_stack (GEN_INT (pending_stack_adjust));
9138 pending_stack_adjust = 0;
9139 }
9140 }
9141 \f
9142 /* Expand conditional expressions. */
9143
9144 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9145 LABEL is an rtx of code CODE_LABEL, in this function and all the
9146 functions here. */
9147
9148 void
9149 jumpifnot (exp, label)
9150 tree exp;
9151 rtx label;
9152 {
9153 do_jump (exp, label, NULL_RTX);
9154 }
9155
9156 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9157
9158 void
9159 jumpif (exp, label)
9160 tree exp;
9161 rtx label;
9162 {
9163 do_jump (exp, NULL_RTX, label);
9164 }
9165
9166 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9167 the result is zero, or IF_TRUE_LABEL if the result is one.
9168 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9169 meaning fall through in that case.
9170
9171 do_jump always does any pending stack adjust except when it does not
9172 actually perform a jump. An example where there is no jump
9173 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9174
9175 This function is responsible for optimizing cases such as
9176 &&, || and comparison operators in EXP. */
9177
9178 void
9179 do_jump (exp, if_false_label, if_true_label)
9180 tree exp;
9181 rtx if_false_label, if_true_label;
9182 {
9183 register enum tree_code code = TREE_CODE (exp);
9184 /* Some cases need to create a label to jump to
9185 in order to properly fall through.
9186 These cases set DROP_THROUGH_LABEL nonzero. */
9187 rtx drop_through_label = 0;
9188 rtx temp;
9189 int i;
9190 tree type;
9191 enum machine_mode mode;
9192
9193 #ifdef MAX_INTEGER_COMPUTATION_MODE
9194 check_max_integer_computation_mode (exp);
9195 #endif
9196
9197 emit_queue ();
9198
9199 switch (code)
9200 {
9201 case ERROR_MARK:
9202 break;
9203
9204 case INTEGER_CST:
9205 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9206 if (temp)
9207 emit_jump (temp);
9208 break;
9209
9210 #if 0
9211 /* This is not true with #pragma weak */
9212 case ADDR_EXPR:
9213 /* The address of something can never be zero. */
9214 if (if_true_label)
9215 emit_jump (if_true_label);
9216 break;
9217 #endif
9218
9219 case NOP_EXPR:
9220 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9221 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9222 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9223 goto normal;
9224 case CONVERT_EXPR:
9225 /* If we are narrowing the operand, we have to do the compare in the
9226 narrower mode. */
9227 if ((TYPE_PRECISION (TREE_TYPE (exp))
9228 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9229 goto normal;
9230 case NON_LVALUE_EXPR:
9231 case REFERENCE_EXPR:
9232 case ABS_EXPR:
9233 case NEGATE_EXPR:
9234 case LROTATE_EXPR:
9235 case RROTATE_EXPR:
9236 /* These cannot change zero->non-zero or vice versa. */
9237 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9238 break;
9239
9240 case WITH_RECORD_EXPR:
9241 /* Put the object on the placeholder list, recurse through our first
9242 operand, and pop the list. */
9243 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9244 placeholder_list);
9245 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9246 placeholder_list = TREE_CHAIN (placeholder_list);
9247 break;
9248
9249 #if 0
9250 /* This never takes fewer insns than evaluating the PLUS_EXPR followed
9251 by a test, and can take more if the test is eliminated. */
9252 case PLUS_EXPR:
9253 /* Reduce to minus. */
9254 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9255 TREE_OPERAND (exp, 0),
9256 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9257 TREE_OPERAND (exp, 1))));
9258 /* Process as MINUS. */
9259 #endif
9260
9261 case MINUS_EXPR:
9262 /* Non-zero iff operands of minus differ. */
9263 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9264 TREE_OPERAND (exp, 0),
9265 TREE_OPERAND (exp, 1)),
9266 NE, NE, if_false_label, if_true_label);
9267 break;
9268
9269 case BIT_AND_EXPR:
9270 /* If we are AND'ing with a small constant, do this comparison in the
9271 smallest type that fits. If the machine doesn't have comparisons
9272 that small, it will be converted back to the wider comparison.
9273 This helps if we are testing the sign bit of a narrower object.
9274 combine can't do this for us because it can't know whether a
9275 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9276
9277 if (! SLOW_BYTE_ACCESS
9278 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9279 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9280 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9281 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9282 && (type = type_for_mode (mode, 1)) != 0
9283 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9284 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9285 != CODE_FOR_nothing))
9286 {
9287 do_jump (convert (type, exp), if_false_label, if_true_label);
9288 break;
9289 }
9290 goto normal;
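      /* For example (assuming byte access is not slow and the target has
	 a QImode compare): with FLAGS an int, a test like
	 "if (flags & 0x40)" has floor_log2 (0x40) == 6, so the jump is
	 done on (unsigned char) (flags & 0x40) in QImode rather than on
	 the full-width value.  */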
9291
9292 case TRUTH_NOT_EXPR:
9293 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9294 break;
9295
9296 case TRUTH_ANDIF_EXPR:
9297 if (if_false_label == 0)
9298 if_false_label = drop_through_label = gen_label_rtx ();
9299 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9300 start_cleanup_deferral ();
9301 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9302 end_cleanup_deferral ();
9303 break;
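      /* Illustrative only: for "if (a && b) f ();" the code above first
	 emits a jump around the call taken when A is zero, then the
	 recursive do_jump on B emits a second jump around the call taken
	 when B is zero, so f () is reached only when both are nonzero.  */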
9304
9305 case TRUTH_ORIF_EXPR:
9306 if (if_true_label == 0)
9307 if_true_label = drop_through_label = gen_label_rtx ();
9308 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9309 start_cleanup_deferral ();
9310 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9311 end_cleanup_deferral ();
9312 break;
9313
9314 case COMPOUND_EXPR:
9315 push_temp_slots ();
9316 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9317 preserve_temp_slots (NULL_RTX);
9318 free_temp_slots ();
9319 pop_temp_slots ();
9320 emit_queue ();
9321 do_pending_stack_adjust ();
9322 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9323 break;
9324
9325 case COMPONENT_REF:
9326 case BIT_FIELD_REF:
9327 case ARRAY_REF:
9328 {
9329 int bitsize, bitpos, unsignedp;
9330 enum machine_mode mode;
9331 tree type;
9332 tree offset;
9333 int volatilep = 0;
9334 unsigned int alignment;
9335
9336 /* Get description of this reference. We don't actually care
9337 about the underlying object here. */
9338 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9339 &mode, &unsignedp, &volatilep,
9340 &alignment);
9341
9342 type = type_for_size (bitsize, unsignedp);
9343 if (! SLOW_BYTE_ACCESS
9344 && type != 0 && bitsize >= 0
9345 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9346 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9347 != CODE_FOR_nothing))
9348 {
9349 do_jump (convert (type, exp), if_false_label, if_true_label);
9350 break;
9351 }
9352 goto normal;
9353 }
9354
9355 case COND_EXPR:
9356 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9357 if (integer_onep (TREE_OPERAND (exp, 1))
9358 && integer_zerop (TREE_OPERAND (exp, 2)))
9359 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9360
9361 else if (integer_zerop (TREE_OPERAND (exp, 1))
9362 && integer_onep (TREE_OPERAND (exp, 2)))
9363 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9364
9365 else
9366 {
9367 register rtx label1 = gen_label_rtx ();
9368 drop_through_label = gen_label_rtx ();
9369
9370 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9371
9372 start_cleanup_deferral ();
9373 /* Now the THEN-expression. */
9374 do_jump (TREE_OPERAND (exp, 1),
9375 if_false_label ? if_false_label : drop_through_label,
9376 if_true_label ? if_true_label : drop_through_label);
9377 /* In case the do_jump just above never jumps. */
9378 do_pending_stack_adjust ();
9379 emit_label (label1);
9380
9381 /* Now the ELSE-expression. */
9382 do_jump (TREE_OPERAND (exp, 2),
9383 if_false_label ? if_false_label : drop_through_label,
9384 if_true_label ? if_true_label : drop_through_label);
9385 end_cleanup_deferral ();
9386 }
9387 break;
9388
9389 case EQ_EXPR:
9390 {
9391 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9392
9393 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9394 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9395 {
9396 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9397 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9398 do_jump
9399 (fold
9400 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9401 fold (build (EQ_EXPR, TREE_TYPE (exp),
9402 fold (build1 (REALPART_EXPR,
9403 TREE_TYPE (inner_type),
9404 exp0)),
9405 fold (build1 (REALPART_EXPR,
9406 TREE_TYPE (inner_type),
9407 exp1)))),
9408 fold (build (EQ_EXPR, TREE_TYPE (exp),
9409 fold (build1 (IMAGPART_EXPR,
9410 TREE_TYPE (inner_type),
9411 exp0)),
9412 fold (build1 (IMAGPART_EXPR,
9413 TREE_TYPE (inner_type),
9414 exp1)))))),
9415 if_false_label, if_true_label);
9416 }
9417
9418 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9419 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9420
9421 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9422 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9423 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9424 else
9425 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9426 break;
9427 }
9428
9429 case NE_EXPR:
9430 {
9431 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9432
9433 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9434 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9435 {
9436 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9437 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9438 do_jump
9439 (fold
9440 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9441 fold (build (NE_EXPR, TREE_TYPE (exp),
9442 fold (build1 (REALPART_EXPR,
9443 TREE_TYPE (inner_type),
9444 exp0)),
9445 fold (build1 (REALPART_EXPR,
9446 TREE_TYPE (inner_type),
9447 exp1)))),
9448 fold (build (NE_EXPR, TREE_TYPE (exp),
9449 fold (build1 (IMAGPART_EXPR,
9450 TREE_TYPE (inner_type),
9451 exp0)),
9452 fold (build1 (IMAGPART_EXPR,
9453 TREE_TYPE (inner_type),
9454 exp1)))))),
9455 if_false_label, if_true_label);
9456 }
9457
9458 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9459 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9460
9461 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9462 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9463 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9464 else
9465 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9466 break;
9467 }
9468
9469 case LT_EXPR:
9470 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9471 if (GET_MODE_CLASS (mode) == MODE_INT
9472 && ! can_compare_p (LT, mode, ccp_jump))
9473 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9474 else
9475 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9476 break;
9477
9478 case LE_EXPR:
9479 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9480 if (GET_MODE_CLASS (mode) == MODE_INT
9481 && ! can_compare_p (LE, mode, ccp_jump))
9482 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9483 else
9484 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9485 break;
9486
9487 case GT_EXPR:
9488 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9489 if (GET_MODE_CLASS (mode) == MODE_INT
9490 && ! can_compare_p (GT, mode, ccp_jump))
9491 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9492 else
9493 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9494 break;
9495
9496 case GE_EXPR:
9497 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9498 if (GET_MODE_CLASS (mode) == MODE_INT
9499 && ! can_compare_p (GE, mode, ccp_jump))
9500 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9501 else
9502 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9503 break;
9504
9505 case UNORDERED_EXPR:
9506 case ORDERED_EXPR:
9507 {
9508 enum rtx_code cmp, rcmp;
9509 int do_rev;
9510
9511 if (code == UNORDERED_EXPR)
9512 cmp = UNORDERED, rcmp = ORDERED;
9513 else
9514 cmp = ORDERED, rcmp = UNORDERED;
9515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9516
9517 do_rev = 0;
9518 if (! can_compare_p (cmp, mode, ccp_jump)
9519 && (can_compare_p (rcmp, mode, ccp_jump)
9520 /* If the target doesn't provide either UNORDERED or ORDERED
9521 comparisons, canonicalize on UNORDERED for the library. */
9522 || rcmp == UNORDERED))
9523 do_rev = 1;
9524
9525 if (! do_rev)
9526 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9527 else
9528 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9529 }
9530 break;
9531
9532 {
9533 enum rtx_code rcode1;
9534 enum tree_code tcode2;
9535
9536 case UNLT_EXPR:
9537 rcode1 = UNLT;
9538 tcode2 = LT_EXPR;
9539 goto unordered_bcc;
9540 case UNLE_EXPR:
9541 rcode1 = UNLE;
9542 tcode2 = LE_EXPR;
9543 goto unordered_bcc;
9544 case UNGT_EXPR:
9545 rcode1 = UNGT;
9546 tcode2 = GT_EXPR;
9547 goto unordered_bcc;
9548 case UNGE_EXPR:
9549 rcode1 = UNGE;
9550 tcode2 = GE_EXPR;
9551 goto unordered_bcc;
9552 case UNEQ_EXPR:
9553 rcode1 = UNEQ;
9554 tcode2 = EQ_EXPR;
9555 goto unordered_bcc;
9556
9557 unordered_bcc:
9558 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 if (can_compare_p (rcode1, mode, ccp_jump))
9560 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9561 if_true_label);
9562 else
9563 {
9564 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9565 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9566 tree cmp0, cmp1;
9567
9568 /* If the target doesn't support combined unordered
9569 compares, decompose into UNORDERED + comparison. */
9570 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9571 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9572 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9573 do_jump (exp, if_false_label, if_true_label);
9574 }
9575 }
9576 break;
9577
9578 default:
9579 normal:
9580 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9581 #if 0
9582 /* This is not needed any more and causes poor code since it causes
9583 comparisons and tests from non-SI objects to have different code
9584 sequences. */
9585 /* Copy to register to avoid generating bad insns by cse
9586 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9587 if (!cse_not_expected && GET_CODE (temp) == MEM)
9588 temp = copy_to_reg (temp);
9589 #endif
9590 do_pending_stack_adjust ();
9591 /* Do any postincrements in the expression that was tested. */
9592 emit_queue ();
9593
9594 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9595 {
9596 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9597 if (target)
9598 emit_jump (target);
9599 }
9600 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9601 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9602 /* Note swapping the labels gives us not-equal. */
9603 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9604 else if (GET_MODE (temp) != VOIDmode)
9605 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9606 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9607 GET_MODE (temp), NULL_RTX, 0,
9608 if_false_label, if_true_label);
9609 else
9610 abort ();
9611 }
9612
9613 if (drop_through_label)
9614 {
9615 /* If do_jump produces code that might be jumped around,
9616 do any stack adjusts from that code, before the place
9617 where control merges in. */
9618 do_pending_stack_adjust ();
9619 emit_label (drop_through_label);
9620 }
9621 }
9622 \f
9623 /* Given a comparison expression EXP for values too wide to be compared
9624 with one insn, test the comparison and jump to the appropriate label.
9625 The code of EXP is ignored; we always test GT if SWAP is 0,
9626 and LT if SWAP is 1. */
9627
9628 static void
9629 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9630 tree exp;
9631 int swap;
9632 rtx if_false_label, if_true_label;
9633 {
9634 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9635 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9636 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9637 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9638
9639 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9640 }
9641
9642 /* Compare OP0 with OP1, word at a time, in mode MODE.
9643 UNSIGNEDP says to do unsigned comparison.
9644 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9645
9646 void
9647 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9648 enum machine_mode mode;
9649 int unsignedp;
9650 rtx op0, op1;
9651 rtx if_false_label, if_true_label;
9652 {
9653 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9654 rtx drop_through_label = 0;
9655 int i;
9656
9657 if (! if_true_label || ! if_false_label)
9658 drop_through_label = gen_label_rtx ();
9659 if (! if_true_label)
9660 if_true_label = drop_through_label;
9661 if (! if_false_label)
9662 if_false_label = drop_through_label;
9663
9664 /* Compare a word at a time, high order first. */
9665 for (i = 0; i < nwords; i++)
9666 {
9667 rtx op0_word, op1_word;
9668
9669 if (WORDS_BIG_ENDIAN)
9670 {
9671 op0_word = operand_subword_force (op0, i, mode);
9672 op1_word = operand_subword_force (op1, i, mode);
9673 }
9674 else
9675 {
9676 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9677 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9678 }
9679
9680 /* All but the high-order word must be compared as unsigned. */
9681 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9682 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9683 NULL_RTX, if_true_label);
9684
9685 /* Consider lower words only if these are equal. */
9686 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9687 NULL_RTX, 0, NULL_RTX, if_false_label);
9688 }
9689
9690 if (if_false_label)
9691 emit_jump (if_false_label);
9692 if (drop_through_label)
9693 emit_label (drop_through_label);
9694 }
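/* A rough sketch of the loop above: comparing two DImode values on a
   32-bit target gives nwords == 2, so the emitted sequence is roughly

       if (high words compare >, signedness per UNSIGNEDP)  goto if_true_label;
       if (high words differ)                               goto if_false_label;
       if (low words compare >, always unsigned)            goto if_true_label;
       if (low words differ)                                goto if_false_label;
       goto if_false_label;

   so the low-order words are consulted only when the high-order words
   are equal.  */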
9695
9696 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9697 with one insn, test the comparison and jump to the appropriate label. */
9698
9699 static void
9700 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9701 tree exp;
9702 rtx if_false_label, if_true_label;
9703 {
9704 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9705 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9706 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9707 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9708 int i;
9709 rtx drop_through_label = 0;
9710
9711 if (! if_false_label)
9712 drop_through_label = if_false_label = gen_label_rtx ();
9713
9714 for (i = 0; i < nwords; i++)
9715 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9716 operand_subword_force (op1, i, mode),
9717 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9718 word_mode, NULL_RTX, 0, if_false_label,
9719 NULL_RTX);
9720
9721 if (if_true_label)
9722 emit_jump (if_true_label);
9723 if (drop_through_label)
9724 emit_label (drop_through_label);
9725 }
9726 \f
9727 /* Jump according to whether OP0 is 0.
9728 We assume that OP0 has an integer mode that is too wide
9729 for the available compare insns. */
9730
9731 void
9732 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9733 rtx op0;
9734 rtx if_false_label, if_true_label;
9735 {
9736 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9737 rtx part;
9738 int i;
9739 rtx drop_through_label = 0;
9740
9741 /* The fastest way of doing this comparison on almost any machine is to
9742 "or" all the words and compare the result. If all have to be loaded
9743 from memory and this is a very wide item, it's possible this may
9744 be slower, but that's highly unlikely. */
9745
9746 part = gen_reg_rtx (word_mode);
9747 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9748 for (i = 1; i < nwords && part != 0; i++)
9749 part = expand_binop (word_mode, ior_optab, part,
9750 operand_subword_force (op0, i, GET_MODE (op0)),
9751 part, 1, OPTAB_WIDEN);
9752
9753 if (part != 0)
9754 {
9755 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9756 NULL_RTX, 0, if_false_label, if_true_label);
9757
9758 return;
9759 }
9760
9761 /* If we couldn't do the "or" simply, do this with a series of compares. */
9762 if (! if_false_label)
9763 drop_through_label = if_false_label = gen_label_rtx ();
9764
9765 for (i = 0; i < nwords; i++)
9766 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9767 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9768 if_false_label, NULL_RTX);
9769
9770 if (if_true_label)
9771 emit_jump (if_true_label);
9772
9773 if (drop_through_label)
9774 emit_label (drop_through_label);
9775 }
9776 \f
9777 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9778 (including code to compute the values to be compared),
9779 and set (CC0) according to the result.
9780 The decision as to signed or unsigned comparison must be made by the caller.
9781
9782 We force a stack adjustment unless there are currently
9783 things pushed on the stack that aren't yet used.
9784
9785 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9786 compared.
9787
9788 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9789 size of MODE should be used. */
9790
9791 rtx
9792 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9793 register rtx op0, op1;
9794 enum rtx_code code;
9795 int unsignedp;
9796 enum machine_mode mode;
9797 rtx size;
9798 unsigned int align;
9799 {
9800 rtx tem;
9801
9802 /* If one operand is constant, make it the second one. Only do this
9803 if the other operand is not constant as well. */
9804
9805 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9806 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9807 {
9808 tem = op0;
9809 op0 = op1;
9810 op1 = tem;
9811 code = swap_condition (code);
9812 }
9813
9814 if (flag_force_mem)
9815 {
9816 op0 = force_not_mem (op0);
9817 op1 = force_not_mem (op1);
9818 }
9819
9820 do_pending_stack_adjust ();
9821
9822 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9823 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9824 return tem;
9825
9826 #if 0
9827 /* There's no need to do this now that combine.c can eliminate lots of
9828 sign extensions. This can be less efficient in certain cases on other
9829 machines. */
9830
9831 /* If this is a signed equality comparison, we can do it as an
9832 unsigned comparison since zero-extension is cheaper than sign
9833 extension and comparisons with zero are done as unsigned. This is
9834 the case even on machines that can do fast sign extension, since
9835 zero-extension is easier to combine with other operations than
9836 sign-extension is. If we are comparing against a constant, we must
9837 convert it to what it would look like unsigned. */
9838 if ((code == EQ || code == NE) && ! unsignedp
9839 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9840 {
9841 if (GET_CODE (op1) == CONST_INT
9842 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9843 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9844 unsignedp = 1;
9845 }
9846 #endif
9847
9848 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9849
9850 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9851 }
9852
9853 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9854 The decision as to signed or unsigned comparison must be made by the caller.
9855
9856 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9857 compared.
9858
9859 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9860 size of MODE should be used. */
9861
9862 void
9863 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9864 if_false_label, if_true_label)
9865 register rtx op0, op1;
9866 enum rtx_code code;
9867 int unsignedp;
9868 enum machine_mode mode;
9869 rtx size;
9870 unsigned int align;
9871 rtx if_false_label, if_true_label;
9872 {
9873 rtx tem;
9874 int dummy_true_label = 0;
9875
9876 /* Reverse the comparison if that is safe and we want to jump if it is
9877 false. */
9878 if (! if_true_label && ! FLOAT_MODE_P (mode))
9879 {
9880 if_true_label = if_false_label;
9881 if_false_label = 0;
9882 code = reverse_condition (code);
9883 }
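  /* As an illustration of the reversal just above: a caller such as
     jumpifnot on an integer "a < b" supplies only IF_FALSE_LABEL, so
     the comparison is turned into GE (or GEU) branching to that label,
     and a single conditional jump suffices instead of a jump around a
     jump.  */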
9884
9885 /* If one operand is constant, make it the second one. Only do this
9886 if the other operand is not constant as well. */
9887
9888 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9889 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9890 {
9891 tem = op0;
9892 op0 = op1;
9893 op1 = tem;
9894 code = swap_condition (code);
9895 }
9896
9897 if (flag_force_mem)
9898 {
9899 op0 = force_not_mem (op0);
9900 op1 = force_not_mem (op1);
9901 }
9902
9903 do_pending_stack_adjust ();
9904
9905 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9906 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9907 {
9908 if (tem == const_true_rtx)
9909 {
9910 if (if_true_label)
9911 emit_jump (if_true_label);
9912 }
9913 else
9914 {
9915 if (if_false_label)
9916 emit_jump (if_false_label);
9917 }
9918 return;
9919 }
9920
9921 #if 0
9922 /* There's no need to do this now that combine.c can eliminate lots of
9923 sign extensions. This can be less efficient in certain cases on other
9924 machines. */
9925
9926 /* If this is a signed equality comparison, we can do it as an
9927 unsigned comparison since zero-extension is cheaper than sign
9928 extension and comparisons with zero are done as unsigned. This is
9929 the case even on machines that can do fast sign extension, since
9930 zero-extension is easier to combine with other operations than
9931 sign-extension is. If we are comparing against a constant, we must
9932 convert it to what it would look like unsigned. */
9933 if ((code == EQ || code == NE) && ! unsignedp
9934 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9935 {
9936 if (GET_CODE (op1) == CONST_INT
9937 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9938 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9939 unsignedp = 1;
9940 }
9941 #endif
9942
9943 if (! if_true_label)
9944 {
9945 dummy_true_label = 1;
9946 if_true_label = gen_label_rtx ();
9947 }
9948
9949 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9950 if_true_label);
9951
9952 if (if_false_label)
9953 emit_jump (if_false_label);
9954 if (dummy_true_label)
9955 emit_label (if_true_label);
9956 }
9957
9958 /* Generate code for a comparison expression EXP (including code to compute
9959 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9960 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9961 generated code will drop through.
9962 SIGNED_CODE should be the rtx operation for this comparison for
9963 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9964
9965 We force a stack adjustment unless there are currently
9966 things pushed on the stack that aren't yet used. */
9967
9968 static void
9969 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9970 if_true_label)
9971 register tree exp;
9972 enum rtx_code signed_code, unsigned_code;
9973 rtx if_false_label, if_true_label;
9974 {
9975 unsigned int align0, align1;
9976 register rtx op0, op1;
9977 register tree type;
9978 register enum machine_mode mode;
9979 int unsignedp;
9980 enum rtx_code code;
9981
9982 /* Don't crash if the comparison was erroneous. */
9983 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9984 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9985 return;
9986
9987 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9988 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9989 mode = TYPE_MODE (type);
9990 unsignedp = TREE_UNSIGNED (type);
9991 code = unsignedp ? unsigned_code : signed_code;
9992
9993 #ifdef HAVE_canonicalize_funcptr_for_compare
9994 /* If function pointers need to be "canonicalized" before they can
9995 be reliably compared, then canonicalize them. */
9996 if (HAVE_canonicalize_funcptr_for_compare
9997 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9998 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9999 == FUNCTION_TYPE))
10000 {
10001 rtx new_op0 = gen_reg_rtx (mode);
10002
10003 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10004 op0 = new_op0;
10005 }
10006
10007 if (HAVE_canonicalize_funcptr_for_compare
10008 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10009 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10010 == FUNCTION_TYPE))
10011 {
10012 rtx new_op1 = gen_reg_rtx (mode);
10013
10014 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10015 op1 = new_op1;
10016 }
10017 #endif
10018
10019 /* Do any postincrements in the expression that was tested. */
10020 emit_queue ();
10021
10022 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10023 ((mode == BLKmode)
10024 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10025 MIN (align0, align1) / BITS_PER_UNIT,
10026 if_false_label, if_true_label);
10027 }
10028 \f
10029 /* Generate code to calculate EXP using a store-flag instruction
10030 and return an rtx for the result. EXP is either a comparison
10031 or a TRUTH_NOT_EXPR whose operand is a comparison.
10032
10033 If TARGET is nonzero, store the result there if convenient.
10034
10035 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10036 cheap.
10037
10038 Return zero if there is no suitable set-flag instruction
10039 available on this machine.
10040
10041 Once expand_expr has been called on the arguments of the comparison,
10042 we are committed to doing the store flag, since it is not safe to
10043 re-evaluate the expression. We emit the store-flag insn by calling
10044 emit_store_flag, but only expand the arguments if we have a reason
10045 to believe that emit_store_flag will be successful. If we think that
10046 it will, but it isn't, we have to simulate the store-flag with a
10047 set/jump/set sequence. */
10048
10049 static rtx
10050 do_store_flag (exp, target, mode, only_cheap)
10051 tree exp;
10052 rtx target;
10053 enum machine_mode mode;
10054 int only_cheap;
10055 {
10056 enum rtx_code code;
10057 tree arg0, arg1, type;
10058 tree tem;
10059 enum machine_mode operand_mode;
10060 int invert = 0;
10061 int unsignedp;
10062 rtx op0, op1;
10063 enum insn_code icode;
10064 rtx subtarget = target;
10065 rtx result, label;
10066
10067 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10068 result at the end. We can't simply invert the test since it would
10069 have already been inverted if it were valid. This case occurs for
10070 some floating-point comparisons. */
10071
10072 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10073 invert = 1, exp = TREE_OPERAND (exp, 0);
10074
10075 arg0 = TREE_OPERAND (exp, 0);
10076 arg1 = TREE_OPERAND (exp, 1);
10077 type = TREE_TYPE (arg0);
10078 operand_mode = TYPE_MODE (type);
10079 unsignedp = TREE_UNSIGNED (type);
10080
10081 /* We won't bother with BLKmode store-flag operations because it would mean
10082 passing a lot of information to emit_store_flag. */
10083 if (operand_mode == BLKmode)
10084 return 0;
10085
10086 /* We won't bother with store-flag operations involving function pointers
10087 when function pointers must be canonicalized before comparisons. */
10088 #ifdef HAVE_canonicalize_funcptr_for_compare
10089 if (HAVE_canonicalize_funcptr_for_compare
10090 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10091 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10092 == FUNCTION_TYPE))
10093 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10094 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10095 == FUNCTION_TYPE))))
10096 return 0;
10097 #endif
10098
10099 STRIP_NOPS (arg0);
10100 STRIP_NOPS (arg1);
10101
10102 /* Get the rtx comparison code to use. We know that EXP is a comparison
10103 operation of some type. Some comparisons against 1 and -1 can be
10104 converted to comparisons with zero. Do so here so that the tests
10105 below will be aware that we have a comparison with zero. These
10106 tests will not catch constants in the first operand, but constants
10107 are rarely passed as the first operand. */
10108
10109 switch (TREE_CODE (exp))
10110 {
10111 case EQ_EXPR:
10112 code = EQ;
10113 break;
10114 case NE_EXPR:
10115 code = NE;
10116 break;
10117 case LT_EXPR:
10118 if (integer_onep (arg1))
10119 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10120 else
10121 code = unsignedp ? LTU : LT;
10122 break;
10123 case LE_EXPR:
10124 if (! unsignedp && integer_all_onesp (arg1))
10125 arg1 = integer_zero_node, code = LT;
10126 else
10127 code = unsignedp ? LEU : LE;
10128 break;
10129 case GT_EXPR:
10130 if (! unsignedp && integer_all_onesp (arg1))
10131 arg1 = integer_zero_node, code = GE;
10132 else
10133 code = unsignedp ? GTU : GT;
10134 break;
10135 case GE_EXPR:
10136 if (integer_onep (arg1))
10137 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10138 else
10139 code = unsignedp ? GEU : GE;
10140 break;
10141
10142 case UNORDERED_EXPR:
10143 code = UNORDERED;
10144 break;
10145 case ORDERED_EXPR:
10146 code = ORDERED;
10147 break;
10148 case UNLT_EXPR:
10149 code = UNLT;
10150 break;
10151 case UNLE_EXPR:
10152 code = UNLE;
10153 break;
10154 case UNGT_EXPR:
10155 code = UNGT;
10156 break;
10157 case UNGE_EXPR:
10158 code = UNGE;
10159 break;
10160 case UNEQ_EXPR:
10161 code = UNEQ;
10162 break;
10163
10164 default:
10165 abort ();
10166 }
10167
10168 /* Put a constant second. */
10169 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10170 {
10171 tem = arg0; arg0 = arg1; arg1 = tem;
10172 code = swap_condition (code);
10173 }
10174
10175 /* If this is an equality or inequality test of a single bit, we can
10176 do this by shifting the bit being tested to the low-order bit and
10177 masking the result with the constant 1. If the condition was EQ,
10178 we xor it with 1. This does not require an scc insn and is faster
10179 than an scc insn even if we have it. */
10180
10181 if ((code == NE || code == EQ)
10182 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10183 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10184 {
10185 tree inner = TREE_OPERAND (arg0, 0);
10186 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10187 int ops_unsignedp;
10188
10189 /* If INNER is a right shift of a constant and it plus BITNUM does
10190 not overflow, adjust BITNUM and INNER. */
10191
10192 if (TREE_CODE (inner) == RSHIFT_EXPR
10193 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10194 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10195 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10196 < TYPE_PRECISION (type)))
10197 {
10198 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10199 inner = TREE_OPERAND (inner, 0);
10200 }
10201
10202 /* If we are going to be able to omit the AND below, we must do our
10203 operations as unsigned. If we must use the AND, we have a choice.
10204 Normally unsigned is faster, but for some machines signed is. */
10205 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10206 #ifdef LOAD_EXTEND_OP
10207 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10208 #else
10209 : 1
10210 #endif
10211 );
10212
10213 if (subtarget == 0 || GET_CODE (subtarget) != REG
10214 || GET_MODE (subtarget) != operand_mode
10215 || ! safe_from_p (subtarget, inner, 1))
10216 subtarget = 0;
10217
10218 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10219
10220 if (bitnum != 0)
10221 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10222 size_int (bitnum), subtarget, ops_unsignedp);
10223
10224 if (GET_MODE (op0) != mode)
10225 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10226
10227 if ((code == EQ && ! invert) || (code == NE && invert))
10228 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10229 ops_unsignedp, OPTAB_LIB_WIDEN);
10230
10231 /* Put the AND last so it can combine with more things. */
10232 if (bitnum != TYPE_PRECISION (type) - 1)
10233 op0 = expand_and (op0, const1_rtx, subtarget);
10234
10235 return op0;
10236 }
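  /* Illustrative only: for "(x & 0x20) != 0" the code above shifts X
     right by 5 and masks with 1, yielding "(x >> 5) & 1"; for the EQ
     form the shifted value is XORed with 1 before the final AND.  No
     scc insn is needed in either case.  */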
10237
10238 /* Now see if we are likely to be able to do this. Return if not. */
10239 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10240 return 0;
10241
10242 icode = setcc_gen_code[(int) code];
10243 if (icode == CODE_FOR_nothing
10244 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10245 {
10246 /* We can only do this if it is one of the special cases that
10247 can be handled without an scc insn. */
10248 if ((code == LT && integer_zerop (arg1))
10249 || (! only_cheap && code == GE && integer_zerop (arg1)))
10250 ;
10251 else if (BRANCH_COST >= 0
10252 && ! only_cheap && (code == NE || code == EQ)
10253 && TREE_CODE (type) != REAL_TYPE
10254 && ((abs_optab->handlers[(int) operand_mode].insn_code
10255 != CODE_FOR_nothing)
10256 || (ffs_optab->handlers[(int) operand_mode].insn_code
10257 != CODE_FOR_nothing)))
10258 ;
10259 else
10260 return 0;
10261 }
10262
10263 preexpand_calls (exp);
10264 if (subtarget == 0 || GET_CODE (subtarget) != REG
10265 || GET_MODE (subtarget) != operand_mode
10266 || ! safe_from_p (subtarget, arg1, 1))
10267 subtarget = 0;
10268
10269 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10270 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10271
10272 if (target == 0)
10273 target = gen_reg_rtx (mode);
10274
10275 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10276 because, if emit_store_flag does anything, it will succeed and
10277 OP0 and OP1 will not be used subsequently. */
10278
10279 result = emit_store_flag (target, code,
10280 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10281 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10282 operand_mode, unsignedp, 1);
10283
10284 if (result)
10285 {
10286 if (invert)
10287 result = expand_binop (mode, xor_optab, result, const1_rtx,
10288 result, 0, OPTAB_LIB_WIDEN);
10289 return result;
10290 }
10291
10292 /* If this failed, we have to do this with set/compare/jump/set code. */
10293 if (GET_CODE (target) != REG
10294 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10295 target = gen_reg_rtx (GET_MODE (target));
10296
10297 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10298 result = compare_from_rtx (op0, op1, code, unsignedp,
10299 operand_mode, NULL_RTX, 0);
10300 if (GET_CODE (result) == CONST_INT)
10301 return (((result == const0_rtx && ! invert)
10302 || (result != const0_rtx && invert))
10303 ? const0_rtx : const1_rtx);
10304
10305 label = gen_label_rtx ();
10306 if (bcc_gen_fctn[(int) code] == 0)
10307 abort ();
10308
10309 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10310 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10311 emit_label (label);
10312
10313 return target;
10314 }
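/* When emit_store_flag cannot be used, the set/jump/set fallback at the
   end of do_store_flag produces, roughly,

       target = 1;
       if (<condition>) goto L;
       target = 0;
     L:

   with the constants 0 and 1 exchanged when INVERT is set.  */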
10315 \f
10316 /* Generate a tablejump instruction (used for switch statements). */
10317
10318 #ifdef HAVE_tablejump
10319
10320 /* INDEX is the value being switched on, with the lowest value
10321 in the table already subtracted.
10322 MODE is its expected mode (needed if INDEX is constant).
10323 RANGE is the length of the jump table.
10324 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10325
10326 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10327 index value is out of range. */
10328
10329 void
10330 do_tablejump (index, mode, range, table_label, default_label)
10331 rtx index, range, table_label, default_label;
10332 enum machine_mode mode;
10333 {
10334 register rtx temp, vector;
10335
10336 /* Do an unsigned comparison (in the proper mode) between the index
10337 expression and the value which represents the length of the range.
10338 Since we just finished subtracting the lower bound of the range
10339 from the index expression, this comparison allows us to simultaneously
10340 check that the original index expression value is both greater than
10341 or equal to the minimum value of the range and less than or equal to
10342 the maximum value of the range. */
10343
10344 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10345 0, default_label);
10346
10347 /* If index is in range, it must fit in Pmode.
10348 Convert to Pmode so we can index with it. */
10349 if (mode != Pmode)
10350 index = convert_to_mode (Pmode, index, 1);
10351
10352 /* Don't let a MEM slip thru, because then INDEX that comes
10353 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10354 and break_out_memory_refs will go to work on it and mess it up. */
10355 #ifdef PIC_CASE_VECTOR_ADDRESS
10356 if (flag_pic && GET_CODE (index) != REG)
10357 index = copy_to_mode_reg (Pmode, index);
10358 #endif
10359
10360 /* If flag_force_addr were to affect this address
10361 it could interfere with the tricky assumptions made
10362 about addresses that contain label-refs,
10363 which may be valid only very near the tablejump itself. */
10364 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10365 GET_MODE_SIZE, because this indicates how large insns are. The other
10366 uses should all be Pmode, because they are addresses. This code
10367 could fail if addresses and insns are not the same size. */
10368 index = gen_rtx_PLUS (Pmode,
10369 gen_rtx_MULT (Pmode, index,
10370 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10371 gen_rtx_LABEL_REF (Pmode, table_label));
10372 #ifdef PIC_CASE_VECTOR_ADDRESS
10373 if (flag_pic)
10374 index = PIC_CASE_VECTOR_ADDRESS (index);
10375 else
10376 #endif
10377 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10378 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10379 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10380 RTX_UNCHANGING_P (vector) = 1;
10381 convert_move (temp, vector, 0);
10382
10383 emit_jump_insn (gen_tablejump (temp, table_label));
10384
10385 /* If we are generating PIC code or if the table is PC-relative, the
10386 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10387 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10388 emit_barrier ();
10389 }
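/* Sketch of the address arithmetic above (non-PIC case, and assuming a
   4-byte CASE_VECTOR_MODE): the dispatch amounts to

       temp = *(table_label + index * 4);
       goto *temp;

   i.e. the table entry itself supplies the target address.  */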
10390
10391 #endif /* HAVE_tablejump */