1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
 88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
141 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PARAMS ((tree));
145 static int mostly_zeros_p PARAMS ((tree));
146 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, tree, unsigned int, int));
149 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
150 unsigned HOST_WIDE_INT));
151 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
152 HOST_WIDE_INT, enum machine_mode,
153 tree, enum machine_mode, int,
154 unsigned int, HOST_WIDE_INT, int));
155 static enum memory_use_mode
156 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
157 static tree save_noncopied_parts PARAMS ((tree, tree));
158 static tree init_noncopied_parts PARAMS ((tree, tree));
159 static int safe_from_p PARAMS ((rtx, tree, int));
160 static int fixed_type_p PARAMS ((tree));
161 static rtx var_rtx PARAMS ((tree));
162 static int readonly_fields_p PARAMS ((tree));
163 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
164 static rtx expand_increment PARAMS ((tree, int, int));
165 static void preexpand_calls PARAMS ((tree));
166 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
167 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
168 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
169 rtx, rtx));
170 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
171
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
175
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
178
179 /* If a memory-to-memory move would take MOVE_RATIO or more simple
180 move-instruction sequences, we will do a movstr or libcall instead. */
181
182 #ifndef MOVE_RATIO
183 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
184 #define MOVE_RATIO 2
185 #else
 186 /* If we are optimizing for space (-Os), cut down the default move ratio. */
187 #define MOVE_RATIO (optimize_size ? 3 : 15)
188 #endif
189 #endif
190
191 /* This macro is used to determine whether move_by_pieces should be called
192 to perform a structure copy. */
193 #ifndef MOVE_BY_PIECES_P
194 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
195 (SIZE, ALIGN) < MOVE_RATIO)
196 #endif
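
/* For example (the exact numbers are target-dependent): on a hypothetical
   32-bit target with MOVE_RATIO 15, a 32-byte copy of 4-byte-aligned
   operands counts as 8 SImode moves, so MOVE_BY_PIECES_P is true and the
   copy is expanded inline; a 128-byte copy counts as 32 moves and is
   handled by a movstr pattern or a library call instead.  */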
197
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movstr_optab[NUM_MACHINE_MODES];
200
201 /* This array records the insn_code of insns to perform block clears. */
202 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
203
204 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
205
206 #ifndef SLOW_UNALIGNED_ACCESS
207 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
208 #endif
209 \f
210 /* This is run once per compilation to set up which modes can be used
211 directly in memory and to initialize the block move optab. */
212
213 void
214 init_expr_once ()
215 {
216 rtx insn, pat;
217 enum machine_mode mode;
218 int num_clobbers;
219 rtx mem, mem1;
220 char *free_point;
221
222 start_sequence ();
223
224 /* Since we are on the permanent obstack, we must be sure we save this
225 spot AFTER we call start_sequence, since it will reuse the rtl it
226 makes. */
227 free_point = (char *) oballoc (0);
228
229 /* Try indexing by frame ptr and try by stack ptr.
230 It is known that on the Convex the stack ptr isn't a valid index.
231 With luck, one or the other is valid on any machine. */
232 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
233 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234
235 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
236 pat = PATTERN (insn);
237
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
240 {
241 int regno;
242 rtx reg;
243
244 direct_load[(int) mode] = direct_store[(int) mode] = 0;
245 PUT_MODE (mem, mode);
246 PUT_MODE (mem1, mode);
247
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
250
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
254 regno++)
255 {
256 if (! HARD_REGNO_MODE_OK (regno, mode))
257 continue;
258
259 reg = gen_rtx_REG (mode, regno);
260
261 SET_SRC (pat) = mem;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275
276 SET_SRC (pat) = reg;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
280 }
281 }
282
283 end_sequence ();
284 obfree (free_point);
285 }
286
287 /* This is run at the start of compiling a function. */
288
289 void
290 init_expr ()
291 {
292 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293
294 pending_chain = 0;
295 pending_stack_adjust = 0;
296 arg_space_so_far = 0;
297 inhibit_defer_pop = 0;
298 saveregs_value = 0;
299 apply_args_value = 0;
300 forced_labels = 0;
301 }
302
303 void
304 mark_expr_status (p)
305 struct expr_status *p;
306 {
307 if (p == NULL)
308 return;
309
310 ggc_mark_rtx (p->x_saveregs_value);
311 ggc_mark_rtx (p->x_apply_args_value);
312 ggc_mark_rtx (p->x_forced_labels);
313 }
314
315 void
316 free_expr_status (f)
317 struct function *f;
318 {
319 free (f->expr);
320 f->expr = NULL;
321 }
322
323 /* Small sanity check that the queue is empty at the end of a function. */
324 void
325 finish_expr_for_function ()
326 {
327 if (pending_chain)
328 abort ();
329 }
330 \f
331 /* Manage the queue of increment instructions to be output
332 for POSTINCREMENT_EXPR expressions, etc. */
333
334 /* Queue up to increment (or change) VAR later. BODY says how:
335 BODY should be the same thing you would pass to emit_insn
336 to increment right away. It will go to emit_insn later on.
337
338 The value is a QUEUED expression to be used in place of VAR
339 where you want to guarantee the pre-incrementation value of VAR. */
340
341 static rtx
342 enqueue_insn (var, body)
343 rtx var, body;
344 {
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
348 }
349
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
356
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
360
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
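
/* For example (mirroring the calls made in convert_move below), a caller
   that is about to emit a move filters both operands first:

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);

   passing MODIFY as 1 for an operand that will be written and 0 for one
   that is only read, and using the results before any emit_queue.  */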
364
365 rtx
366 protect_from_queue (x, modify)
367 register rtx x;
368 int modify;
369 {
370 register RTX_CODE code = GET_CODE (x);
371
372 #if 0 /* A QUEUED can hang around after the queue is forced out. */
373 /* Shortcut for most common case. */
374 if (pending_chain == 0)
375 return x;
376 #endif
377
378 if (code != QUEUED)
379 {
380 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
381 use of autoincrement. Make a copy of the contents of the memory
382 location rather than a copy of the address, but not if the value is
383 of mode BLKmode. Don't modify X in place since it might be
384 shared. */
385 if (code == MEM && GET_MODE (x) != BLKmode
386 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 {
388 register rtx y = XEXP (x, 0);
389 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390
391 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
392 MEM_COPY_ATTRIBUTES (new, x);
393 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
394
395 if (QUEUED_INSN (y))
396 {
397 register rtx temp = gen_reg_rtx (GET_MODE (new));
398 emit_insn_before (gen_move_insn (temp, new),
399 QUEUED_INSN (y));
400 return temp;
401 }
402 return new;
403 }
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
407 {
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
410 {
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
413 }
414 }
415 else if (code == PLUS || code == MULT)
416 {
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 {
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
424 }
425 }
426 return x;
427 }
428 /* If the increment has not happened, use the variable itself. */
429 if (QUEUED_INSN (x) == 0)
430 return QUEUED_VAR (x);
431 /* If the increment has happened and a pre-increment copy exists,
432 use that copy. */
433 if (QUEUED_COPY (x) != 0)
434 return QUEUED_COPY (x);
435 /* The increment has happened but we haven't set up a pre-increment copy.
436 Set one up now, and use it. */
437 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
438 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
439 QUEUED_INSN (x));
440 return QUEUED_COPY (x);
441 }
442
443 /* Return nonzero if X contains a QUEUED expression:
444 if it contains anything that will be altered by a queued increment.
445 We handle only combinations of MEM, PLUS, MINUS and MULT operators
446 since memory addresses generally contain only those. */
447
448 int
449 queued_subexp_p (x)
450 rtx x;
451 {
452 register enum rtx_code code = GET_CODE (x);
453 switch (code)
454 {
455 case QUEUED:
456 return 1;
457 case MEM:
458 return queued_subexp_p (XEXP (x, 0));
459 case MULT:
460 case PLUS:
461 case MINUS:
462 return (queued_subexp_p (XEXP (x, 0))
463 || queued_subexp_p (XEXP (x, 1)));
464 default:
465 return 0;
466 }
467 }
468
469 /* Perform all the pending incrementations. */
470
471 void
472 emit_queue ()
473 {
474 register rtx p;
475 while ((p = pending_chain))
476 {
477 rtx body = QUEUED_BODY (p);
478
479 if (GET_CODE (body) == SEQUENCE)
480 {
481 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
482 emit_insn (QUEUED_BODY (p));
483 }
484 else
485 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
486 pending_chain = QUEUED_NEXT (p);
487 }
488 }
489 \f
490 /* Copy data from FROM to TO, where the machine modes are not the same.
491 Both modes may be integer, or both may be floating.
492 UNSIGNEDP should be nonzero if FROM is an unsigned type.
493 This causes zero-extension instead of sign-extension. */
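
/* For example, zero-extending a QImode register into an SImode register
   could be requested as

	convert_move (si_reg, qi_reg, 1);

   which uses an extendqisi2-style pattern when the target provides one,
   and otherwise one of the fallback strategies below.  The operand names
   are illustrative only.  */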
494
495 void
496 convert_move (to, from, unsignedp)
497 register rtx to, from;
498 int unsignedp;
499 {
500 enum machine_mode to_mode = GET_MODE (to);
501 enum machine_mode from_mode = GET_MODE (from);
502 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
503 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
504 enum insn_code code;
505 rtx libcall;
506
507 /* rtx code for making an equivalent value. */
508 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
509
510 to = protect_from_queue (to, 1);
511 from = protect_from_queue (from, 0);
512
513 if (to_real != from_real)
514 abort ();
515
516 /* If FROM is a SUBREG that indicates that we have already done at least
517 the required extension, strip it. We don't handle such SUBREGs as
518 TO here. */
519
520 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
521 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
522 >= GET_MODE_SIZE (to_mode))
523 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
524 from = gen_lowpart (to_mode, from), from_mode = to_mode;
525
526 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
527 abort ();
528
529 if (to_mode == from_mode
530 || (from_mode == VOIDmode && CONSTANT_P (from)))
531 {
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real)
537 {
538 rtx value;
539
540 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
541 {
542 /* Try converting directly if the insn is supported. */
543 if ((code = can_extend_p (to_mode, from_mode, 0))
544 != CODE_FOR_nothing)
545 {
546 emit_unop_insn (code, to, from, UNKNOWN);
547 return;
548 }
549 }
550
551 #ifdef HAVE_trunchfqf2
552 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
553 {
554 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
555 return;
556 }
557 #endif
558 #ifdef HAVE_trunctqfqf2
559 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
560 {
561 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
562 return;
563 }
564 #endif
565 #ifdef HAVE_truncsfqf2
566 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_truncdfqf2
573 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncxfqf2
580 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_trunctfqf2
587 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593
594 #ifdef HAVE_trunctqfhf2
595 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
596 {
597 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncsfhf2
602 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_truncdfhf2
609 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncxfhf2
616 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_trunctfhf2
623 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629
630 #ifdef HAVE_truncsftqf2
631 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncdftqf2
638 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncxftqf2
645 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_trunctftqf2
652 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658
659 #ifdef HAVE_truncdfsf2
660 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncxfsf2
667 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_trunctfsf2
674 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfdf2
681 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfdf2
688 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694
695 libcall = (rtx) 0;
696 switch (from_mode)
697 {
698 case SFmode:
699 switch (to_mode)
700 {
701 case DFmode:
702 libcall = extendsfdf2_libfunc;
703 break;
704
705 case XFmode:
706 libcall = extendsfxf2_libfunc;
707 break;
708
709 case TFmode:
710 libcall = extendsftf2_libfunc;
711 break;
712
713 default:
714 break;
715 }
716 break;
717
718 case DFmode:
719 switch (to_mode)
720 {
721 case SFmode:
722 libcall = truncdfsf2_libfunc;
723 break;
724
725 case XFmode:
726 libcall = extenddfxf2_libfunc;
727 break;
728
729 case TFmode:
730 libcall = extenddftf2_libfunc;
731 break;
732
733 default:
734 break;
735 }
736 break;
737
738 case XFmode:
739 switch (to_mode)
740 {
741 case SFmode:
742 libcall = truncxfsf2_libfunc;
743 break;
744
745 case DFmode:
746 libcall = truncxfdf2_libfunc;
747 break;
748
749 default:
750 break;
751 }
752 break;
753
754 case TFmode:
755 switch (to_mode)
756 {
757 case SFmode:
758 libcall = trunctfsf2_libfunc;
759 break;
760
761 case DFmode:
762 libcall = trunctfdf2_libfunc;
763 break;
764
765 default:
766 break;
767 }
768 break;
769
770 default:
771 break;
772 }
773
774 if (libcall == (rtx) 0)
775 /* This conversion is not implemented yet. */
776 abort ();
777
778 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
779 1, from, from_mode);
780 emit_move_insn (to, value);
781 return;
782 }
783
784 /* Now both modes are integers. */
785
786 /* Handle expanding beyond a word. */
787 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
788 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
789 {
790 rtx insns;
791 rtx lowpart;
792 rtx fill_value;
793 rtx lowfrom;
794 int i;
795 enum machine_mode lowpart_mode;
796 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
797
798 /* Try converting directly if the insn is supported. */
799 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
800 != CODE_FOR_nothing)
801 {
802 /* If FROM is a SUBREG, put it into a register. Do this
803 so that we always generate the same set of insns for
804 better cse'ing; if an intermediate assignment occurred,
805 we won't be doing the operation directly on the SUBREG. */
806 if (optimize > 0 && GET_CODE (from) == SUBREG)
807 from = force_reg (from_mode, from);
808 emit_unop_insn (code, to, from, equiv_code);
809 return;
810 }
811 /* Next, try converting via full word. */
812 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
813 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
814 != CODE_FOR_nothing))
815 {
816 if (GET_CODE (to) == REG)
817 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
818 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
819 emit_unop_insn (code, to,
820 gen_lowpart (word_mode, to), equiv_code);
821 return;
822 }
823
824 /* No special multiword conversion insn; do it by hand. */
825 start_sequence ();
826
827 /* Since we will turn this into a no conflict block, we must ensure
828 that the source does not overlap the target. */
829
830 if (reg_overlap_mentioned_p (to, from))
831 from = force_reg (from_mode, from);
832
833 /* Get a copy of FROM widened to a word, if necessary. */
834 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
835 lowpart_mode = word_mode;
836 else
837 lowpart_mode = from_mode;
838
839 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
840
841 lowpart = gen_lowpart (lowpart_mode, to);
842 emit_move_insn (lowpart, lowfrom);
843
844 /* Compute the value to put in each remaining word. */
845 if (unsignedp)
846 fill_value = const0_rtx;
847 else
848 {
849 #ifdef HAVE_slt
850 if (HAVE_slt
851 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
852 && STORE_FLAG_VALUE == -1)
853 {
854 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
855 lowpart_mode, 0, 0);
856 fill_value = gen_reg_rtx (word_mode);
857 emit_insn (gen_slt (fill_value));
858 }
859 else
860 #endif
861 {
862 fill_value
863 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
864 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
865 NULL_RTX, 0);
866 fill_value = convert_to_mode (word_mode, fill_value, 1);
867 }
868 }
869
870 /* Fill the remaining words. */
871 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
872 {
873 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
874 rtx subword = operand_subword (to, index, 1, to_mode);
875
876 if (subword == 0)
877 abort ();
878
879 if (fill_value != subword)
880 emit_move_insn (subword, fill_value);
881 }
882
883 insns = get_insns ();
884 end_sequence ();
885
886 emit_no_conflict_block (insns, to, from, NULL_RTX,
887 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
888 return;
889 }
890
891 /* Truncating multi-word to a word or less. */
892 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
893 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
894 {
895 if (!((GET_CODE (from) == MEM
896 && ! MEM_VOLATILE_P (from)
897 && direct_load[(int) to_mode]
898 && ! mode_dependent_address_p (XEXP (from, 0)))
899 || GET_CODE (from) == REG
900 || GET_CODE (from) == SUBREG))
901 from = force_reg (from_mode, from);
902 convert_move (to, gen_lowpart (word_mode, from), 0);
903 return;
904 }
905
906 /* Handle pointer conversion */ /* SPEE 900220 */
907 if (to_mode == PQImode)
908 {
909 if (from_mode != QImode)
910 from = convert_to_mode (QImode, from, unsignedp);
911
912 #ifdef HAVE_truncqipqi2
913 if (HAVE_truncqipqi2)
914 {
915 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
916 return;
917 }
918 #endif /* HAVE_truncqipqi2 */
919 abort ();
920 }
921
922 if (from_mode == PQImode)
923 {
924 if (to_mode != QImode)
925 {
926 from = convert_to_mode (QImode, from, unsignedp);
927 from_mode = QImode;
928 }
929 else
930 {
931 #ifdef HAVE_extendpqiqi2
932 if (HAVE_extendpqiqi2)
933 {
934 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
935 return;
936 }
937 #endif /* HAVE_extendpqiqi2 */
938 abort ();
939 }
940 }
941
942 if (to_mode == PSImode)
943 {
944 if (from_mode != SImode)
945 from = convert_to_mode (SImode, from, unsignedp);
946
947 #ifdef HAVE_truncsipsi2
948 if (HAVE_truncsipsi2)
949 {
950 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
951 return;
952 }
953 #endif /* HAVE_truncsipsi2 */
954 abort ();
955 }
956
957 if (from_mode == PSImode)
958 {
959 if (to_mode != SImode)
960 {
961 from = convert_to_mode (SImode, from, unsignedp);
962 from_mode = SImode;
963 }
964 else
965 {
966 #ifdef HAVE_extendpsisi2
967 if (HAVE_extendpsisi2)
968 {
969 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
970 return;
971 }
972 #endif /* HAVE_extendpsisi2 */
973 abort ();
974 }
975 }
976
977 if (to_mode == PDImode)
978 {
979 if (from_mode != DImode)
980 from = convert_to_mode (DImode, from, unsignedp);
981
982 #ifdef HAVE_truncdipdi2
983 if (HAVE_truncdipdi2)
984 {
985 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
986 return;
987 }
988 #endif /* HAVE_truncdipdi2 */
989 abort ();
990 }
991
992 if (from_mode == PDImode)
993 {
994 if (to_mode != DImode)
995 {
996 from = convert_to_mode (DImode, from, unsignedp);
997 from_mode = DImode;
998 }
999 else
1000 {
1001 #ifdef HAVE_extendpdidi2
1002 if (HAVE_extendpdidi2)
1003 {
1004 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1005 return;
1006 }
1007 #endif /* HAVE_extendpdidi2 */
1008 abort ();
1009 }
1010 }
1011
1012 /* Now follow all the conversions between integers
1013 no more than a word long. */
1014
1015 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1016 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1017 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1018 GET_MODE_BITSIZE (from_mode)))
1019 {
1020 if (!((GET_CODE (from) == MEM
1021 && ! MEM_VOLATILE_P (from)
1022 && direct_load[(int) to_mode]
1023 && ! mode_dependent_address_p (XEXP (from, 0)))
1024 || GET_CODE (from) == REG
1025 || GET_CODE (from) == SUBREG))
1026 from = force_reg (from_mode, from);
1027 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1028 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1029 from = copy_to_reg (from);
1030 emit_move_insn (to, gen_lowpart (to_mode, from));
1031 return;
1032 }
1033
1034 /* Handle extension. */
1035 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1036 {
1037 /* Convert directly if that works. */
1038 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1039 != CODE_FOR_nothing)
1040 {
1041 emit_unop_insn (code, to, from, equiv_code);
1042 return;
1043 }
1044 else
1045 {
1046 enum machine_mode intermediate;
1047 rtx tmp;
1048 tree shift_amount;
1049
1050 /* Search for a mode to convert via. */
1051 for (intermediate = from_mode; intermediate != VOIDmode;
1052 intermediate = GET_MODE_WIDER_MODE (intermediate))
1053 if (((can_extend_p (to_mode, intermediate, unsignedp)
1054 != CODE_FOR_nothing)
1055 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1056 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1057 GET_MODE_BITSIZE (intermediate))))
1058 && (can_extend_p (intermediate, from_mode, unsignedp)
1059 != CODE_FOR_nothing))
1060 {
1061 convert_move (to, convert_to_mode (intermediate, from,
1062 unsignedp), unsignedp);
1063 return;
1064 }
1065
1066 /* No suitable intermediate mode.
1067 Generate what we need with shifts. */
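	  /* For example, a QImode-to-SImode extension on a 32-bit target is
	     done here as a left shift by 24 bits followed by a right shift
	     by 24 bits; the right shift is arithmetic for a signed source
	     and logical for an unsigned one.  */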
1068 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1069 - GET_MODE_BITSIZE (from_mode), 0);
1070 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1071 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1072 to, unsignedp);
1073 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1074 to, unsignedp);
1075 if (tmp != to)
1076 emit_move_insn (to, tmp);
1077 return;
1078 }
1079 }
1080
1081 /* Support special truncate insns for certain modes. */
1082
1083 if (from_mode == DImode && to_mode == SImode)
1084 {
1085 #ifdef HAVE_truncdisi2
1086 if (HAVE_truncdisi2)
1087 {
1088 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1089 return;
1090 }
1091 #endif
1092 convert_move (to, force_reg (from_mode, from), unsignedp);
1093 return;
1094 }
1095
1096 if (from_mode == DImode && to_mode == HImode)
1097 {
1098 #ifdef HAVE_truncdihi2
1099 if (HAVE_truncdihi2)
1100 {
1101 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1102 return;
1103 }
1104 #endif
1105 convert_move (to, force_reg (from_mode, from), unsignedp);
1106 return;
1107 }
1108
1109 if (from_mode == DImode && to_mode == QImode)
1110 {
1111 #ifdef HAVE_truncdiqi2
1112 if (HAVE_truncdiqi2)
1113 {
1114 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1115 return;
1116 }
1117 #endif
1118 convert_move (to, force_reg (from_mode, from), unsignedp);
1119 return;
1120 }
1121
1122 if (from_mode == SImode && to_mode == HImode)
1123 {
1124 #ifdef HAVE_truncsihi2
1125 if (HAVE_truncsihi2)
1126 {
1127 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1128 return;
1129 }
1130 #endif
1131 convert_move (to, force_reg (from_mode, from), unsignedp);
1132 return;
1133 }
1134
1135 if (from_mode == SImode && to_mode == QImode)
1136 {
1137 #ifdef HAVE_truncsiqi2
1138 if (HAVE_truncsiqi2)
1139 {
1140 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1141 return;
1142 }
1143 #endif
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1145 return;
1146 }
1147
1148 if (from_mode == HImode && to_mode == QImode)
1149 {
1150 #ifdef HAVE_trunchiqi2
1151 if (HAVE_trunchiqi2)
1152 {
1153 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1154 return;
1155 }
1156 #endif
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1158 return;
1159 }
1160
1161 if (from_mode == TImode && to_mode == DImode)
1162 {
1163 #ifdef HAVE_trunctidi2
1164 if (HAVE_trunctidi2)
1165 {
1166 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1167 return;
1168 }
1169 #endif
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1171 return;
1172 }
1173
1174 if (from_mode == TImode && to_mode == SImode)
1175 {
1176 #ifdef HAVE_trunctisi2
1177 if (HAVE_trunctisi2)
1178 {
1179 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1180 return;
1181 }
1182 #endif
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1184 return;
1185 }
1186
1187 if (from_mode == TImode && to_mode == HImode)
1188 {
1189 #ifdef HAVE_trunctihi2
1190 if (HAVE_trunctihi2)
1191 {
1192 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1193 return;
1194 }
1195 #endif
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1197 return;
1198 }
1199
1200 if (from_mode == TImode && to_mode == QImode)
1201 {
1202 #ifdef HAVE_trunctiqi2
1203 if (HAVE_trunctiqi2)
1204 {
1205 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1206 return;
1207 }
1208 #endif
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1210 return;
1211 }
1212
1213 /* Handle truncation of volatile memrefs, and so on;
1214 the things that couldn't be truncated directly,
1215 and for which there was no special instruction. */
1216 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1217 {
1218 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1219 emit_move_insn (to, temp);
1220 return;
1221 }
1222
1223 /* Mode combination is not recognized. */
1224 abort ();
1225 }
1226
1227 /* Return an rtx for a value that would result
1228 from converting X to mode MODE.
1229 Both X and MODE may be floating, or both integer.
1230 UNSIGNEDP is nonzero if X is an unsigned value.
1231 This can be done by referring to a part of X in place
1232 or by copying to a new temporary with conversion.
1233
1234 This function *must not* call protect_from_queue
1235 except when putting X into an insn (in which case convert_move does it). */
1236
1237 rtx
1238 convert_to_mode (mode, x, unsignedp)
1239 enum machine_mode mode;
1240 rtx x;
1241 int unsignedp;
1242 {
1243 return convert_modes (mode, VOIDmode, x, unsignedp);
1244 }
1245
1246 /* Return an rtx for a value that would result
1247 from converting X from mode OLDMODE to mode MODE.
1248 Both modes may be floating, or both integer.
1249 UNSIGNEDP is nonzero if X is an unsigned value.
1250
1251 This can be done by referring to a part of X in place
1252 or by copying to a new temporary with conversion.
1253
1254 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1255
1256 This function *must not* call protect_from_queue
1257 except when putting X into an insn (in which case convert_move does it). */
1258
1259 rtx
1260 convert_modes (mode, oldmode, x, unsignedp)
1261 enum machine_mode mode, oldmode;
1262 rtx x;
1263 int unsignedp;
1264 {
1265 register rtx temp;
1266
1267 /* If FROM is a SUBREG that indicates that we have already done at least
1268 the required extension, strip it. */
1269
1270 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1271 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1272 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1273 x = gen_lowpart (mode, x);
1274
1275 if (GET_MODE (x) != VOIDmode)
1276 oldmode = GET_MODE (x);
1277
1278 if (mode == oldmode)
1279 return x;
1280
1281 /* There is one case that we must handle specially: If we are converting
1282 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1283 we are to interpret the constant as unsigned, gen_lowpart will do
 1284 the wrong thing if the constant appears negative. What we want to do is
1285 make the high-order word of the constant zero, not all ones. */
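  /* Concretely, on a host whose HOST_WIDE_INT is 32 bits, converting the
     CONST_INT -1 to an unsigned 64-bit integer mode should produce the
     double-word constant 0x00000000ffffffff, not all ones; clearing the
     high-order word below gives exactly that.  */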
1286
1287 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1288 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1289 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1290 {
1291 HOST_WIDE_INT val = INTVAL (x);
1292
1293 if (oldmode != VOIDmode
1294 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1295 {
1296 int width = GET_MODE_BITSIZE (oldmode);
1297
1298 /* We need to zero extend VAL. */
1299 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1300 }
1301
1302 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1303 }
1304
1305 /* We can do this with a gen_lowpart if both desired and current modes
1306 are integer, and this is either a constant integer, a register, or a
1307 non-volatile MEM. Except for the constant case where MODE is no
1308 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1309
1310 if ((GET_CODE (x) == CONST_INT
1311 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1312 || (GET_MODE_CLASS (mode) == MODE_INT
1313 && GET_MODE_CLASS (oldmode) == MODE_INT
1314 && (GET_CODE (x) == CONST_DOUBLE
1315 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1316 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1317 && direct_load[(int) mode])
1318 || (GET_CODE (x) == REG
1319 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1320 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1321 {
1322 /* ?? If we don't know OLDMODE, we have to assume here that
1323 X does not need sign- or zero-extension. This may not be
1324 the case, but it's the best we can do. */
1325 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1326 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1327 {
1328 HOST_WIDE_INT val = INTVAL (x);
1329 int width = GET_MODE_BITSIZE (oldmode);
1330
1331 /* We must sign or zero-extend in this case. Start by
1332 zero-extending, then sign extend if we need to. */
1333 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1334 if (! unsignedp
1335 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1336 val |= (HOST_WIDE_INT) (-1) << width;
1337
1338 return GEN_INT (val);
1339 }
1340
1341 return gen_lowpart (mode, x);
1342 }
1343
1344 temp = gen_reg_rtx (mode);
1345 convert_move (temp, x, unsignedp);
1346 return temp;
1347 }
1348 \f
1349
 1350 /* This macro determines the largest unit size that
 1351    move_by_pieces can use. */
1352
1353 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1354 move efficiently, as opposed to MOVE_MAX which is the maximum
 1355 number of bytes we can move with a single instruction. */
1356
1357 #ifndef MOVE_MAX_PIECES
1358 #define MOVE_MAX_PIECES MOVE_MAX
1359 #endif
1360
1361 /* Generate several move instructions to copy LEN bytes
1362 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1363 The caller must pass FROM and TO
1364 through protect_from_queue before calling.
1365 ALIGN (in bytes) is maximum alignment we can assume. */
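
/* For reference, emit_block_move below invokes this roughly as

	if (GET_CODE (size) == CONST_INT
	    && MOVE_BY_PIECES_P (INTVAL (size), align))
	  move_by_pieces (x, y, INTVAL (size), align);

   after X and Y have been passed through protect_from_queue.  */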
1366
1367 void
1368 move_by_pieces (to, from, len, align)
1369 rtx to, from;
1370 int len;
1371 unsigned int align;
1372 {
1373 struct move_by_pieces data;
1374 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1375 unsigned int max_size = MOVE_MAX_PIECES + 1;
1376 enum machine_mode mode = VOIDmode, tmode;
1377 enum insn_code icode;
1378
1379 data.offset = 0;
1380 data.to_addr = to_addr;
1381 data.from_addr = from_addr;
1382 data.to = to;
1383 data.from = from;
1384 data.autinc_to
1385 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1386 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1387 data.autinc_from
1388 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1389 || GET_CODE (from_addr) == POST_INC
1390 || GET_CODE (from_addr) == POST_DEC);
1391
1392 data.explicit_inc_from = 0;
1393 data.explicit_inc_to = 0;
1394 data.reverse
1395 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1396 if (data.reverse) data.offset = len;
1397 data.len = len;
1398
1399 data.to_struct = MEM_IN_STRUCT_P (to);
1400 data.from_struct = MEM_IN_STRUCT_P (from);
1401 data.to_readonly = RTX_UNCHANGING_P (to);
1402 data.from_readonly = RTX_UNCHANGING_P (from);
1403
1404 /* If copying requires more than two move insns,
1405 copy addresses to registers (to make displacements shorter)
1406 and use post-increment if available. */
1407 if (!(data.autinc_from && data.autinc_to)
1408 && move_by_pieces_ninsns (len, align) > 2)
1409 {
1410 /* Find the mode of the largest move... */
1411 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1412 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1413 if (GET_MODE_SIZE (tmode) < max_size)
1414 mode = tmode;
1415
1416 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1417 {
1418 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = -1;
1421 }
1422 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1423 {
1424 data.from_addr = copy_addr_to_reg (from_addr);
1425 data.autinc_from = 1;
1426 data.explicit_inc_from = 1;
1427 }
1428 if (!data.autinc_from && CONSTANT_P (from_addr))
1429 data.from_addr = copy_addr_to_reg (from_addr);
1430 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1431 {
1432 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = -1;
1435 }
1436 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1437 {
1438 data.to_addr = copy_addr_to_reg (to_addr);
1439 data.autinc_to = 1;
1440 data.explicit_inc_to = 1;
1441 }
1442 if (!data.autinc_to && CONSTANT_P (to_addr))
1443 data.to_addr = copy_addr_to_reg (to_addr);
1444 }
1445
1446 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1447 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1448 align = MOVE_MAX;
1449
1450 /* First move what we can in the largest integer mode, then go to
1451 successively smaller modes. */
1452
1453 while (max_size > 1)
1454 {
1455 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1456 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1457 if (GET_MODE_SIZE (tmode) < max_size)
1458 mode = tmode;
1459
1460 if (mode == VOIDmode)
1461 break;
1462
1463 icode = mov_optab->handlers[(int) mode].insn_code;
1464 if (icode != CODE_FOR_nothing
1465 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1466 (unsigned int) GET_MODE_SIZE (mode)))
1467 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1468
1469 max_size = GET_MODE_SIZE (mode);
1470 }
1471
1472 /* The code above should have handled everything. */
1473 if (data.len > 0)
1474 abort ();
1475 }
1476
1477 /* Return number of insns required to move L bytes by pieces.
1478 ALIGN (in bytes) is maximum alignment we can assume. */
1479
1480 static int
1481 move_by_pieces_ninsns (l, align)
1482 unsigned int l;
1483 unsigned int align;
1484 {
1485 register int n_insns = 0;
1486 unsigned int max_size = MOVE_MAX + 1;
1487
1488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1489 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1490 align = MOVE_MAX;
1491
1492 while (max_size > 1)
1493 {
1494 enum machine_mode mode = VOIDmode, tmode;
1495 enum insn_code icode;
1496
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) < max_size)
1500 mode = tmode;
1501
1502 if (mode == VOIDmode)
1503 break;
1504
1505 icode = mov_optab->handlers[(int) mode].insn_code;
1506 if (icode != CODE_FOR_nothing
1507 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1508 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1509
1510 max_size = GET_MODE_SIZE (mode);
1511 }
1512
1513 return n_insns;
1514 }
1515
1516 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1517 with move instructions for mode MODE. GENFUN is the gen_... function
1518 to make a move insn for that mode. DATA has all the other info. */
1519
1520 static void
1521 move_by_pieces_1 (genfun, mode, data)
1522 rtx (*genfun) PARAMS ((rtx, ...));
1523 enum machine_mode mode;
1524 struct move_by_pieces *data;
1525 {
1526 register int size = GET_MODE_SIZE (mode);
1527 register rtx to1, from1;
1528
1529 while (data->len >= size)
1530 {
1531 if (data->reverse) data->offset -= size;
1532
1533 to1 = (data->autinc_to
1534 ? gen_rtx_MEM (mode, data->to_addr)
1535 : copy_rtx (change_address (data->to, mode,
1536 plus_constant (data->to_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (to1) = data->to_struct;
1539 RTX_UNCHANGING_P (to1) = data->to_readonly;
1540
1541 from1
1542 = (data->autinc_from
1543 ? gen_rtx_MEM (mode, data->from_addr)
1544 : copy_rtx (change_address (data->from, mode,
1545 plus_constant (data->from_addr,
1546 data->offset))));
1547 MEM_IN_STRUCT_P (from1) = data->from_struct;
1548 RTX_UNCHANGING_P (from1) = data->from_readonly;
1549
1550 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1551 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1552 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1553 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1554
1555 emit_insn ((*genfun) (to1, from1));
1556 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1557 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1558 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1559 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1560
1561 if (! data->reverse) data->offset += size;
1562
1563 data->len -= size;
1564 }
1565 }
1566 \f
1567 /* Emit code to move a block Y to a block X.
1568 This may be done with string-move instructions,
1569 with multiple scalar move instructions, or with a library call.
1570
1571 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1572 with mode BLKmode.
1573 SIZE is an rtx that says how long they are.
1574 ALIGN is the maximum alignment we can assume they have,
1575 measured in bytes.
1576
1577 Return the address of the new block, if memcpy is called and returns it,
1578 0 otherwise. */
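
/* A typical call from elsewhere in the compiler looks something like

	emit_block_move (to_rtx, from_rtx, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

   where both operands are BLKmode MEMs; the names here are illustrative
   only.  */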
1579
1580 rtx
1581 emit_block_move (x, y, size, align)
1582 rtx x, y;
1583 rtx size;
1584 unsigned int align;
1585 {
1586 rtx retval = 0;
1587 #ifdef TARGET_MEM_FUNCTIONS
1588 static tree fn;
1589 tree call_expr, arg_list;
1590 #endif
1591
1592 if (GET_MODE (x) != BLKmode)
1593 abort ();
1594
1595 if (GET_MODE (y) != BLKmode)
1596 abort ();
1597
1598 x = protect_from_queue (x, 1);
1599 y = protect_from_queue (y, 0);
1600 size = protect_from_queue (size, 0);
1601
1602 if (GET_CODE (x) != MEM)
1603 abort ();
1604 if (GET_CODE (y) != MEM)
1605 abort ();
1606 if (size == 0)
1607 abort ();
1608
1609 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1610 move_by_pieces (x, y, INTVAL (size), align);
1611 else
1612 {
1613 /* Try the most limited insn first, because there's no point
1614 including more than one in the machine description unless
1615 the more limited one has some advantage. */
1616
1617 rtx opalign = GEN_INT (align);
1618 enum machine_mode mode;
1619
1620 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1621 mode = GET_MODE_WIDER_MODE (mode))
1622 {
1623 enum insn_code code = movstr_optab[(int) mode];
1624 insn_operand_predicate_fn pred;
1625
1626 if (code != CODE_FOR_nothing
1627 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1628 here because if SIZE is less than the mode mask, as it is
1629 returned by the macro, it will definitely be less than the
1630 actual mode mask. */
1631 && ((GET_CODE (size) == CONST_INT
1632 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1633 <= (GET_MODE_MASK (mode) >> 1)))
1634 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1635 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1636 || (*pred) (x, BLKmode))
1637 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1638 || (*pred) (y, BLKmode))
1639 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1640 || (*pred) (opalign, VOIDmode)))
1641 {
1642 rtx op2;
1643 rtx last = get_last_insn ();
1644 rtx pat;
1645
1646 op2 = convert_to_mode (mode, size, 1);
1647 pred = insn_data[(int) code].operand[2].predicate;
1648 if (pred != 0 && ! (*pred) (op2, mode))
1649 op2 = copy_to_mode_reg (mode, op2);
1650
1651 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1652 if (pat)
1653 {
1654 emit_insn (pat);
1655 return 0;
1656 }
1657 else
1658 delete_insns_since (last);
1659 }
1660 }
1661
1662 /* X, Y, or SIZE may have been passed through protect_from_queue.
1663
1664 It is unsafe to save the value generated by protect_from_queue
1665 and reuse it later. Consider what happens if emit_queue is
1666 called before the return value from protect_from_queue is used.
1667
1668 Expansion of the CALL_EXPR below will call emit_queue before
1669 we are finished emitting RTL for argument setup. So if we are
1670 not careful we could get the wrong value for an argument.
1671
1672 To avoid this problem we go ahead and emit code to copy X, Y &
1673 SIZE into new pseudos. We can then place those new pseudos
1674 into an RTL_EXPR and use them later, even after a call to
1675 emit_queue.
1676
1677 Note this is not strictly needed for library calls since they
1678 do not call emit_queue before loading their arguments. However,
1679 we may need to have library calls call emit_queue in the future
1680 since failing to do so could cause problems for targets which
1681 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1682 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1683 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1684
1685 #ifdef TARGET_MEM_FUNCTIONS
1686 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1687 #else
1688 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1689 TREE_UNSIGNED (integer_type_node));
1690 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1691 #endif
1692
1693 #ifdef TARGET_MEM_FUNCTIONS
1694 /* It is incorrect to use the libcall calling conventions to call
1695 memcpy in this context.
1696
1697 This could be a user call to memcpy and the user may wish to
1698 examine the return value from memcpy.
1699
1700 For targets where libcalls and normal calls have different conventions
1701 for returning pointers, we could end up generating incorrect code.
1702
1703 So instead of using a libcall sequence we build up a suitable
1704 CALL_EXPR and expand the call in the normal fashion. */
1705 if (fn == NULL_TREE)
1706 {
1707 tree fntype;
1708
 1709 /* This was copied from except.c; I don't know if all this is
1710 necessary in this context or not. */
1711 fn = get_identifier ("memcpy");
1712 push_obstacks_nochange ();
1713 end_temporary_allocation ();
1714 fntype = build_pointer_type (void_type_node);
1715 fntype = build_function_type (fntype, NULL_TREE);
1716 fn = build_decl (FUNCTION_DECL, fn, fntype);
1717 ggc_add_tree_root (&fn, 1);
1718 DECL_EXTERNAL (fn) = 1;
1719 TREE_PUBLIC (fn) = 1;
1720 DECL_ARTIFICIAL (fn) = 1;
1721 make_decl_rtl (fn, NULL_PTR, 1);
1722 assemble_external (fn);
1723 pop_obstacks ();
1724 }
1725
1726 /* We need to make an argument list for the function call.
1727
1728 memcpy has three arguments, the first two are void * addresses and
1729 the last is a size_t byte count for the copy. */
1730 arg_list
1731 = build_tree_list (NULL_TREE,
1732 make_tree (build_pointer_type (void_type_node), x));
1733 TREE_CHAIN (arg_list)
1734 = build_tree_list (NULL_TREE,
1735 make_tree (build_pointer_type (void_type_node), y));
1736 TREE_CHAIN (TREE_CHAIN (arg_list))
1737 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1738 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1739
1740 /* Now we have to build up the CALL_EXPR itself. */
1741 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1742 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1743 call_expr, arg_list, NULL_TREE);
1744 TREE_SIDE_EFFECTS (call_expr) = 1;
1745
1746 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1747 #else
1748 emit_library_call (bcopy_libfunc, 0,
1749 VOIDmode, 3, y, Pmode, x, Pmode,
1750 convert_to_mode (TYPE_MODE (integer_type_node), size,
1751 TREE_UNSIGNED (integer_type_node)),
1752 TYPE_MODE (integer_type_node));
1753 #endif
1754 }
1755
1756 return retval;
1757 }
1758 \f
1759 /* Copy all or part of a value X into registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1761
1762 void
1763 move_block_to_reg (regno, x, nregs, mode)
1764 int regno;
1765 rtx x;
1766 int nregs;
1767 enum machine_mode mode;
1768 {
1769 int i;
1770 #ifdef HAVE_load_multiple
1771 rtx pat;
1772 rtx last;
1773 #endif
1774
1775 if (nregs == 0)
1776 return;
1777
1778 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1779 x = validize_mem (force_const_mem (mode, x));
1780
1781 /* See if the machine can do this with a load multiple insn. */
1782 #ifdef HAVE_load_multiple
1783 if (HAVE_load_multiple)
1784 {
1785 last = get_last_insn ();
1786 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1787 GEN_INT (nregs));
1788 if (pat)
1789 {
1790 emit_insn (pat);
1791 return;
1792 }
1793 else
1794 delete_insns_since (last);
1795 }
1796 #endif
1797
1798 for (i = 0; i < nregs; i++)
1799 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1800 operand_subword_force (x, i, mode));
1801 }
1802
1803 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1804 The number of registers to be filled is NREGS. SIZE indicates the number
1805 of bytes in the object X. */
1806
1807
1808 void
1809 move_block_from_reg (regno, x, nregs, size)
1810 int regno;
1811 rtx x;
1812 int nregs;
1813 int size;
1814 {
1815 int i;
1816 #ifdef HAVE_store_multiple
1817 rtx pat;
1818 rtx last;
1819 #endif
1820 enum machine_mode mode;
1821
1822 /* If SIZE is that of a mode no bigger than a word, just use that
1823 mode's store operation. */
1824 if (size <= UNITS_PER_WORD
1825 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1826 {
1827 emit_move_insn (change_address (x, mode, NULL),
1828 gen_rtx_REG (mode, regno));
1829 return;
1830 }
1831
1832 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1833 to the left before storing to memory. Note that the previous test
1834 doesn't handle all cases (e.g. SIZE == 3). */
1835 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1836 {
1837 rtx tem = operand_subword (x, 0, 1, BLKmode);
1838 rtx shift;
1839
1840 if (tem == 0)
1841 abort ();
1842
1843 shift = expand_shift (LSHIFT_EXPR, word_mode,
1844 gen_rtx_REG (word_mode, regno),
1845 build_int_2 ((UNITS_PER_WORD - size)
1846 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1847 emit_move_insn (tem, shift);
1848 return;
1849 }
1850
1851 /* See if the machine can do this with a store multiple insn. */
1852 #ifdef HAVE_store_multiple
1853 if (HAVE_store_multiple)
1854 {
1855 last = get_last_insn ();
1856 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1857 GEN_INT (nregs));
1858 if (pat)
1859 {
1860 emit_insn (pat);
1861 return;
1862 }
1863 else
1864 delete_insns_since (last);
1865 }
1866 #endif
1867
1868 for (i = 0; i < nregs; i++)
1869 {
1870 rtx tem = operand_subword (x, i, 1, BLKmode);
1871
1872 if (tem == 0)
1873 abort ();
1874
1875 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1876 }
1877 }
1878
1879 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1880 registers represented by a PARALLEL. SSIZE represents the total size of
1881 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
 1882 SRC in bytes. */
 1883 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1884 the balance will be in what would be the low-order memory addresses, i.e.
1885 left justified for big endian, right justified for little endian. This
1886 happens to be true for the targets currently using this support. If this
1887 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1888 would be needed. */
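
/* As an illustration, DST might be built by the target's FUNCTION_ARG
   code as

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   meaning that bytes 0-7 of SRC are loaded into register 3 and bytes
   8-15 into register 4; the modes and register numbers are illustrative
   only.  */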
1889
1890 void
1891 emit_group_load (dst, orig_src, ssize, align)
1892 rtx dst, orig_src;
1893 unsigned int align;
1894 int ssize;
1895 {
1896 rtx *tmps, src;
1897 int start, i;
1898
1899 if (GET_CODE (dst) != PARALLEL)
1900 abort ();
1901
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (dst, 0, 0), 0))
1905 start = 0;
1906 else
1907 start = 1;
1908
 1909 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1910
1911 /* If we won't be loading directly from memory, protect the real source
1912 from strange tricks we might play. */
1913 src = orig_src;
1914 if (GET_CODE (src) != MEM)
1915 {
1916 if (GET_MODE (src) == VOIDmode)
1917 src = gen_reg_rtx (GET_MODE (dst));
1918 else
1919 src = gen_reg_rtx (GET_MODE (orig_src));
1920 emit_move_insn (src, orig_src);
1921 }
1922
1923 /* Process the pieces. */
1924 for (i = start; i < XVECLEN (dst, 0); i++)
1925 {
1926 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1927 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1928 unsigned int bytelen = GET_MODE_SIZE (mode);
1929 int shift = 0;
1930
1931 /* Handle trailing fragments that run over the size of the struct. */
1932 if (ssize >= 0 && bytepos + bytelen > ssize)
1933 {
1934 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1935 bytelen = ssize - bytepos;
1936 if (bytelen <= 0)
1937 abort ();
1938 }
1939
1940 /* Optimize the access just a bit. */
1941 if (GET_CODE (src) == MEM
1942 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1943 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1944 && bytelen == GET_MODE_SIZE (mode))
1945 {
1946 tmps[i] = gen_reg_rtx (mode);
1947 emit_move_insn (tmps[i],
1948 change_address (src, mode,
1949 plus_constant (XEXP (src, 0),
1950 bytepos)));
1951 }
1952 else if (GET_CODE (src) == CONCAT)
1953 {
1954 if (bytepos == 0
1955 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1956 tmps[i] = XEXP (src, 0);
1957 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1958 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1959 tmps[i] = XEXP (src, 1);
1960 else
1961 abort ();
1962 }
1963 else
1964 {
1965 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1966 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1967 mode, mode, align, ssize);
1968 }
1969
1970 if (BYTES_BIG_ENDIAN && shift)
1971 {
1972 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1973 tmps[i], 0, OPTAB_WIDEN);
1974 }
1975 }
1976 emit_queue();
1977
1978 /* Copy the extracted pieces into the proper (probable) hard regs. */
1979 for (i = start; i < XVECLEN (dst, 0); i++)
1980 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1981 }
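
/* Illustrative sketch, not part of the original source: the
   trailing-fragment arithmetic used in the loop above.  The 6-byte
   structure size and 4-byte pieces are made-up values chosen so the last
   piece overruns the struct; on a big-endian target such a piece is
   shifted left by the number of missing bits.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const int ssize = 6;            /* assumed total size in bytes */
  const int piece = 4;            /* word-sized register pieces  */
  const int bits_per_unit = 8;
  int bytepos;

  for (bytepos = 0; bytepos < ssize; bytepos += piece)
    {
      int bytelen = piece, shift = 0;

      if (bytepos + bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * bits_per_unit;
          bytelen = ssize - bytepos;
        }
      printf ("piece at byte %d: %d byte(s), big-endian shift %d bits\n",
              bytepos, bytelen, shift);
    }
  return 0;
}
#endif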
1982
1983 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1984 registers represented by a PARALLEL. SSIZE represents the total size of
1985 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1986
1987 void
1988 emit_group_store (orig_dst, src, ssize, align)
1989 rtx orig_dst, src;
1990 int ssize;
1991 unsigned int align;
1992 {
1993 rtx *tmps, dst;
1994 int start, i;
1995
1996 if (GET_CODE (src) != PARALLEL)
1997 abort ();
1998
1999 /* Check for a NULL entry, used to indicate that the parameter goes
2000 both on the stack and in registers. */
2001 if (XEXP (XVECEXP (src, 0, 0), 0))
2002 start = 0;
2003 else
2004 start = 1;
2005
2006 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2007
2008 /* Copy the (probable) hard regs into pseudos. */
2009 for (i = start; i < XVECLEN (src, 0); i++)
2010 {
2011 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2012 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2013 emit_move_insn (tmps[i], reg);
2014 }
2015 emit_queue();
2016
2017 /* If we won't be storing directly into memory, protect the real destination
2018 from strange tricks we might play. */
2019 dst = orig_dst;
2020 if (GET_CODE (dst) == PARALLEL)
2021 {
2022 rtx temp;
2023
2024 /* We can get a PARALLEL dst if there is a conditional expression in
2025 a return statement. In that case, the dst and src are the same,
2026 so no action is necessary. */
2027 if (rtx_equal_p (dst, src))
2028 return;
2029
2030 /* It is unclear if we can ever reach here, but we may as well handle
2031 it. Allocate a temporary, and split this into a store/load to/from
2032 the temporary. */
2033
2034 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2035 emit_group_store (temp, src, ssize, align);
2036 emit_group_load (dst, temp, ssize, align);
2037 return;
2038 }
2039 else if (GET_CODE (dst) != MEM)
2040 {
2041 dst = gen_reg_rtx (GET_MODE (orig_dst));
2042 /* Make life a bit easier for combine. */
2043 emit_move_insn (dst, const0_rtx);
2044 }
2045 else if (! MEM_IN_STRUCT_P (dst))
2046 {
2047 /* store_bit_field requires that memory operations have
2048 mem_in_struct_p set; we might not. */
2049
2050 dst = copy_rtx (orig_dst);
2051 MEM_SET_IN_STRUCT_P (dst, 1);
2052 }
2053
2054 /* Process the pieces. */
2055 for (i = start; i < XVECLEN (src, 0); i++)
2056 {
2057 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2058 enum machine_mode mode = GET_MODE (tmps[i]);
2059 unsigned int bytelen = GET_MODE_SIZE (mode);
2060
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + bytelen > ssize)
2063 {
2064 if (BYTES_BIG_ENDIAN)
2065 {
2066 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2067 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2068 tmps[i], 0, OPTAB_WIDEN);
2069 }
2070 bytelen = ssize - bytepos;
2071 }
2072
2073 /* Optimize the access just a bit. */
2074 if (GET_CODE (dst) == MEM
2075 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2076 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2077 && bytelen == GET_MODE_SIZE (mode))
2078 emit_move_insn (change_address (dst, mode,
2079 plus_constant (XEXP (dst, 0),
2080 bytepos)),
2081 tmps[i]);
2082 else
2083 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 mode, tmps[i], align, ssize);
2085 }
2086
2087 emit_queue();
2088
2089 /* Copy from the pseudo into the (probable) hard reg. */
2090 if (GET_CODE (dst) == REG)
2091 emit_move_insn (orig_dst, dst);
2092 }
2093
2094 /* Generate code to copy a BLKmode object of TYPE out of a
2095 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2096 is null, a stack temporary is created. TGTBLK is returned.
2097
2098 The primary purpose of this routine is to handle functions
2099 that return BLKmode structures in registers. Some machines
2100 (the PA for example) want to return all small structures
2101 in registers regardless of the structure's alignment. */
2102
2103 rtx
2104 copy_blkmode_from_reg (tgtblk, srcreg, type)
2105 rtx tgtblk;
2106 rtx srcreg;
2107 tree type;
2108 {
2109 int bytes = int_size_in_bytes (type);
2110 rtx src = NULL, dst = NULL;
2111 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2112 int bitpos, xbitpos, big_endian_correction = 0;
2113
2114 if (tgtblk == 0)
2115 {
2116 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2117 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2118 preserve_temp_slots (tgtblk);
2119 }
2120
2121 /* This code assumes srcreg is at least a full word. If it isn't,
2122 copy it into a new pseudo which is a full word. */
2123 if (GET_MODE (srcreg) != BLKmode
2124 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2125 srcreg = convert_to_mode (word_mode, srcreg,
2126 TREE_UNSIGNED (type));
2127
2128 /* Structures whose size is not a multiple of a word are aligned
2129 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2130 machine, this means we must skip the empty high order bytes when
2131 calculating the bit offset. */
2132 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2133 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2134 * BITS_PER_UNIT));
2135
2136 /* Copy the structure BITSIZE bits at a time.
2137
2138 We could probably emit more efficient code for machines
2139 which do not use strict alignment, but it doesn't seem
2140 worth the effort at the current time. */
2141 for (bitpos = 0, xbitpos = big_endian_correction;
2142 bitpos < bytes * BITS_PER_UNIT;
2143 bitpos += bitsize, xbitpos += bitsize)
2144 {
2145
2146 /* We need a new source operand each time xbitpos is on a
2147 word boundary and when xbitpos == big_endian_correction
2148 (the first time through). */
2149 if (xbitpos % BITS_PER_WORD == 0
2150 || xbitpos == big_endian_correction)
2151 src = operand_subword_force (srcreg,
2152 xbitpos / BITS_PER_WORD,
2153 BLKmode);
2154
2155 /* We need a new destination operand each time bitpos is on
2156 a word boundary. */
2157 if (bitpos % BITS_PER_WORD == 0)
2158 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2159
2160 /* Use xbitpos for the source extraction (right justified) and
2161 bitpos for the destination store (left justified). */
2162 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2163 extract_bit_field (src, bitsize,
2164 xbitpos % BITS_PER_WORD, 1,
2165 NULL_RTX, word_mode,
2166 word_mode,
2167 bitsize / BITS_PER_UNIT,
2168 BITS_PER_WORD),
2169 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2170 }
2171 return tgtblk;
2172 }
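
/* Illustrative sketch, not part of the original source: the
   big_endian_correction arithmetic used above.  The 6-byte value size and
   32-bit word are made-up example values; with bytes % UNITS_PER_WORD == 2
   the source extraction starts 16 bits in, skipping the empty high-order
   bits, while the destination position starts at 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const int bits_per_word = 32, bits_per_unit = 8, units_per_word = 4;
  const int bytes = 6;            /* assumed size of the value */
  const int bitsize = 8;          /* copy one byte per step    */
  int correction = 0, bitpos, xbitpos;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  for (bitpos = 0, xbitpos = correction;
       bitpos < bytes * bits_per_unit;
       bitpos += bitsize, xbitpos += bitsize)
    printf ("destination bit %2d <- source word %d, bit %2d\n",
            bitpos, xbitpos / bits_per_word, xbitpos % bits_per_word);
  return 0;
}
#endif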
2173
2174
2175 /* Add a USE expression for REG to the (possibly empty) list pointed
2176 to by CALL_FUSAGE. REG must denote a hard register. */
2177
2178 void
2179 use_reg (call_fusage, reg)
2180 rtx *call_fusage, reg;
2181 {
2182 if (GET_CODE (reg) != REG
2183 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2184 abort();
2185
2186 *call_fusage
2187 = gen_rtx_EXPR_LIST (VOIDmode,
2188 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2189 }
2190
2191 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2192 starting at REGNO. All of these registers must be hard registers. */
2193
2194 void
2195 use_regs (call_fusage, regno, nregs)
2196 rtx *call_fusage;
2197 int regno;
2198 int nregs;
2199 {
2200 int i;
2201
2202 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2203 abort ();
2204
2205 for (i = 0; i < nregs; i++)
2206 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2207 }
2208
2209 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2210 PARALLEL REGS. This is for calls that pass values in multiple
2211 non-contiguous locations. The Irix 6 ABI has examples of this. */
2212
2213 void
2214 use_group_regs (call_fusage, regs)
2215 rtx *call_fusage;
2216 rtx regs;
2217 {
2218 int i;
2219
2220 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 {
2222 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg != 0 && GET_CODE (reg) == REG)
2228 use_reg (call_fusage, reg);
2229 }
2230 }
2231 \f
2232 /* Generate several move instructions to clear LEN bytes of block TO.
2233 (A MEM rtx with BLKmode). The caller must pass TO through
2234 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2235 we can assume. */
2236
2237 static void
2238 clear_by_pieces (to, len, align)
2239 rtx to;
2240 int len;
2241 unsigned int align;
2242 {
2243 struct clear_by_pieces data;
2244 rtx to_addr = XEXP (to, 0);
2245 unsigned int max_size = MOVE_MAX_PIECES + 1;
2246 enum machine_mode mode = VOIDmode, tmode;
2247 enum insn_code icode;
2248
2249 data.offset = 0;
2250 data.to_addr = to_addr;
2251 data.to = to;
2252 data.autinc_to
2253 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2254 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2255
2256 data.explicit_inc_to = 0;
2257 data.reverse
2258 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2259 if (data.reverse) data.offset = len;
2260 data.len = len;
2261
2262 data.to_struct = MEM_IN_STRUCT_P (to);
2263
2264 /* If copying requires more than two move insns,
2265 copy addresses to registers (to make displacements shorter)
2266 and use post-increment if available. */
2267 if (!data.autinc_to
2268 && move_by_pieces_ninsns (len, align) > 2)
2269 {
2270 /* Determine the main mode we'll be using */
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2275
2276 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = -1;
2281 }
2282 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (to_addr);
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = 1;
2287 }
2288 if (!data.autinc_to && CONSTANT_P (to_addr))
2289 data.to_addr = copy_addr_to_reg (to_addr);
2290 }
2291
2292 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2293 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2294 align = MOVE_MAX;
2295
2296 /* First move what we can in the largest integer mode, then go to
2297 successively smaller modes. */
2298
2299 while (max_size > 1)
2300 {
2301 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2302 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2303 if (GET_MODE_SIZE (tmode) < max_size)
2304 mode = tmode;
2305
2306 if (mode == VOIDmode)
2307 break;
2308
2309 icode = mov_optab->handlers[(int) mode].insn_code;
2310 if (icode != CODE_FOR_nothing
2311 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2312 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2313
2314 max_size = GET_MODE_SIZE (mode);
2315 }
2316
2317 /* The code above should have handled everything. */
2318 if (data.len != 0)
2319 abort ();
2320 }
2321
2322 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2323 with move instructions for mode MODE. GENFUN is the gen_... function
2324 to make a move insn for that mode. DATA has all the other info. */
2325
2326 static void
2327 clear_by_pieces_1 (genfun, mode, data)
2328 rtx (*genfun) PARAMS ((rtx, ...));
2329 enum machine_mode mode;
2330 struct clear_by_pieces *data;
2331 {
2332 register int size = GET_MODE_SIZE (mode);
2333 register rtx to1;
2334
2335 while (data->len >= size)
2336 {
2337 if (data->reverse) data->offset -= size;
2338
2339 to1 = (data->autinc_to
2340 ? gen_rtx_MEM (mode, data->to_addr)
2341 : copy_rtx (change_address (data->to, mode,
2342 plus_constant (data->to_addr,
2343 data->offset))));
2344 MEM_IN_STRUCT_P (to1) = data->to_struct;
2345
2346 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2347 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2348
2349 emit_insn ((*genfun) (to1, const0_rtx));
2350 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2351 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2352
2353 if (! data->reverse) data->offset += size;
2354
2355 data->len -= size;
2356 }
2357 }
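
/* Illustrative sketch, not part of the original source: the same
   widest-chunk-first strategy that clear_by_pieces/clear_by_pieces_1
   implement above, with plain C integer types standing in for the machine
   modes supplied by mov_optab.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
clear_like_by_pieces (unsigned char *p, size_t len)
{
  /* Widest "mode" first ...  */
  while (len >= sizeof (unsigned long))
    {
      const unsigned long zero = 0;
      memcpy (p, &zero, sizeof zero);
      p += sizeof zero;
      len -= sizeof zero;
    }
  /* ... then successively narrower ones.  */
  while (len >= sizeof (unsigned short))
    {
      const unsigned short zero = 0;
      memcpy (p, &zero, sizeof zero);
      p += sizeof zero;
      len -= sizeof zero;
    }
  while (len-- > 0)
    *p++ = 0;
}
#endif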
2358 \f
2359 /* Write zeros through the storage of OBJECT.
2360 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2361 the maximum alignment we can assume, measured in bytes.
2362
2363 If we call a function that returns the length of the block, return it. */
2364
2365 rtx
2366 clear_storage (object, size, align)
2367 rtx object;
2368 rtx size;
2369 unsigned int align;
2370 {
2371 #ifdef TARGET_MEM_FUNCTIONS
2372 static tree fn;
2373 tree call_expr, arg_list;
2374 #endif
2375 rtx retval = 0;
2376
2377 if (GET_MODE (object) == BLKmode)
2378 {
2379 object = protect_from_queue (object, 1);
2380 size = protect_from_queue (size, 0);
2381
2382 if (GET_CODE (size) == CONST_INT
2383 && MOVE_BY_PIECES_P (INTVAL (size), align))
2384 clear_by_pieces (object, INTVAL (size), align);
2385
2386 else
2387 {
2388 /* Try the most limited insn first, because there's no point
2389 including more than one in the machine description unless
2390 the more limited one has some advantage. */
2391
2392 rtx opalign = GEN_INT (align);
2393 enum machine_mode mode;
2394
2395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2396 mode = GET_MODE_WIDER_MODE (mode))
2397 {
2398 enum insn_code code = clrstr_optab[(int) mode];
2399 insn_operand_predicate_fn pred;
2400
2401 if (code != CODE_FOR_nothing
2402 /* We don't need MODE to be narrower than
2403 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2404 the mode mask, as it is returned by the macro, it will
2405 definitely be less than the actual mode mask. */
2406 && ((GET_CODE (size) == CONST_INT
2407 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2408 <= (GET_MODE_MASK (mode) >> 1)))
2409 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2410 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2411 || (*pred) (object, BLKmode))
2412 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2413 || (*pred) (opalign, VOIDmode)))
2414 {
2415 rtx op1;
2416 rtx last = get_last_insn ();
2417 rtx pat;
2418
2419 op1 = convert_to_mode (mode, size, 1);
2420 pred = insn_data[(int) code].operand[1].predicate;
2421 if (pred != 0 && ! (*pred) (op1, mode))
2422 op1 = copy_to_mode_reg (mode, op1);
2423
2424 pat = GEN_FCN ((int) code) (object, op1, opalign);
2425 if (pat)
2426 {
2427 emit_insn (pat);
2428 return 0;
2429 }
2430 else
2431 delete_insns_since (last);
2432 }
2433 }
2434
2435 /* OBJECT or SIZE may have been passed through protect_from_queue.
2436
2437 It is unsafe to save the value generated by protect_from_queue
2438 and reuse it later. Consider what happens if emit_queue is
2439 called before the return value from protect_from_queue is used.
2440
2441 Expansion of the CALL_EXPR below will call emit_queue before
2442 we are finished emitting RTL for argument setup. So if we are
2443 not careful we could get the wrong value for an argument.
2444
2445 To avoid this problem we go ahead and emit code to copy OBJECT
2446 and SIZE into new pseudos. We can then place those new pseudos
2447 into an RTL_EXPR and use them later, even after a call to
2448 emit_queue.
2449
2450 Note this is not strictly needed for library calls since they
2451 do not call emit_queue before loading their arguments. However,
2452 we may need to have library calls call emit_queue in the future
2453 since failing to do so could cause problems for targets which
2454 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2455 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2456
2457 #ifdef TARGET_MEM_FUNCTIONS
2458 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2459 #else
2460 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2461 TREE_UNSIGNED (integer_type_node));
2462 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2463 #endif
2464
2465
2466 #ifdef TARGET_MEM_FUNCTIONS
2467 /* It is incorrect to use the libcall calling conventions to call
2468 memset in this context.
2469
2470 This could be a user call to memset and the user may wish to
2471 examine the return value from memset.
2472
2473 For targets where libcalls and normal calls have different
2474 conventions for returning pointers, we could end up generating
2475 incorrect code.
2476
2477 So instead of using a libcall sequence we build up a suitable
2478 CALL_EXPR and expand the call in the normal fashion. */
2479 if (fn == NULL_TREE)
2480 {
2481 tree fntype;
2482
2483 /* This was copied from except.c, I don't know if all this is
2484 necessary in this context or not. */
2485 fn = get_identifier ("memset");
2486 push_obstacks_nochange ();
2487 end_temporary_allocation ();
2488 fntype = build_pointer_type (void_type_node);
2489 fntype = build_function_type (fntype, NULL_TREE);
2490 fn = build_decl (FUNCTION_DECL, fn, fntype);
2491 ggc_add_tree_root (&fn, 1);
2492 DECL_EXTERNAL (fn) = 1;
2493 TREE_PUBLIC (fn) = 1;
2494 DECL_ARTIFICIAL (fn) = 1;
2495 make_decl_rtl (fn, NULL_PTR, 1);
2496 assemble_external (fn);
2497 pop_obstacks ();
2498 }
2499
2500 /* We need to make an argument list for the function call.
2501
2502 memset has three arguments, the first is a void * address, the
2503 second an integer with the initialization value, and the last is a
2504 size_t byte count. */
2505 arg_list
2506 = build_tree_list (NULL_TREE,
2507 make_tree (build_pointer_type (void_type_node),
2508 object));
2509 TREE_CHAIN (arg_list)
2510 = build_tree_list (NULL_TREE,
2511 make_tree (integer_type_node, const0_rtx));
2512 TREE_CHAIN (TREE_CHAIN (arg_list))
2513 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2514 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2515
2516 /* Now we have to build up the CALL_EXPR itself. */
2517 call_expr = build1 (ADDR_EXPR,
2518 build_pointer_type (TREE_TYPE (fn)), fn);
2519 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2520 call_expr, arg_list, NULL_TREE);
2521 TREE_SIDE_EFFECTS (call_expr) = 1;
2522
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2524 #else
2525 emit_library_call (bzero_libfunc, 0,
2526 VOIDmode, 2, object, Pmode, size,
2527 TYPE_MODE (integer_type_node));
2528 #endif
2529 }
2530 }
2531 else
2532 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2533
2534 return retval;
2535 }
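
/* Illustrative sketch, not part of the original source: the overall shape
   of the decision clear_storage makes above -- small constant-sized blocks
   are cleared inline, everything else goes to a library routine.  The
   64-byte threshold is a made-up stand-in for MOVE_BY_PIECES_P.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
clear_storage_like (void *p, size_t len)
{
  if (len <= 64)                  /* assumed "by pieces" threshold */
    {
      unsigned char *q = (unsigned char *) p;
      while (len--)
        *q++ = 0;                 /* stands in for clear_by_pieces */
    }
  else
    memset (p, 0, len);           /* stands in for the memset/bzero call
                                     emitted above */
}
#endif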
2536
2537 /* Generate code to copy Y into X.
2538 Both Y and X must have the same mode, except that
2539 Y can be a constant with VOIDmode.
2540 This mode cannot be BLKmode; use emit_block_move for that.
2541
2542 Return the last instruction emitted. */
2543
2544 rtx
2545 emit_move_insn (x, y)
2546 rtx x, y;
2547 {
2548 enum machine_mode mode = GET_MODE (x);
2549
2550 x = protect_from_queue (x, 1);
2551 y = protect_from_queue (y, 0);
2552
2553 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2554 abort ();
2555
2556 /* Never force constant_p_rtx to memory. */
2557 if (GET_CODE (y) == CONSTANT_P_RTX)
2558 ;
2559 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2560 y = force_const_mem (mode, y);
2561
2562 /* If X or Y are memory references, verify that their addresses are valid
2563 for the machine. */
2564 if (GET_CODE (x) == MEM
2565 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2566 && ! push_operand (x, GET_MODE (x)))
2567 || (flag_force_addr
2568 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2569 x = change_address (x, VOIDmode, XEXP (x, 0));
2570
2571 if (GET_CODE (y) == MEM
2572 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2573 || (flag_force_addr
2574 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2575 y = change_address (y, VOIDmode, XEXP (y, 0));
2576
2577 if (mode == BLKmode)
2578 abort ();
2579
2580 return emit_move_insn_1 (x, y);
2581 }
2582
2583 /* Low level part of emit_move_insn.
2584 Called just like emit_move_insn, but assumes X and Y
2585 are basically valid. */
2586
2587 rtx
2588 emit_move_insn_1 (x, y)
2589 rtx x, y;
2590 {
2591 enum machine_mode mode = GET_MODE (x);
2592 enum machine_mode submode;
2593 enum mode_class class = GET_MODE_CLASS (mode);
2594 unsigned int i;
2595
2596 if (mode >= MAX_MACHINE_MODE)
2597 abort ();
2598
2599 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2600 return
2601 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2602
2603 /* Expand complex moves by moving real part and imag part, if possible. */
2604 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2605 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2606 * BITS_PER_UNIT),
2607 (class == MODE_COMPLEX_INT
2608 ? MODE_INT : MODE_FLOAT),
2609 0))
2610 && (mov_optab->handlers[(int) submode].insn_code
2611 != CODE_FOR_nothing))
2612 {
2613 /* Don't split destination if it is a stack push. */
2614 int stack = push_operand (x, GET_MODE (x));
2615
2616 /* If this is a stack push, push the highpart first, so it
2617 will be in the argument order.
2618
2619 In that case, change_address is used only to convert
2620 the mode, not to change the address. */
2621 if (stack)
2622 {
2623 /* Note that the real part always precedes the imag part in memory
2624 regardless of machine's endianness. */
2625 #ifdef STACK_GROWS_DOWNWARD
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_realpart (submode, y)));
2632 #else
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_realpart (submode, y)));
2636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2637 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2638 gen_imagpart (submode, y)));
2639 #endif
2640 }
2641 else
2642 {
2643 rtx realpart_x, realpart_y;
2644 rtx imagpart_x, imagpart_y;
2645
2646 /* If this is a complex value with each part being smaller than a
2647 word, the usual calling sequence will likely pack the pieces into
2648 a single register. Unfortunately, SUBREG of hard registers only
2649 deals in terms of words, so we have a problem converting input
2650 arguments to the CONCAT of two registers that is used elsewhere
2651 for complex values. If this is before reload, we can copy it into
2652 memory and reload. FIXME, we should see about using extract and
2653 insert on integer registers, but complex short and complex char
2654 variables should be rarely used. */
2655 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2656 && (reload_in_progress | reload_completed) == 0)
2657 {
2658 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2659 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2660
2661 if (packed_dest_p || packed_src_p)
2662 {
2663 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2664 ? MODE_FLOAT : MODE_INT);
2665
2666 enum machine_mode reg_mode =
2667 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2668
2669 if (reg_mode != BLKmode)
2670 {
2671 rtx mem = assign_stack_temp (reg_mode,
2672 GET_MODE_SIZE (mode), 0);
2673
2674 rtx cmem = change_address (mem, mode, NULL_RTX);
2675
2676 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2677
2678 if (packed_dest_p)
2679 {
2680 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2681 emit_move_insn_1 (cmem, y);
2682 return emit_move_insn_1 (sreg, mem);
2683 }
2684 else
2685 {
2686 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2687 emit_move_insn_1 (mem, sreg);
2688 return emit_move_insn_1 (x, cmem);
2689 }
2690 }
2691 }
2692 }
2693
2694 realpart_x = gen_realpart (submode, x);
2695 realpart_y = gen_realpart (submode, y);
2696 imagpart_x = gen_imagpart (submode, x);
2697 imagpart_y = gen_imagpart (submode, y);
2698
2699 /* Show the output dies here. This is necessary for SUBREGs
2700 of pseudos since we cannot track their lifetimes correctly;
2701 hard regs shouldn't appear here except as return values.
2702 We never want to emit such a clobber after reload. */
2703 if (x != y
2704 && ! (reload_in_progress || reload_completed)
2705 && (GET_CODE (realpart_x) == SUBREG
2706 || GET_CODE (imagpart_x) == SUBREG))
2707 {
2708 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2709 }
2710
2711 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2712 (realpart_x, realpart_y));
2713 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2714 (imagpart_x, imagpart_y));
2715 }
2716
2717 return get_last_insn ();
2718 }
2719
2720 /* This will handle any multi-word mode that lacks a move_insn pattern.
2721 However, you will get better code if you define such patterns,
2722 even if they must turn into multiple assembler instructions. */
2723 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2724 {
2725 rtx last_insn = 0;
2726 rtx seq;
2727 int need_clobber;
2728
2729 #ifdef PUSH_ROUNDING
2730
2731 /* If X is a push on the stack, do the push now and replace
2732 X with a reference to the stack pointer. */
2733 if (push_operand (x, GET_MODE (x)))
2734 {
2735 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2736 x = change_address (x, VOIDmode, stack_pointer_rtx);
2737 }
2738 #endif
2739
2740 start_sequence ();
2741
2742 need_clobber = 0;
2743 for (i = 0;
2744 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2745 i++)
2746 {
2747 rtx xpart = operand_subword (x, i, 1, mode);
2748 rtx ypart = operand_subword (y, i, 1, mode);
2749
2750 /* If we can't get a part of Y, put Y into memory if it is a
2751 constant. Otherwise, force it into a register. If we still
2752 can't get a part of Y, abort. */
2753 if (ypart == 0 && CONSTANT_P (y))
2754 {
2755 y = force_const_mem (mode, y);
2756 ypart = operand_subword (y, i, 1, mode);
2757 }
2758 else if (ypart == 0)
2759 ypart = operand_subword_force (y, i, mode);
2760
2761 if (xpart == 0 || ypart == 0)
2762 abort ();
2763
2764 need_clobber |= (GET_CODE (xpart) == SUBREG);
2765
2766 last_insn = emit_move_insn (xpart, ypart);
2767 }
2768
2769 seq = gen_sequence ();
2770 end_sequence ();
2771
2772 /* Show the output dies here. This is necessary for SUBREGs
2773 of pseudos since we cannot track their lifetimes correctly;
2774 hard regs shouldn't appear here except as return values.
2775 We never want to emit such a clobber after reload. */
2776 if (x != y
2777 && ! (reload_in_progress || reload_completed)
2778 && need_clobber != 0)
2779 {
2780 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2781 }
2782
2783 emit_insn (seq);
2784
2785 return last_insn;
2786 }
2787 else
2788 abort ();
2789 }
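
/* Illustrative sketch, not part of the original source: the word-by-word
   fallback above, expressed in plain C.  memcpy of word-sized chunks
   stands in for the per-word moves emitted through emit_move_insn, and
   sizeof (long) is an assumed stand-in for UNITS_PER_WORD.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
move_by_words (void *dst, const void *src, size_t size)
{
  const size_t word = sizeof (long);
  size_t off;

  for (off = 0; off < size; off += word)
    {
      size_t n = size - off < word ? size - off : word;
      memcpy ((char *) dst + off, (const char *) src + off, n);
    }
}
#endif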
2790 \f
2791 /* Pushing data onto the stack. */
2792
2793 /* Push a block of length SIZE (perhaps variable)
2794 and return an rtx to address the beginning of the block.
2795 Note that it is not possible for the value returned to be a QUEUED.
2796 The value may be virtual_outgoing_args_rtx.
2797
2798 EXTRA is the number of bytes of padding to push in addition to SIZE.
2799 BELOW nonzero means this padding comes at low addresses;
2800 otherwise, the padding comes at high addresses. */
2801
2802 rtx
2803 push_block (size, extra, below)
2804 rtx size;
2805 int extra, below;
2806 {
2807 register rtx temp;
2808
2809 size = convert_modes (Pmode, ptr_mode, size, 1);
2810 if (CONSTANT_P (size))
2811 anti_adjust_stack (plus_constant (size, extra));
2812 else if (GET_CODE (size) == REG && extra == 0)
2813 anti_adjust_stack (size);
2814 else
2815 {
2816 rtx temp = copy_to_mode_reg (Pmode, size);
2817 if (extra != 0)
2818 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2819 temp, 0, OPTAB_LIB_WIDEN);
2820 anti_adjust_stack (temp);
2821 }
2822
2823 #if defined (STACK_GROWS_DOWNWARD) \
2824 || (defined (ARGS_GROW_DOWNWARD) \
2825 && !defined (ACCUMULATE_OUTGOING_ARGS))
2826
2827 /* Return the lowest stack address when STACK or ARGS grow downward and
2828 we are not accumulating outgoing arguments (the c4x port uses such
2829 conventions). */
2830 temp = virtual_outgoing_args_rtx;
2831 if (extra != 0 && below)
2832 temp = plus_constant (temp, extra);
2833 #else
2834 if (GET_CODE (size) == CONST_INT)
2835 temp = plus_constant (virtual_outgoing_args_rtx,
2836 - INTVAL (size) - (below ? 0 : extra));
2837 else if (extra != 0 && !below)
2838 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2839 negate_rtx (Pmode, plus_constant (size, extra)));
2840 else
2841 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2842 negate_rtx (Pmode, size));
2843 #endif
2844
2845 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2846 }
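
/* Illustrative sketch, not part of the original source: where the block
   reserved by push_block begins relative to the stack pointer.  The
   initial stack pointer, block size and growth direction are all made-up
   assumptions standing in for the target's STACK_GROWS_DOWNWARD setting.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const unsigned long sp = 0x1000; /* hypothetical stack pointer */
  const unsigned long size = 32;   /* bytes reserved             */
  const int grows_downward = 1;    /* assumed target convention  */
  unsigned long new_sp, block;

  new_sp = grows_downward ? sp - size : sp + size;
  /* The beginning (lowest address) of the block is the new stack pointer
     on a downward-growing stack and the old stack pointer otherwise.  */
  block = grows_downward ? new_sp : sp;
  printf ("sp 0x%lx -> 0x%lx, block begins at 0x%lx\n", sp, new_sp, block);
  return 0;
}
#endif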
2847
2848 rtx
2849 gen_push_operand ()
2850 {
2851 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2852 }
2853
2854 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2855 block of SIZE bytes. */
2856
2857 static rtx
2858 get_push_address (size)
2859 int size;
2860 {
2861 register rtx temp;
2862
2863 if (STACK_PUSH_CODE == POST_DEC)
2864 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2865 else if (STACK_PUSH_CODE == POST_INC)
2866 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2867 else
2868 temp = stack_pointer_rtx;
2869
2870 return copy_to_reg (temp);
2871 }
2872
2873 /* Generate code to push X onto the stack, assuming it has mode MODE and
2874 type TYPE.
2875 MODE is redundant except when X is a CONST_INT (since they don't
2876 carry mode info).
2877 SIZE is an rtx for the size of data to be copied (in bytes),
2878 needed only if X is BLKmode.
2879
2880 ALIGN (in bytes) is the maximum alignment we can assume.
2881
2882 If PARTIAL and REG are both nonzero, then copy that many of the first
2883 words of X into registers starting with REG, and push the rest of X.
2884 The amount of space pushed is decreased by PARTIAL words,
2885 rounded *down* to a multiple of PARM_BOUNDARY.
2886 REG must be a hard register in this case.
2887 If REG is zero but PARTIAL is not, take all other actions for an
2888 argument partially in registers, but do not actually load any
2889 registers.
2890
2891 EXTRA is the amount in bytes of extra space to leave next to this arg.
2892 This is ignored if an argument block has already been allocated.
2893
2894 On a machine that lacks real push insns, ARGS_ADDR is the address of
2895 the bottom of the argument block for this call. We use indexing off there
2896 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2897 argument block has not been preallocated.
2898
2899 ARGS_SO_FAR is the size of args previously pushed for this call.
2900
2901 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2902 for arguments passed in registers. If nonzero, it will be the number
2903 of bytes required. */
2904
2905 void
2906 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2907 args_addr, args_so_far, reg_parm_stack_space,
2908 alignment_pad)
2909 register rtx x;
2910 enum machine_mode mode;
2911 tree type;
2912 rtx size;
2913 unsigned int align;
2914 int partial;
2915 rtx reg;
2916 int extra;
2917 rtx args_addr;
2918 rtx args_so_far;
2919 int reg_parm_stack_space;
2920 rtx alignment_pad;
2921 {
2922 rtx xinner;
2923 enum direction stack_direction
2924 #ifdef STACK_GROWS_DOWNWARD
2925 = downward;
2926 #else
2927 = upward;
2928 #endif
2929
2930 /* Decide where to pad the argument: `downward' for below,
2931 `upward' for above, or `none' for don't pad it.
2932 Default is below for small data on big-endian machines; else above. */
2933 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2934
2935 /* Invert direction if stack is post-update. */
2936 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2937 if (where_pad != none)
2938 where_pad = (where_pad == downward ? upward : downward);
2939
2940 xinner = x = protect_from_queue (x, 0);
2941
2942 if (mode == BLKmode)
2943 {
2944 /* Copy a block into the stack, entirely or partially. */
2945
2946 register rtx temp;
2947 int used = partial * UNITS_PER_WORD;
2948 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2949 int skip;
2950
2951 if (size == 0)
2952 abort ();
2953
2954 used -= offset;
2955
2956 /* USED is now the # of bytes we need not copy to the stack
2957 because registers will take care of them. */
2958
2959 if (partial != 0)
2960 xinner = change_address (xinner, BLKmode,
2961 plus_constant (XEXP (xinner, 0), used));
2962
2963 /* If the partial register-part of the arg counts in its stack size,
2964 skip the part of stack space corresponding to the registers.
2965 Otherwise, start copying to the beginning of the stack space,
2966 by setting SKIP to 0. */
2967 skip = (reg_parm_stack_space == 0) ? 0 : used;
2968
2969 #ifdef PUSH_ROUNDING
2970 /* Do it with several push insns if that doesn't take lots of insns
2971 and if there is no difficulty with push insns that skip bytes
2972 on the stack for alignment purposes. */
2973 if (args_addr == 0
2974 && GET_CODE (size) == CONST_INT
2975 && skip == 0
2976 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2977 /* Here we avoid the case of a structure whose weak alignment
2978 forces many pushes of a small amount of data,
2979 and such small pushes do rounding that causes trouble. */
2980 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2981 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2982 || PUSH_ROUNDING (align) == align)
2983 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2984 {
2985 /* Push padding now if padding above and stack grows down,
2986 or if padding below and stack grows up.
2987 But if space already allocated, this has already been done. */
2988 if (extra && args_addr == 0
2989 && where_pad != none && where_pad != stack_direction)
2990 anti_adjust_stack (GEN_INT (extra));
2991
2992 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2993 INTVAL (size) - used, align);
2994
2995 if (current_function_check_memory_usage && ! in_check_memory_usage)
2996 {
2997 rtx temp;
2998
2999 in_check_memory_usage = 1;
3000 temp = get_push_address (INTVAL(size) - used);
3001 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3002 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3003 temp, Pmode,
3004 XEXP (xinner, 0), Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype));
3007 else
3008 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3009 temp, Pmode,
3010 GEN_INT (INTVAL(size) - used),
3011 TYPE_MODE (sizetype),
3012 GEN_INT (MEMORY_USE_RW),
3013 TYPE_MODE (integer_type_node));
3014 in_check_memory_usage = 0;
3015 }
3016 }
3017 else
3018 #endif /* PUSH_ROUNDING */
3019 {
3020 /* Otherwise make space on the stack and copy the data
3021 to the address of that space. */
3022
3023 /* Deduct words put into registers from the size we must copy. */
3024 if (partial != 0)
3025 {
3026 if (GET_CODE (size) == CONST_INT)
3027 size = GEN_INT (INTVAL (size) - used);
3028 else
3029 size = expand_binop (GET_MODE (size), sub_optab, size,
3030 GEN_INT (used), NULL_RTX, 0,
3031 OPTAB_LIB_WIDEN);
3032 }
3033
3034 /* Get the address of the stack space.
3035 In this case, we do not deal with EXTRA separately.
3036 A single stack adjust will do. */
3037 if (! args_addr)
3038 {
3039 temp = push_block (size, extra, where_pad == downward);
3040 extra = 0;
3041 }
3042 else if (GET_CODE (args_so_far) == CONST_INT)
3043 temp = memory_address (BLKmode,
3044 plus_constant (args_addr,
3045 skip + INTVAL (args_so_far)));
3046 else
3047 temp = memory_address (BLKmode,
3048 plus_constant (gen_rtx_PLUS (Pmode,
3049 args_addr,
3050 args_so_far),
3051 skip));
3052 if (current_function_check_memory_usage && ! in_check_memory_usage)
3053 {
3054 rtx target;
3055
3056 in_check_memory_usage = 1;
3057 target = copy_to_reg (temp);
3058 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3059 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3060 target, Pmode,
3061 XEXP (xinner, 0), Pmode,
3062 size, TYPE_MODE (sizetype));
3063 else
3064 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3065 target, Pmode,
3066 size, TYPE_MODE (sizetype),
3067 GEN_INT (MEMORY_USE_RW),
3068 TYPE_MODE (integer_type_node));
3069 in_check_memory_usage = 0;
3070 }
3071
3072 /* TEMP is the address of the block. Copy the data there. */
3073 if (GET_CODE (size) == CONST_INT
3074 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3075 {
3076 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3077 INTVAL (size), align);
3078 goto ret;
3079 }
3080 else
3081 {
3082 rtx opalign = GEN_INT (align);
3083 enum machine_mode mode;
3084 rtx target = gen_rtx_MEM (BLKmode, temp);
3085
3086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3087 mode != VOIDmode;
3088 mode = GET_MODE_WIDER_MODE (mode))
3089 {
3090 enum insn_code code = movstr_optab[(int) mode];
3091 insn_operand_predicate_fn pred;
3092
3093 if (code != CODE_FOR_nothing
3094 && ((GET_CODE (size) == CONST_INT
3095 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3096 <= (GET_MODE_MASK (mode) >> 1)))
3097 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3098 && (!(pred = insn_data[(int) code].operand[0].predicate)
3099 || ((*pred) (target, BLKmode)))
3100 && (!(pred = insn_data[(int) code].operand[1].predicate)
3101 || ((*pred) (xinner, BLKmode)))
3102 && (!(pred = insn_data[(int) code].operand[3].predicate)
3103 || ((*pred) (opalign, VOIDmode))))
3104 {
3105 rtx op2 = convert_to_mode (mode, size, 1);
3106 rtx last = get_last_insn ();
3107 rtx pat;
3108
3109 pred = insn_data[(int) code].operand[2].predicate;
3110 if (pred != 0 && ! (*pred) (op2, mode))
3111 op2 = copy_to_mode_reg (mode, op2);
3112
3113 pat = GEN_FCN ((int) code) (target, xinner,
3114 op2, opalign);
3115 if (pat)
3116 {
3117 emit_insn (pat);
3118 goto ret;
3119 }
3120 else
3121 delete_insns_since (last);
3122 }
3123 }
3124 }
3125
3126 #ifndef ACCUMULATE_OUTGOING_ARGS
3127 /* If the source is referenced relative to the stack pointer,
3128 copy it to another register to stabilize it. We do not need
3129 to do this if we know that we won't be changing sp. */
3130
3131 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3132 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3133 temp = copy_to_reg (temp);
3134 #endif
3135
3136 /* Make inhibit_defer_pop nonzero around the library call
3137 to force it to pop the bcopy-arguments right away. */
3138 NO_DEFER_POP;
3139 #ifdef TARGET_MEM_FUNCTIONS
3140 emit_library_call (memcpy_libfunc, 0,
3141 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3142 convert_to_mode (TYPE_MODE (sizetype),
3143 size, TREE_UNSIGNED (sizetype)),
3144 TYPE_MODE (sizetype));
3145 #else
3146 emit_library_call (bcopy_libfunc, 0,
3147 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3148 convert_to_mode (TYPE_MODE (integer_type_node),
3149 size,
3150 TREE_UNSIGNED (integer_type_node)),
3151 TYPE_MODE (integer_type_node));
3152 #endif
3153 OK_DEFER_POP;
3154 }
3155 }
3156 else if (partial > 0)
3157 {
3158 /* Scalar partly in registers. */
3159
3160 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3161 int i;
3162 int not_stack;
3163 /* # words of start of argument
3164 that we must make space for but need not store. */
3165 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3166 int args_offset = INTVAL (args_so_far);
3167 int skip;
3168
3169 /* Push padding now if padding above and stack grows down,
3170 or if padding below and stack grows up.
3171 But if space already allocated, this has already been done. */
3172 if (extra && args_addr == 0
3173 && where_pad != none && where_pad != stack_direction)
3174 anti_adjust_stack (GEN_INT (extra));
3175
3176 /* If we make space by pushing it, we might as well push
3177 the real data. Otherwise, we can leave OFFSET nonzero
3178 and leave the space uninitialized. */
3179 if (args_addr == 0)
3180 offset = 0;
3181
3182 /* Now NOT_STACK gets the number of words that we don't need to
3183 allocate on the stack. */
3184 not_stack = partial - offset;
3185
3186 /* If the partial register-part of the arg counts in its stack size,
3187 skip the part of stack space corresponding to the registers.
3188 Otherwise, start copying to the beginning of the stack space,
3189 by setting SKIP to 0. */
3190 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3191
3192 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3193 x = validize_mem (force_const_mem (mode, x));
3194
3195 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3196 SUBREGs of such registers are not allowed. */
3197 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3198 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3199 x = copy_to_reg (x);
3200
3201 /* Loop over all the words allocated on the stack for this arg. */
3202 /* We can do it by words, because any scalar bigger than a word
3203 has a size a multiple of a word. */
3204 #ifndef PUSH_ARGS_REVERSED
3205 for (i = not_stack; i < size; i++)
3206 #else
3207 for (i = size - 1; i >= not_stack; i--)
3208 #endif
3209 if (i >= not_stack + offset)
3210 emit_push_insn (operand_subword_force (x, i, mode),
3211 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3212 0, args_addr,
3213 GEN_INT (args_offset + ((i - not_stack + skip)
3214 * UNITS_PER_WORD)),
3215 reg_parm_stack_space, alignment_pad);
3216 }
3217 else
3218 {
3219 rtx addr;
3220 rtx target = NULL_RTX;
3221
3222 /* Push padding now if padding above and stack grows down,
3223 or if padding below and stack grows up.
3224 But if space already allocated, this has already been done. */
3225 if (extra && args_addr == 0
3226 && where_pad != none && where_pad != stack_direction)
3227 anti_adjust_stack (GEN_INT (extra));
3228
3229 #ifdef PUSH_ROUNDING
3230 if (args_addr == 0)
3231 addr = gen_push_operand ();
3232 else
3233 #endif
3234 {
3235 if (GET_CODE (args_so_far) == CONST_INT)
3236 addr
3237 = memory_address (mode,
3238 plus_constant (args_addr,
3239 INTVAL (args_so_far)));
3240 else
3241 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3242 args_so_far));
3243 target = addr;
3244 }
3245
3246 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3247
3248 if (current_function_check_memory_usage && ! in_check_memory_usage)
3249 {
3250 in_check_memory_usage = 1;
3251 if (target == 0)
3252 target = get_push_address (GET_MODE_SIZE (mode));
3253
3254 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3255 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3256 target, Pmode,
3257 XEXP (x, 0), Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype));
3260 else
3261 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3262 target, Pmode,
3263 GEN_INT (GET_MODE_SIZE (mode)),
3264 TYPE_MODE (sizetype),
3265 GEN_INT (MEMORY_USE_RW),
3266 TYPE_MODE (integer_type_node));
3267 in_check_memory_usage = 0;
3268 }
3269 }
3270
3271 ret:
3272 /* If part should go in registers, copy that part
3273 into the appropriate registers. Do this now, at the end,
3274 since mem-to-mem copies above may do function calls. */
3275 if (partial > 0 && reg != 0)
3276 {
3277 /* Handle calls that pass values in multiple non-contiguous locations.
3278 The Irix 6 ABI has examples of this. */
3279 if (GET_CODE (reg) == PARALLEL)
3280 emit_group_load (reg, x, -1, align); /* ??? size? */
3281 else
3282 move_block_to_reg (REGNO (reg), x, partial, mode);
3283 }
3284
3285 if (extra && args_addr == 0 && where_pad == stack_direction)
3286 anti_adjust_stack (GEN_INT (extra));
3287
3288 if (alignment_pad)
3289 anti_adjust_stack (alignment_pad);
3290 }
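
/* Illustrative sketch, not part of the original source: the bookkeeping at
   the top of the BLKmode case of emit_push_insn above, where the bytes
   already covered by registers are rounded down to a PARM_BOUNDARY
   multiple before being subtracted from what is copied to the stack.  The
   word size and boundary are made-up values, not those of any real port.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const int units_per_word = 4;      /* assumed UNITS_PER_WORD         */
  const int parm_boundary_bytes = 8; /* assumed PARM_BOUNDARY in bytes */
  int partial;

  for (partial = 0; partial <= 3; partial++)
    {
      int used = partial * units_per_word;
      int offset = used % parm_boundary_bytes;

      printf ("partial=%d: %d byte(s) in regs, %d byte(s) not copied\n",
              partial, used, used - offset);
    }
  return 0;
}
#endif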
3291 \f
3292 /* Expand an assignment that stores the value of FROM into TO.
3293 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3294 (This may contain a QUEUED rtx;
3295 if the value is constant, this rtx is a constant.)
3296 Otherwise, the returned value is NULL_RTX.
3297
3298 SUGGEST_REG is no longer actually used.
3299 It used to mean, copy the value through a register
3300 and return that register, if that is possible.
3301 We now use WANT_VALUE to decide whether to do this. */
3302
3303 rtx
3304 expand_assignment (to, from, want_value, suggest_reg)
3305 tree to, from;
3306 int want_value;
3307 int suggest_reg ATTRIBUTE_UNUSED;
3308 {
3309 register rtx to_rtx = 0;
3310 rtx result;
3311
3312 /* Don't crash if the lhs of the assignment was erroneous. */
3313
3314 if (TREE_CODE (to) == ERROR_MARK)
3315 {
3316 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3317 return want_value ? result : NULL_RTX;
3318 }
3319
3320 /* Assignment of a structure component needs special treatment
3321 if the structure component's rtx is not simply a MEM.
3322 Assignment of an array element at a constant index, and assignment of
3323 an array element in an unaligned packed structure field, has the same
3324 problem. */
3325
3326 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3327 || TREE_CODE (to) == ARRAY_REF)
3328 {
3329 enum machine_mode mode1;
3330 HOST_WIDE_INT bitsize, bitpos;
3331 tree offset;
3332 int unsignedp;
3333 int volatilep = 0;
3334 tree tem;
3335 unsigned int alignment;
3336
3337 push_temp_slots ();
3338 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3339 &unsignedp, &volatilep, &alignment);
3340
3341 /* If we are going to use store_bit_field and extract_bit_field,
3342 make sure to_rtx will be safe for multiple use. */
3343
3344 if (mode1 == VOIDmode && want_value)
3345 tem = stabilize_reference (tem);
3346
3347 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3348 if (offset != 0)
3349 {
3350 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3351
3352 if (GET_CODE (to_rtx) != MEM)
3353 abort ();
3354
3355 if (GET_MODE (offset_rtx) != ptr_mode)
3356 {
3357 #ifdef POINTERS_EXTEND_UNSIGNED
3358 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3359 #else
3360 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3361 #endif
3362 }
3363
3364 /* A constant address in TO_RTX can have VOIDmode; we must not try
3365 to call force_reg for that case. Avoid that case. */
3366 if (GET_CODE (to_rtx) == MEM
3367 && GET_MODE (to_rtx) == BLKmode
3368 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3369 && bitsize
3370 && (bitpos % bitsize) == 0
3371 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3372 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3373 {
3374 rtx temp = change_address (to_rtx, mode1,
3375 plus_constant (XEXP (to_rtx, 0),
3376 (bitpos /
3377 BITS_PER_UNIT)));
3378 if (GET_CODE (XEXP (temp, 0)) == REG)
3379 to_rtx = temp;
3380 else
3381 to_rtx = change_address (to_rtx, mode1,
3382 force_reg (GET_MODE (XEXP (temp, 0)),
3383 XEXP (temp, 0)));
3384 bitpos = 0;
3385 }
3386
3387 to_rtx = change_address (to_rtx, VOIDmode,
3388 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3389 force_reg (ptr_mode,
3390 offset_rtx)));
3391 }
3392
3393 if (volatilep)
3394 {
3395 if (GET_CODE (to_rtx) == MEM)
3396 {
3397 /* When the offset is zero, to_rtx is the address of the
3398 structure we are storing into, and hence may be shared.
3399 We must make a new MEM before setting the volatile bit. */
3400 if (offset == 0)
3401 to_rtx = copy_rtx (to_rtx);
3402
3403 MEM_VOLATILE_P (to_rtx) = 1;
3404 }
3405 #if 0 /* This was turned off because, when a field is volatile
3406 in an object which is not volatile, the object may be in a register,
3407 and then we would abort over here. */
3408 else
3409 abort ();
3410 #endif
3411 }
3412
3413 if (TREE_CODE (to) == COMPONENT_REF
3414 && TREE_READONLY (TREE_OPERAND (to, 1)))
3415 {
3416 if (offset == 0)
3417 to_rtx = copy_rtx (to_rtx);
3418
3419 RTX_UNCHANGING_P (to_rtx) = 1;
3420 }
3421
3422 /* Check the access. */
3423 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3424 {
3425 rtx to_addr;
3426 int size;
3427 int best_mode_size;
3428 enum machine_mode best_mode;
3429
3430 best_mode = get_best_mode (bitsize, bitpos,
3431 TYPE_ALIGN (TREE_TYPE (tem)),
3432 mode1, volatilep);
3433 if (best_mode == VOIDmode)
3434 best_mode = QImode;
3435
3436 best_mode_size = GET_MODE_BITSIZE (best_mode);
3437 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3438 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3439 size *= GET_MODE_SIZE (best_mode);
3440
3441 /* Check the access right of the pointer. */
3442 if (size)
3443 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3444 to_addr, Pmode,
3445 GEN_INT (size), TYPE_MODE (sizetype),
3446 GEN_INT (MEMORY_USE_WO),
3447 TYPE_MODE (integer_type_node));
3448 }
3449
3450 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3451 (want_value
3452 /* Spurious cast makes HPUX compiler happy. */
3453 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3454 : VOIDmode),
3455 unsignedp,
3456 /* Required alignment of containing datum. */
3457 alignment,
3458 int_size_in_bytes (TREE_TYPE (tem)),
3459 get_alias_set (to));
3460 preserve_temp_slots (result);
3461 free_temp_slots ();
3462 pop_temp_slots ();
3463
3464 /* If the value is meaningful, convert RESULT to the proper mode.
3465 Otherwise, return nothing. */
3466 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3467 TYPE_MODE (TREE_TYPE (from)),
3468 result,
3469 TREE_UNSIGNED (TREE_TYPE (to)))
3470 : NULL_RTX);
3471 }
3472
3473 /* If the rhs is a function call and its value is not an aggregate,
3474 call the function before we start to compute the lhs.
3475 This is needed for correct code for cases such as
3476 val = setjmp (buf) on machines where reference to val
3477 requires loading up part of an address in a separate insn.
3478
3479 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3480 since it might be a promoted variable where the zero- or sign- extension
3481 needs to be done. Handling this in the normal way is safe because no
3482 computation is done before the call. */
3483 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3484 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3485 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3486 && GET_CODE (DECL_RTL (to)) == REG))
3487 {
3488 rtx value;
3489
3490 push_temp_slots ();
3491 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3492 if (to_rtx == 0)
3493 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3494
3495 /* Handle calls that return values in multiple non-contiguous locations.
3496 The Irix 6 ABI has examples of this. */
3497 if (GET_CODE (to_rtx) == PARALLEL)
3498 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3499 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3500 else if (GET_MODE (to_rtx) == BLKmode)
3501 emit_block_move (to_rtx, value, expr_size (from),
3502 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3503 else
3504 {
3505 #ifdef POINTERS_EXTEND_UNSIGNED
3506 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3507 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3508 value = convert_memory_address (GET_MODE (to_rtx), value);
3509 #endif
3510 emit_move_insn (to_rtx, value);
3511 }
3512 preserve_temp_slots (to_rtx);
3513 free_temp_slots ();
3514 pop_temp_slots ();
3515 return want_value ? to_rtx : NULL_RTX;
3516 }
3517
3518 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3519 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3520
3521 if (to_rtx == 0)
3522 {
3523 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3524 if (GET_CODE (to_rtx) == MEM)
3525 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3526 }
3527
3528 /* Don't move directly into a return register. */
3529 if (TREE_CODE (to) == RESULT_DECL
3530 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3531 {
3532 rtx temp;
3533
3534 push_temp_slots ();
3535 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3536
3537 if (GET_CODE (to_rtx) == PARALLEL)
3538 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3539 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3540 else
3541 emit_move_insn (to_rtx, temp);
3542
3543 preserve_temp_slots (to_rtx);
3544 free_temp_slots ();
3545 pop_temp_slots ();
3546 return want_value ? to_rtx : NULL_RTX;
3547 }
3548
3549 /* In case we are returning the contents of an object which overlaps
3550 the place the value is being stored, use a safe function when copying
3551 a value through a pointer into a structure value return block. */
3552 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3553 && current_function_returns_struct
3554 && !current_function_returns_pcc_struct)
3555 {
3556 rtx from_rtx, size;
3557
3558 push_temp_slots ();
3559 size = expr_size (from);
3560 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3561 EXPAND_MEMORY_USE_DONT);
3562
3563 /* Copy the rights of the bitmap. */
3564 if (current_function_check_memory_usage)
3565 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3566 XEXP (to_rtx, 0), Pmode,
3567 XEXP (from_rtx, 0), Pmode,
3568 convert_to_mode (TYPE_MODE (sizetype),
3569 size, TREE_UNSIGNED (sizetype)),
3570 TYPE_MODE (sizetype));
3571
3572 #ifdef TARGET_MEM_FUNCTIONS
3573 emit_library_call (memcpy_libfunc, 0,
3574 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3575 XEXP (from_rtx, 0), Pmode,
3576 convert_to_mode (TYPE_MODE (sizetype),
3577 size, TREE_UNSIGNED (sizetype)),
3578 TYPE_MODE (sizetype));
3579 #else
3580 emit_library_call (bcopy_libfunc, 0,
3581 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3582 XEXP (to_rtx, 0), Pmode,
3583 convert_to_mode (TYPE_MODE (integer_type_node),
3584 size, TREE_UNSIGNED (integer_type_node)),
3585 TYPE_MODE (integer_type_node));
3586 #endif
3587
3588 preserve_temp_slots (to_rtx);
3589 free_temp_slots ();
3590 pop_temp_slots ();
3591 return want_value ? to_rtx : NULL_RTX;
3592 }
3593
3594 /* Compute FROM and store the value in the rtx we got. */
3595
3596 push_temp_slots ();
3597 result = store_expr (from, to_rtx, want_value);
3598 preserve_temp_slots (result);
3599 free_temp_slots ();
3600 pop_temp_slots ();
3601 return want_value ? result : NULL_RTX;
3602 }
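
/* Illustrative sketch, not part of the original source: the size
   computation used by the memory-usage check in expand_assignment above,
   which widens a bit-field access to whole units of the chosen access
   mode.  The 32-bit access mode and the field position and width are
   made-up example values.  */
#if 0
#include <stdio.h>

#define CEIL(x, y)  (((x) + (y) - 1) / (y))

int
main (void)
{
  const int bits_per_unit = 8;
  const int best_mode_bits = 32;       /* assumed access mode width */
  const int bitpos = 37, bitsize = 40; /* hypothetical bit field    */
  int units, bytes;

  units = CEIL ((bitpos % best_mode_bits) + bitsize, best_mode_bits);
  bytes = units * (best_mode_bits / bits_per_unit);
  printf ("field of %d bits at bit %d: check %d byte(s) starting at byte %d\n",
          bitsize, bitpos, bytes, bitpos / bits_per_unit);
  return 0;
}
#endif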
3603
3604 /* Generate code for computing expression EXP,
3605 and storing the value into TARGET.
3606 TARGET may contain a QUEUED rtx.
3607
3608 If WANT_VALUE is nonzero, return a copy of the value
3609 not in TARGET, so that we can be sure to use the proper
3610 value in a containing expression even if TARGET has something
3611 else stored in it. If possible, we copy the value through a pseudo
3612 and return that pseudo. Or, if the value is constant, we try to
3613 return the constant. In some cases, we return a pseudo
3614 copied *from* TARGET.
3615
3616 If the mode is BLKmode then we may return TARGET itself.
3617 It turns out that in BLKmode it doesn't cause a problem,
3618 because C has no operators that could combine two different
3619 assignments into the same BLKmode object with different values
3620 with no sequence point. Will other languages need this to
3621 be more thorough?
3622
3623 If WANT_VALUE is 0, we return NULL, to make sure
3624 to catch quickly any cases where the caller uses the value
3625 and fails to set WANT_VALUE. */
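/* Illustrative example (an assumption, not part of the original source):
   in an expression such as

       a = (b = c);

   the inner assignment is expanded with WANT_VALUE nonzero, so the value
   stored into B is returned (typically through a pseudo) and can then be
   stored into A without re-reading B.  */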
3626
3627 rtx
3628 store_expr (exp, target, want_value)
3629 register tree exp;
3630 register rtx target;
3631 int want_value;
3632 {
3633 register rtx temp;
3634 int dont_return_target = 0;
3635
3636 if (TREE_CODE (exp) == COMPOUND_EXPR)
3637 {
3638 /* Perform first part of compound expression, then assign from second
3639 part. */
3640 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3641 emit_queue ();
3642 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3643 }
3644 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3645 {
3646 /* For a conditional expression, get safe form of the target. Then
3647 test the condition, doing the appropriate assignment on either
3648 side. This avoids the creation of unnecessary temporaries.
3649 For non-BLKmode, it is more efficient not to do this. */
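/* Illustrative example (an assumption, not part of the original source):
   for a BLKmode assignment such as

       struct big s, s1, s2;
       ...
       s = flag ? s1 : s2;

   we jump on FLAG and store into S directly on each arm, instead of first
   building the selected value in a temporary and copying it into S.  */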
3650
3651 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3652
3653 emit_queue ();
3654 target = protect_from_queue (target, 1);
3655
3656 do_pending_stack_adjust ();
3657 NO_DEFER_POP;
3658 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3659 start_cleanup_deferral ();
3660 store_expr (TREE_OPERAND (exp, 1), target, 0);
3661 end_cleanup_deferral ();
3662 emit_queue ();
3663 emit_jump_insn (gen_jump (lab2));
3664 emit_barrier ();
3665 emit_label (lab1);
3666 start_cleanup_deferral ();
3667 store_expr (TREE_OPERAND (exp, 2), target, 0);
3668 end_cleanup_deferral ();
3669 emit_queue ();
3670 emit_label (lab2);
3671 OK_DEFER_POP;
3672
3673 return want_value ? target : NULL_RTX;
3674 }
3675 else if (queued_subexp_p (target))
3676 /* If target contains a postincrement, let's not risk
3677 using it as the place to generate the rhs. */
3678 {
3679 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3680 {
3681 /* Expand EXP into a new pseudo. */
3682 temp = gen_reg_rtx (GET_MODE (target));
3683 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3684 }
3685 else
3686 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3687
3688 /* If target is volatile, ANSI requires accessing the value
3689 *from* the target, if it is accessed. So make that happen.
3690 In no case return the target itself. */
3691 if (! MEM_VOLATILE_P (target) && want_value)
3692 dont_return_target = 1;
3693 }
3694 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3695 && GET_MODE (target) != BLKmode)
3696 /* If target is in memory and caller wants value in a register instead,
3697 arrange that. Pass TARGET as target for expand_expr so that,
3698 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3699 We know expand_expr will not use the target in that case.
3700 Don't do this if TARGET is volatile because we are supposed
3701 to write it and then read it. */
3702 {
3703 temp = expand_expr (exp, target, GET_MODE (target), 0);
3704 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3705 temp = copy_to_reg (temp);
3706 dont_return_target = 1;
3707 }
3708 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3709 /* If this is a scalar in a register that is stored in a wider mode
3710 than the declared mode, compute the result into its declared mode
3711 and then convert to the wider mode. Our value is the computed
3712 expression. */
3713 {
3714 /* If we don't want a value, we can do the conversion inside EXP,
3715 which will often result in some optimizations. Do the conversion
3716 in two steps: first change the signedness, if needed, then
3717 the extend. But don't do this if the type of EXP is a subtype
3718 of something else since then the conversion might involve
3719 more than just converting modes. */
3720 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3721 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3722 {
3723 if (TREE_UNSIGNED (TREE_TYPE (exp))
3724 != SUBREG_PROMOTED_UNSIGNED_P (target))
3725 exp
3726 = convert
3727 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3728 TREE_TYPE (exp)),
3729 exp);
3730
3731 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3732 SUBREG_PROMOTED_UNSIGNED_P (target)),
3733 exp);
3734 }
3735
3736 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3737
3738 /* If TEMP is a volatile MEM and we want a result value, make
3739 the access now so it gets done only once. Likewise if
3740 it contains TARGET. */
3741 if (GET_CODE (temp) == MEM && want_value
3742 && (MEM_VOLATILE_P (temp)
3743 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3744 temp = copy_to_reg (temp);
3745
3746 /* If TEMP is a VOIDmode constant, use convert_modes to make
3747 sure that we properly convert it. */
3748 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3749 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3750 TYPE_MODE (TREE_TYPE (exp)), temp,
3751 SUBREG_PROMOTED_UNSIGNED_P (target));
3752
3753 convert_move (SUBREG_REG (target), temp,
3754 SUBREG_PROMOTED_UNSIGNED_P (target));
3755
3756 /* If we promoted a constant, change the mode back down to match
3757 target. Otherwise, the caller might get confused by a result whose
3758 mode is larger than expected. */
3759
3760 if (want_value && GET_MODE (temp) != GET_MODE (target)
3761 && GET_MODE (temp) != VOIDmode)
3762 {
3763 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3764 SUBREG_PROMOTED_VAR_P (temp) = 1;
3765 SUBREG_PROMOTED_UNSIGNED_P (temp)
3766 = SUBREG_PROMOTED_UNSIGNED_P (target);
3767 }
3768
3769 return want_value ? temp : NULL_RTX;
3770 }
3771 else
3772 {
3773 temp = expand_expr (exp, target, GET_MODE (target), 0);
3774 /* Return TARGET if it's a specified hardware register.
3775 If TARGET is a volatile mem ref, either return TARGET
3776 or return a reg copied *from* TARGET; ANSI requires this.
3777
3778 Otherwise, if TEMP is not TARGET, return TEMP
3779 if it is constant (for efficiency),
3780 or if we really want the correct value. */
3781 if (!(target && GET_CODE (target) == REG
3782 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3783 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3784 && ! rtx_equal_p (temp, target)
3785 && (CONSTANT_P (temp) || want_value))
3786 dont_return_target = 1;
3787 }
3788
3789 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3790 the same as that of TARGET, adjust the constant. This is needed, for
3791 example, in case it is a CONST_DOUBLE and we want only a word-sized
3792 value. */
3793 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3794 && TREE_CODE (exp) != ERROR_MARK
3795 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3796 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3797 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3798
3799 if (current_function_check_memory_usage
3800 && GET_CODE (target) == MEM
3801 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3802 {
3803 if (GET_CODE (temp) == MEM)
3804 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3805 XEXP (target, 0), Pmode,
3806 XEXP (temp, 0), Pmode,
3807 expr_size (exp), TYPE_MODE (sizetype));
3808 else
3809 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3810 XEXP (target, 0), Pmode,
3811 expr_size (exp), TYPE_MODE (sizetype),
3812 GEN_INT (MEMORY_USE_WO),
3813 TYPE_MODE (integer_type_node));
3814 }
3815
3816 /* If value was not generated in the target, store it there.
3817 Convert the value to TARGET's type first if necessary. */
3818 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3819 one or both of them are volatile memory refs, we have to distinguish
3820 two cases:
3821 - expand_expr has used TARGET. In this case, we must not generate
3822 another copy. This can be detected because TEMP and TARGET will be
3823 equal according to == .
3824 - expand_expr has not used TARGET - that means that the source just
3825 happens to have the same RTX form. Since TEMP will have been created
3826 by expand_expr, it will compare unequal to TARGET according to == .
3827 We must generate a copy in this case, to reach the correct number
3828 of volatile memory references. */
3829
3830 if ((! rtx_equal_p (temp, target)
3831 || (temp != target && (side_effects_p (temp)
3832 || side_effects_p (target))))
3833 && TREE_CODE (exp) != ERROR_MARK)
3834 {
3835 target = protect_from_queue (target, 1);
3836 if (GET_MODE (temp) != GET_MODE (target)
3837 && GET_MODE (temp) != VOIDmode)
3838 {
3839 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3840 if (dont_return_target)
3841 {
3842 /* In this case, we will return TEMP,
3843 so make sure it has the proper mode.
3844 But don't forget to store the value into TARGET. */
3845 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3846 emit_move_insn (target, temp);
3847 }
3848 else
3849 convert_move (target, temp, unsignedp);
3850 }
3851
3852 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3853 {
3854 /* Handle copying a string constant into an array.
3855 The string constant may be shorter than the array.
3856 So copy just the string's actual length, and clear the rest. */
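/* Illustrative example (an assumption, not part of the original source):
   for

       char buf[8] = "abc";

   the STRING_CST supplies 4 bytes ("abc" plus the terminating null), so
   4 bytes are block-copied into BUF and the remaining 4 bytes are cleared
   by the code below.  */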
3857 rtx size;
3858 rtx addr;
3859
3860 /* Get the size of the data type of the string,
3861 which is actually the size of the target. */
3862 size = expr_size (exp);
3863 if (GET_CODE (size) == CONST_INT
3864 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3865 emit_block_move (target, temp, size,
3866 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3867 else
3868 {
3869 /* Compute the size of the data to copy from the string. */
3870 tree copy_size
3871 = size_binop (MIN_EXPR,
3872 make_tree (sizetype, size),
3873 size_int (TREE_STRING_LENGTH (exp)));
3874 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3875 VOIDmode, 0);
3876 rtx label = 0;
3877
3878 /* Copy that much. */
3879 emit_block_move (target, temp, copy_size_rtx,
3880 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3881
3882 /* Figure out how much is left in TARGET that we have to clear.
3883 Do all calculations in ptr_mode. */
3884
3885 addr = XEXP (target, 0);
3886 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3887
3888 if (GET_CODE (copy_size_rtx) == CONST_INT)
3889 {
3890 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3891 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3892 }
3893 else
3894 {
3895 addr = force_reg (ptr_mode, addr);
3896 addr = expand_binop (ptr_mode, add_optab, addr,
3897 copy_size_rtx, NULL_RTX, 0,
3898 OPTAB_LIB_WIDEN);
3899
3900 size = expand_binop (ptr_mode, sub_optab, size,
3901 copy_size_rtx, NULL_RTX, 0,
3902 OPTAB_LIB_WIDEN);
3903
3904 label = gen_label_rtx ();
3905 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3906 GET_MODE (size), 0, 0, label);
3907 }
3908
3909 if (size != const0_rtx)
3910 {
3911 /* Be sure we can write on ADDR. */
3912 if (current_function_check_memory_usage)
3913 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3914 addr, Pmode,
3915 size, TYPE_MODE (sizetype),
3916 GEN_INT (MEMORY_USE_WO),
3917 TYPE_MODE (integer_type_node));
3918 #ifdef TARGET_MEM_FUNCTIONS
3919 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3920 addr, ptr_mode,
3921 const0_rtx, TYPE_MODE (integer_type_node),
3922 convert_to_mode (TYPE_MODE (sizetype),
3923 size,
3924 TREE_UNSIGNED (sizetype)),
3925 TYPE_MODE (sizetype));
3926 #else
3927 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3928 addr, ptr_mode,
3929 convert_to_mode (TYPE_MODE (integer_type_node),
3930 size,
3931 TREE_UNSIGNED (integer_type_node)),
3932 TYPE_MODE (integer_type_node));
3933 #endif
3934 }
3935
3936 if (label)
3937 emit_label (label);
3938 }
3939 }
3940 /* Handle calls that return values in multiple non-contiguous locations.
3941 The Irix 6 ABI has examples of this. */
3942 else if (GET_CODE (target) == PARALLEL)
3943 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3944 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3945 else if (GET_MODE (temp) == BLKmode)
3946 emit_block_move (target, temp, expr_size (exp),
3947 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3948 else
3949 emit_move_insn (target, temp);
3950 }
3951
3952 /* If we don't want a value, return NULL_RTX. */
3953 if (! want_value)
3954 return NULL_RTX;
3955
3956 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3957 ??? The latter test doesn't seem to make sense. */
3958 else if (dont_return_target && GET_CODE (temp) != MEM)
3959 return temp;
3960
3961 /* Return TARGET itself if it is a hard register. */
3962 else if (want_value && GET_MODE (target) != BLKmode
3963 && ! (GET_CODE (target) == REG
3964 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3965 return copy_to_reg (target);
3966
3967 else
3968 return target;
3969 }
3970 \f
3971 /* Return 1 if EXP just contains zeros. */
3972
3973 static int
3974 is_zeros_p (exp)
3975 tree exp;
3976 {
3977 tree elt;
3978
3979 switch (TREE_CODE (exp))
3980 {
3981 case CONVERT_EXPR:
3982 case NOP_EXPR:
3983 case NON_LVALUE_EXPR:
3984 return is_zeros_p (TREE_OPERAND (exp, 0));
3985
3986 case INTEGER_CST:
3987 return integer_zerop (exp);
3988
3989 case COMPLEX_CST:
3990 return
3991 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3992
3993 case REAL_CST:
3994 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3995
3996 case CONSTRUCTOR:
3997 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3998 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3999 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4000 if (! is_zeros_p (TREE_VALUE (elt)))
4001 return 0;
4002
4003 return 1;
4004
4005 default:
4006 return 0;
4007 }
4008 }
4009
4010 /* Return 1 if EXP contains mostly (3/4) zeros. */
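/* Illustrative example (an assumption, not part of the original source):
   for the constructor of

       int v[8] = { 0, 0, 7, 0, 0, 0, 0, 1 };

   six of the eight elements are zero, so 4 * 6 >= 3 * 8 holds and
   mostly_zeros_p returns 1; callers then prefer to clear the whole
   object first and store only the nonzero elements.  */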
4011
4012 static int
4013 mostly_zeros_p (exp)
4014 tree exp;
4015 {
4016 if (TREE_CODE (exp) == CONSTRUCTOR)
4017 {
4018 int elts = 0, zeros = 0;
4019 tree elt = CONSTRUCTOR_ELTS (exp);
4020 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4021 {
4022 /* If there are no ranges of true bits, it is all zero. */
4023 return elt == NULL_TREE;
4024 }
4025 for (; elt; elt = TREE_CHAIN (elt))
4026 {
4027 /* We do not handle the case where the index is a RANGE_EXPR,
4028 so the statistic will be somewhat inaccurate.
4029 We do make a more accurate count in store_constructor itself,
4030 and since this function is only used for nested array elements,
4031 this should be close enough. */
4032 if (mostly_zeros_p (TREE_VALUE (elt)))
4033 zeros++;
4034 elts++;
4035 }
4036
4037 return 4 * zeros >= 3 * elts;
4038 }
4039
4040 return is_zeros_p (exp);
4041 }
4042 \f
4043 /* Helper function for store_constructor.
4044 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4045 TYPE is the type of the CONSTRUCTOR, not the element type.
4046 ALIGN and CLEARED are as for store_constructor.
4047
4048 This provides a recursive shortcut back to store_constructor when it isn't
4049 necessary to go through store_field. This is so that we can pass through
4050 the cleared field to let store_constructor know that we may not have to
4051 clear a substructure if the outer structure has already been cleared. */
4052
4053 static void
4054 store_constructor_field (target, bitsize, bitpos,
4055 mode, exp, type, align, cleared)
4056 rtx target;
4057 unsigned HOST_WIDE_INT bitsize;
4058 HOST_WIDE_INT bitpos;
4059 enum machine_mode mode;
4060 tree exp, type;
4061 unsigned int align;
4062 int cleared;
4063 {
4064 if (TREE_CODE (exp) == CONSTRUCTOR
4065 && bitpos % BITS_PER_UNIT == 0
4066 /* If we have a non-zero bitpos for a register target, then we just
4067 let store_field do the bitfield handling. This is unlikely to
4068 generate unnecessary clear instructions anyway. */
4069 && (bitpos == 0 || GET_CODE (target) == MEM))
4070 {
4071 if (bitpos != 0)
4072 target
4073 = change_address (target,
4074 GET_MODE (target) == BLKmode
4075 || 0 != (bitpos
4076 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4077 ? BLKmode : VOIDmode,
4078 plus_constant (XEXP (target, 0),
4079 bitpos / BITS_PER_UNIT));
4080 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4081 }
4082 else
4083 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4084 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4085 int_size_in_bytes (type), 0);
4086 }
4087
4088 /* Store the value of constructor EXP into the rtx TARGET.
4089 TARGET is either a REG or a MEM.
4090 ALIGN is the maximum known alignment for TARGET, in bits.
4091 CLEARED is true if TARGET is known to have been zero'd.
4092 SIZE is the number of bytes of TARGET we are allowed to modify: this
4093 may not be the same as the size of EXP if we are assigning to a field
4094 which has been packed to exclude padding bits. */
4095
4096 static void
4097 store_constructor (exp, target, align, cleared, size)
4098 tree exp;
4099 rtx target;
4100 unsigned int align;
4101 int cleared;
4102 unsigned HOST_WIDE_INT size;
4103 {
4104 tree type = TREE_TYPE (exp);
4105 #ifdef WORD_REGISTER_OPERATIONS
4106 rtx exp_size = expr_size (exp);
4107 #endif
4108
4109 /* We know our target cannot conflict, since safe_from_p has been called. */
4110 #if 0
4111 /* Don't try copying piece by piece into a hard register
4112 since that is vulnerable to being clobbered by EXP.
4113 Instead, construct in a pseudo register and then copy it all. */
4114 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4115 {
4116 rtx temp = gen_reg_rtx (GET_MODE (target));
4117 store_constructor (exp, temp, align, cleared, size);
4118 emit_move_insn (target, temp);
4119 return;
4120 }
4121 #endif
4122
4123 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4124 || TREE_CODE (type) == QUAL_UNION_TYPE)
4125 {
4126 register tree elt;
4127
4128 /* Inform later passes that the whole union value is dead. */
4129 if ((TREE_CODE (type) == UNION_TYPE
4130 || TREE_CODE (type) == QUAL_UNION_TYPE)
4131 && ! cleared)
4132 {
4133 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4134
4135 /* If the constructor is empty, clear the union. */
4136 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4137 clear_storage (target, expr_size (exp),
4138 TYPE_ALIGN (type) / BITS_PER_UNIT);
4139 }
4140
4141 /* If we are building a static constructor into a register,
4142 set the initial value as zero so we can fold the value into
4143 a constant. But if more than one register is involved,
4144 this probably loses. */
4145 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4146 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4147 {
4148 if (! cleared)
4149 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4150
4151 cleared = 1;
4152 }
4153
4154 /* If the constructor has fewer fields than the structure
4155 or if we are initializing the structure to mostly zeros,
4156 clear the whole structure first. */
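/* Illustrative example (an assumption, not part of the original source):
   for

       struct { int x, y, z; } p = { 1, 2 };

   the constructor lists only two of the three fields, so the whole of P
   is cleared first and then X and Y are stored.  */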
4157 else if (size > 0
4158 && ((list_length (CONSTRUCTOR_ELTS (exp))
4159 != fields_length (type))
4160 || mostly_zeros_p (exp)))
4161 {
4162 if (! cleared)
4163 clear_storage (target, GEN_INT (size),
4164 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4165
4166 cleared = 1;
4167 }
4168 else if (! cleared)
4169 /* Inform later passes that the old value is dead. */
4170 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4171
4172 /* Store each element of the constructor into
4173 the corresponding field of TARGET. */
4174
4175 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4176 {
4177 register tree field = TREE_PURPOSE (elt);
4178 #ifdef WORD_REGISTER_OPERATIONS
4179 tree value = TREE_VALUE (elt);
4180 #endif
4181 register enum machine_mode mode;
4182 HOST_WIDE_INT bitsize;
4183 HOST_WIDE_INT bitpos = 0;
4184 int unsignedp;
4185 tree offset;
4186 rtx to_rtx = target;
4187
4188 /* Just ignore missing fields.
4189 We cleared the whole structure, above,
4190 if any fields are missing. */
4191 if (field == 0)
4192 continue;
4193
4194 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4195 continue;
4196
4197 if (host_integerp (DECL_SIZE (field), 1))
4198 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4199 else
4200 bitsize = -1;
4201
4202 unsignedp = TREE_UNSIGNED (field);
4203 mode = DECL_MODE (field);
4204 if (DECL_BIT_FIELD (field))
4205 mode = VOIDmode;
4206
4207 offset = DECL_FIELD_OFFSET (field);
4208 if (host_integerp (offset, 0)
4209 && host_integerp (bit_position (field), 0))
4210 {
4211 bitpos = int_bit_position (field);
4212 offset = 0;
4213 }
4214 else
4215 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4216
4217 if (offset)
4218 {
4219 rtx offset_rtx;
4220
4221 if (contains_placeholder_p (offset))
4222 offset = build (WITH_RECORD_EXPR, bitsizetype,
4223 offset, make_tree (TREE_TYPE (exp), target));
4224
4225 offset = size_binop (EXACT_DIV_EXPR, offset, bitsize_unit_node);
4226 offset = convert (sizetype, offset);
4227
4228 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4229 if (GET_CODE (to_rtx) != MEM)
4230 abort ();
4231
4232 if (GET_MODE (offset_rtx) != ptr_mode)
4233 {
4234 #ifdef POINTERS_EXTEND_UNSIGNED
4235 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4236 #else
4237 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4238 #endif
4239 }
4240
4241 to_rtx
4242 = change_address (to_rtx, VOIDmode,
4243 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4244 force_reg (ptr_mode,
4245 offset_rtx)));
4246 }
4247
4248 if (TREE_READONLY (field))
4249 {
4250 if (GET_CODE (to_rtx) == MEM)
4251 to_rtx = copy_rtx (to_rtx);
4252
4253 RTX_UNCHANGING_P (to_rtx) = 1;
4254 }
4255
4256 #ifdef WORD_REGISTER_OPERATIONS
4257 /* If this initializes a field that is smaller than a word, at the
4258 start of a word, try to widen it to a full word.
4259 This special case allows us to output C++ member function
4260 initializations in a form that the optimizers can understand. */
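/* Illustrative example (an assumption, not part of the original source):
   if the first field of a structure held in a REG is a 16-bit integer
   initialized with the constant 3, the 16-bit store is widened here to a
   full-word store of 3 (shifted up on big-endian targets), so later
   word-sized uses of the pseudo are not seen by the optimizers as
   partial updates of the register.  */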
4261 if (GET_CODE (target) == REG
4262 && bitsize < BITS_PER_WORD
4263 && bitpos % BITS_PER_WORD == 0
4264 && GET_MODE_CLASS (mode) == MODE_INT
4265 && TREE_CODE (value) == INTEGER_CST
4266 && GET_CODE (exp_size) == CONST_INT
4267 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4268 {
4269 tree type = TREE_TYPE (value);
4270 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4271 {
4272 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4273 value = convert (type, value);
4274 }
4275 if (BYTES_BIG_ENDIAN)
4276 value
4277 = fold (build (LSHIFT_EXPR, type, value,
4278 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4279 bitsize = BITS_PER_WORD;
4280 mode = word_mode;
4281 }
4282 #endif
4283 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4284 TREE_VALUE (elt), type,
4285 MIN (align,
4286 DECL_ALIGN (TREE_PURPOSE (elt))),
4287 cleared);
4288 }
4289 }
4290 else if (TREE_CODE (type) == ARRAY_TYPE)
4291 {
4292 register tree elt;
4293 register int i;
4294 int need_to_clear;
4295 tree domain = TYPE_DOMAIN (type);
4296 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4297 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4298 tree elttype = TREE_TYPE (type);
4299
4300 /* If the constructor has fewer elements than the array,
4301 clear the whole array first. Similarly if this is a
4302 static constructor of a non-BLKmode object. */
4303 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4304 need_to_clear = 1;
4305 else
4306 {
4307 HOST_WIDE_INT count = 0, zero_count = 0;
4308 need_to_clear = 0;
4309 /* This loop is a more accurate version of the loop in
4310 mostly_zeros_p (it handles RANGE_EXPR in an index).
4311 It is also needed to check for missing elements. */
4312 for (elt = CONSTRUCTOR_ELTS (exp);
4313 elt != NULL_TREE;
4314 elt = TREE_CHAIN (elt))
4315 {
4316 tree index = TREE_PURPOSE (elt);
4317 HOST_WIDE_INT this_node_count;
4318 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4319 {
4320 tree lo_index = TREE_OPERAND (index, 0);
4321 tree hi_index = TREE_OPERAND (index, 1);
4322
4323 if (TREE_CODE (lo_index) != INTEGER_CST
4324 || TREE_CODE (hi_index) != INTEGER_CST)
4325 {
4326 need_to_clear = 1;
4327 break;
4328 }
4329 this_node_count = (TREE_INT_CST_LOW (hi_index)
4330 - TREE_INT_CST_LOW (lo_index) + 1);
4331 }
4332 else
4333 this_node_count = 1;
4334 count += this_node_count;
4335 if (mostly_zeros_p (TREE_VALUE (elt)))
4336 zero_count += this_node_count;
4337 }
4338 /* Clear the entire array first if there are any missing elements,
4339 or if the incidence of zero elements is >= 75%. */
4340 if (count < maxelt - minelt + 1
4341 || 4 * zero_count >= 3 * count)
4342 need_to_clear = 1;
4343 }
4344 if (need_to_clear && size > 0)
4345 {
4346 if (! cleared)
4347 clear_storage (target, GEN_INT (size),
4348 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4349 cleared = 1;
4350 }
4351 else
4352 /* Inform later passes that the old value is dead. */
4353 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4354
4355 /* Store each element of the constructor into
4356 the corresponding element of TARGET, determined
4357 by counting the elements. */
4358 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4359 elt;
4360 elt = TREE_CHAIN (elt), i++)
4361 {
4362 register enum machine_mode mode;
4363 int bitsize;
4364 int bitpos;
4365 int unsignedp;
4366 tree value = TREE_VALUE (elt);
4367 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4368 tree index = TREE_PURPOSE (elt);
4369 rtx xtarget = target;
4370
4371 if (cleared && is_zeros_p (value))
4372 continue;
4373
4374 unsignedp = TREE_UNSIGNED (elttype);
4375 mode = TYPE_MODE (elttype);
4376 if (mode == BLKmode)
4377 {
4378 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4379 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4380 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4381 else
4382 bitsize = -1;
4383 }
4384 else
4385 bitsize = GET_MODE_BITSIZE (mode);
4386
4387 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4388 {
4389 tree lo_index = TREE_OPERAND (index, 0);
4390 tree hi_index = TREE_OPERAND (index, 1);
4391 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4392 struct nesting *loop;
4393 HOST_WIDE_INT lo, hi, count;
4394 tree position;
4395
4396 /* If the range is constant and "small", unroll the loop. */
4397 if (TREE_CODE (lo_index) == INTEGER_CST
4398 && TREE_CODE (hi_index) == INTEGER_CST
4399 && (lo = TREE_INT_CST_LOW (lo_index),
4400 hi = TREE_INT_CST_LOW (hi_index),
4401 count = hi - lo + 1,
4402 (GET_CODE (target) != MEM
4403 || count <= 2
4404 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4405 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4406 <= 40 * 8))))
4407 {
4408 lo -= minelt; hi -= minelt;
4409 for (; lo <= hi; lo++)
4410 {
4411 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4412 store_constructor_field (target, bitsize, bitpos, mode,
4413 value, type, align, cleared);
4414 }
4415 }
4416 else
4417 {
4418 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4419 loop_top = gen_label_rtx ();
4420 loop_end = gen_label_rtx ();
4421
4422 unsignedp = TREE_UNSIGNED (domain);
4423
4424 index = build_decl (VAR_DECL, NULL_TREE, domain);
4425
4426 DECL_RTL (index) = index_r
4427 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4428 &unsignedp, 0));
4429
4430 if (TREE_CODE (value) == SAVE_EXPR
4431 && SAVE_EXPR_RTL (value) == 0)
4432 {
4433 /* Make sure value gets expanded once before the
4434 loop. */
4435 expand_expr (value, const0_rtx, VOIDmode, 0);
4436 emit_queue ();
4437 }
4438 store_expr (lo_index, index_r, 0);
4439 loop = expand_start_loop (0);
4440
4441 /* Assign value to element index. */
4442 position
4443 = convert (ssizetype,
4444 fold (build (MINUS_EXPR, TREE_TYPE (index),
4445 index, TYPE_MIN_VALUE (domain))));
4446 position = size_binop (MULT_EXPR, position,
4447 convert (ssizetype,
4448 TYPE_SIZE_UNIT (elttype)));
4449
4450 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4451 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4452 xtarget = change_address (target, mode, addr);
4453 if (TREE_CODE (value) == CONSTRUCTOR)
4454 store_constructor (value, xtarget, align, cleared,
4455 bitsize / BITS_PER_UNIT);
4456 else
4457 store_expr (value, xtarget, 0);
4458
4459 expand_exit_loop_if_false (loop,
4460 build (LT_EXPR, integer_type_node,
4461 index, hi_index));
4462
4463 expand_increment (build (PREINCREMENT_EXPR,
4464 TREE_TYPE (index),
4465 index, integer_one_node), 0, 0);
4466 expand_end_loop ();
4467 emit_label (loop_end);
4468 }
4469 }
4470 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4471 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4472 {
4473 rtx pos_rtx, addr;
4474 tree position;
4475
4476 if (index == 0)
4477 index = ssize_int (i);
4478
4479 if (minelt)
4480 index = convert (ssizetype,
4481 fold (build (MINUS_EXPR, TREE_TYPE (index),
4482 index, TYPE_MIN_VALUE (domain))));
4483 position = size_binop (MULT_EXPR, index,
4484 convert (ssizetype,
4485 TYPE_SIZE_UNIT (elttype)));
4486 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4487 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4488 xtarget = change_address (target, mode, addr);
4489 store_expr (value, xtarget, 0);
4490 }
4491 else
4492 {
4493 if (index != 0)
4494 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4495 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 else
4497 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4498 store_constructor_field (target, bitsize, bitpos, mode, value,
4499 type, align, cleared);
4500 }
4501 }
4502 }
4503 /* set constructor assignments */
4504 else if (TREE_CODE (type) == SET_TYPE)
4505 {
4506 tree elt = CONSTRUCTOR_ELTS (exp);
4507 int nbytes = int_size_in_bytes (type), nbits;
4508 tree domain = TYPE_DOMAIN (type);
4509 tree domain_min, domain_max, bitlength;
4510
4511 /* The default implementation strategy is to extract the constant
4512 parts of the constructor, use that to initialize the target,
4513 and then "or" in whatever non-constant ranges we need in addition.
4514
4515 If a large set is all zero or all ones, it is
4516 probably better to set it using memset (if available) or bzero.
4517 Also, if a large set has just a single range, it may also be
4518 better to first clear the set (using bzero/memset), and then
4519 set the bits we want. */
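/* Illustrative example (an assumption, not part of the original source):
   for a set constructor containing the constant range [2..5] and a single
   non-constant element [n], the bits for 2..5 are assembled into constant
   words and stored below, and the bit for N is then set at run time via
   the __setbits library call.  */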
4520
4521 /* Check for all zeros. */
4522 if (elt == NULL_TREE && size > 0)
4523 {
4524 if (!cleared)
4525 clear_storage (target, GEN_INT (size),
4526 TYPE_ALIGN (type) / BITS_PER_UNIT);
4527 return;
4528 }
4529
4530 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4531 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4532 bitlength = size_binop (PLUS_EXPR,
4533 size_diffop (domain_max, domain_min),
4534 ssize_int (1));
4535
4536 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4537 abort ();
4538 nbits = TREE_INT_CST_LOW (bitlength);
4539
4540 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4541 are "complicated" (more than one range), initialize (the
4542 constant parts) by copying from a constant. */
4543 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4544 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4545 {
4546 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4547 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4548 char *bit_buffer = (char *) alloca (nbits);
4549 HOST_WIDE_INT word = 0;
4550 int bit_pos = 0;
4551 int ibit = 0;
4552 int offset = 0; /* In bytes from beginning of set. */
4553 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4554 for (;;)
4555 {
4556 if (bit_buffer[ibit])
4557 {
4558 if (BYTES_BIG_ENDIAN)
4559 word |= (1 << (set_word_size - 1 - bit_pos));
4560 else
4561 word |= 1 << bit_pos;
4562 }
4563 bit_pos++; ibit++;
4564 if (bit_pos >= set_word_size || ibit == nbits)
4565 {
4566 if (word != 0 || ! cleared)
4567 {
4568 rtx datum = GEN_INT (word);
4569 rtx to_rtx;
4570 /* The assumption here is that it is safe to use
4571 XEXP if the set is multi-word, but not if
4572 it's single-word. */
4573 if (GET_CODE (target) == MEM)
4574 {
4575 to_rtx = plus_constant (XEXP (target, 0), offset);
4576 to_rtx = change_address (target, mode, to_rtx);
4577 }
4578 else if (offset == 0)
4579 to_rtx = target;
4580 else
4581 abort ();
4582 emit_move_insn (to_rtx, datum);
4583 }
4584 if (ibit == nbits)
4585 break;
4586 word = 0;
4587 bit_pos = 0;
4588 offset += set_word_size / BITS_PER_UNIT;
4589 }
4590 }
4591 }
4592 else if (!cleared)
4593 {
4594 /* Don't bother clearing storage if the set is all ones. */
4595 if (TREE_CHAIN (elt) != NULL_TREE
4596 || (TREE_PURPOSE (elt) == NULL_TREE
4597 ? nbits != 1
4598 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4599 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4600 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4601 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4602 != nbits))))
4603 clear_storage (target, expr_size (exp),
4604 TYPE_ALIGN (type) / BITS_PER_UNIT);
4605 }
4606
4607 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4608 {
4609 /* start of range of element or NULL */
4610 tree startbit = TREE_PURPOSE (elt);
4611 /* end of range of element, or element value */
4612 tree endbit = TREE_VALUE (elt);
4613 #ifdef TARGET_MEM_FUNCTIONS
4614 HOST_WIDE_INT startb, endb;
4615 #endif
4616 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4617
4618 bitlength_rtx = expand_expr (bitlength,
4619 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4620
4621 /* handle non-range tuple element like [ expr ] */
4622 if (startbit == NULL_TREE)
4623 {
4624 startbit = save_expr (endbit);
4625 endbit = startbit;
4626 }
4627 startbit = convert (sizetype, startbit);
4628 endbit = convert (sizetype, endbit);
4629 if (! integer_zerop (domain_min))
4630 {
4631 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4632 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4633 }
4634 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4635 EXPAND_CONST_ADDRESS);
4636 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4637 EXPAND_CONST_ADDRESS);
4638
4639 if (REG_P (target))
4640 {
4641 targetx = assign_stack_temp (GET_MODE (target),
4642 GET_MODE_SIZE (GET_MODE (target)),
4643 0);
4644 emit_move_insn (targetx, target);
4645 }
4646 else if (GET_CODE (target) == MEM)
4647 targetx = target;
4648 else
4649 abort ();
4650
4651 #ifdef TARGET_MEM_FUNCTIONS
4652 /* Optimization: If startbit and endbit are
4653 constants divisible by BITS_PER_UNIT,
4654 call memset instead. */
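/* Illustrative worked example (an assumption, not part of the original
   source), taking BITS_PER_UNIT as 8: for the constant range [8..23],
   STARTB is 8 and ENDB is 24, both byte-aligned, so the call below
   amounts to memset (target + 1, -1, 2), setting bits 8 through 23.  */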
4655 if (TREE_CODE (startbit) == INTEGER_CST
4656 && TREE_CODE (endbit) == INTEGER_CST
4657 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4658 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4659 {
4660 emit_library_call (memset_libfunc, 0,
4661 VOIDmode, 3,
4662 plus_constant (XEXP (targetx, 0),
4663 startb / BITS_PER_UNIT),
4664 Pmode,
4665 constm1_rtx, TYPE_MODE (integer_type_node),
4666 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4667 TYPE_MODE (sizetype));
4668 }
4669 else
4670 #endif
4671 {
4672 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4673 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4674 bitlength_rtx, TYPE_MODE (sizetype),
4675 startbit_rtx, TYPE_MODE (sizetype),
4676 endbit_rtx, TYPE_MODE (sizetype));
4677 }
4678 if (REG_P (target))
4679 emit_move_insn (target, targetx);
4680 }
4681 }
4682
4683 else
4684 abort ();
4685 }
4686
4687 /* Store the value of EXP (an expression tree)
4688 into a subfield of TARGET which has mode MODE and occupies
4689 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4690 If MODE is VOIDmode, it means that we are storing into a bit-field.
4691
4692 If VALUE_MODE is VOIDmode, return nothing in particular.
4693 UNSIGNEDP is not used in this case.
4694
4695 Otherwise, return an rtx for the value stored. This rtx
4696 has mode VALUE_MODE if that is convenient to do.
4697 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4698
4699 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4700 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4701
4702 ALIAS_SET is the alias set for the destination. This value will
4703 (in general) be different from that for TARGET, since TARGET is a
4704 reference to the containing structure. */
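/* Illustrative example (an assumption, not part of the original source;
   the actual bit-field layout is target-dependent): for

       struct { unsigned a : 3; unsigned b : 5; } x;
       x.b = v;

   store_field is typically called with BITSIZE 5, BITPOS 3 and MODE
   VOIDmode, so the store is done with bit-field techniques.  */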
4705
4706 static rtx
4707 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4708 unsignedp, align, total_size, alias_set)
4709 rtx target;
4710 HOST_WIDE_INT bitsize;
4711 HOST_WIDE_INT bitpos;
4712 enum machine_mode mode;
4713 tree exp;
4714 enum machine_mode value_mode;
4715 int unsignedp;
4716 unsigned int align;
4717 HOST_WIDE_INT total_size;
4718 int alias_set;
4719 {
4720 HOST_WIDE_INT width_mask = 0;
4721
4722 if (TREE_CODE (exp) == ERROR_MARK)
4723 return const0_rtx;
4724
4725 if (bitsize < HOST_BITS_PER_WIDE_INT)
4726 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4727
4728 /* If we are storing into an unaligned field of an aligned union that is
4729 in a register, we may have the mode of TARGET being an integer mode but
4730 MODE == BLKmode. In that case, get an aligned object whose size and
4731 alignment are the same as TARGET and store TARGET into it (we can avoid
4732 the store if the field being stored is the entire width of TARGET). Then
4733 call ourselves recursively to store the field into a BLKmode version of
4734 that object. Finally, load from the object into TARGET. This is not
4735 very efficient in general, but should only be slightly more expensive
4736 than the otherwise-required unaligned accesses. Perhaps this can be
4737 cleaned up later. */
4738
4739 if (mode == BLKmode
4740 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4741 {
4742 rtx object = assign_stack_temp (GET_MODE (target),
4743 GET_MODE_SIZE (GET_MODE (target)), 0);
4744 rtx blk_object = copy_rtx (object);
4745
4746 MEM_SET_IN_STRUCT_P (object, 1);
4747 MEM_SET_IN_STRUCT_P (blk_object, 1);
4748 PUT_MODE (blk_object, BLKmode);
4749
4750 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4751 emit_move_insn (object, target);
4752
4753 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4754 align, total_size, alias_set);
4755
4756 /* Even though we aren't returning target, we need to
4757 give it the updated value. */
4758 emit_move_insn (target, object);
4759
4760 return blk_object;
4761 }
4762
4763 if (GET_CODE (target) == CONCAT)
4764 {
4765 /* We're storing into a struct containing a single __complex. */
4766
4767 if (bitpos != 0)
4768 abort ();
4769 return store_expr (exp, target, 0);
4770 }
4771
4772 /* If the structure is in a register or if the component
4773 is a bit field, we cannot use addressing to access it.
4774 Use bit-field techniques or SUBREG to store in it. */
4775
4776 if (mode == VOIDmode
4777 || (mode != BLKmode && ! direct_store[(int) mode]
4778 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4779 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4780 || GET_CODE (target) == REG
4781 || GET_CODE (target) == SUBREG
4782 /* If the field isn't aligned enough to store as an ordinary memref,
4783 store it as a bit field. */
4784 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4785 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4786 || bitpos % GET_MODE_ALIGNMENT (mode)))
4787 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4788 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4789 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4790 /* If the RHS and field are a constant size and the size of the
4791 RHS isn't the same size as the bitfield, we must use bitfield
4792 operations. */
4793 || (bitsize >= 0
4794 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4795 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4796 {
4797 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4798
4799 /* If BITSIZE is narrower than the size of the type of EXP
4800 we will be narrowing TEMP. Normally, what's wanted are the
4801 low-order bits. However, if EXP's type is a record and this is a
4802 big-endian machine, we want the upper BITSIZE bits. */
4803 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4804 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4805 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4806 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4807 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4808 - bitsize),
4809 temp, 1);
4810
4811 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4812 MODE. */
4813 if (mode != VOIDmode && mode != BLKmode
4814 && mode != TYPE_MODE (TREE_TYPE (exp)))
4815 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4816
4817 /* If the modes of TARGET and TEMP are both BLKmode, both
4818 must be in memory and BITPOS must be aligned on a byte
4819 boundary. If so, we simply do a block copy. */
4820 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4821 {
4822 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4823
4824 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4825 || bitpos % BITS_PER_UNIT != 0)
4826 abort ();
4827
4828 target = change_address (target, VOIDmode,
4829 plus_constant (XEXP (target, 0),
4830 bitpos / BITS_PER_UNIT));
4831
4832 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4833 align = MIN (exp_align, align);
4834
4835 /* Find an alignment that is consistent with the bit position. */
4836 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4837 align >>= 1;
4838
4839 emit_block_move (target, temp,
4840 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4841 / BITS_PER_UNIT),
4842 align);
4843
4844 return value_mode == VOIDmode ? const0_rtx : target;
4845 }
4846
4847 /* Store the value in the bitfield. */
4848 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4849 if (value_mode != VOIDmode)
4850 {
4851 /* The caller wants an rtx for the value. */
4852 /* If possible, avoid refetching from the bitfield itself. */
4853 if (width_mask != 0
4854 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4855 {
4856 tree count;
4857 enum machine_mode tmode;
4858
4859 if (unsignedp)
4860 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4861 tmode = GET_MODE (temp);
4862 if (tmode == VOIDmode)
4863 tmode = value_mode;
4864 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4865 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4866 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4867 }
4868 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4869 NULL_RTX, value_mode, 0, align,
4870 total_size);
4871 }
4872 return const0_rtx;
4873 }
4874 else
4875 {
4876 rtx addr = XEXP (target, 0);
4877 rtx to_rtx;
4878
4879 /* If a value is wanted, it must be the lhs;
4880 so make the address stable for multiple use. */
4881
4882 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4883 && ! CONSTANT_ADDRESS_P (addr)
4884 /* A frame-pointer reference is already stable. */
4885 && ! (GET_CODE (addr) == PLUS
4886 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4887 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4888 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4889 addr = copy_to_reg (addr);
4890
4891 /* Now build a reference to just the desired component. */
4892
4893 to_rtx = copy_rtx (change_address (target, mode,
4894 plus_constant (addr,
4895 (bitpos
4896 / BITS_PER_UNIT))));
4897 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4898 MEM_ALIAS_SET (to_rtx) = alias_set;
4899
4900 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4901 }
4902 }
4903 \f
4904 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4905 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4906 ARRAY_REFs and find the ultimate containing object, which we return.
4907
4908 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4909 bit position, and *PUNSIGNEDP to the signedness of the field.
4910 If the position of the field is variable, we store a tree
4911 giving the variable offset (in units) in *POFFSET.
4912 This offset is in addition to the bit position.
4913 If the position is not variable, we store 0 in *POFFSET.
4914 We set *PALIGNMENT to the alignment in bytes of the address that will be
4915 computed. This is the alignment of the thing we return if *POFFSET
4916 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4917
4918 If any of the extraction expressions is volatile,
4919 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4920
4921 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4922 is a mode that can be used to access the field. In that case, *PBITSIZE
4923 is redundant.
4924
4925 If the field describes a variable-sized object, *PMODE is set to
4926 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4927 this case, but the address of the object can be found. */
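/* Illustrative example (an assumption, not part of the original source;
   offsets depend on the target's type layout): for

       struct { int i; char c[10]; } s;

   and the reference s.c[3], get_inner_reference returns the VAR_DECL for
   S, with *PBITSIZE set to the size of a char, *PBITPOS set to the bit
   position of c[3] within S (the offset of C plus three bytes, expressed
   in bits), and *POFFSET set to zero since everything is constant.  */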
4928
4929 tree
4930 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4931 punsignedp, pvolatilep, palignment)
4932 tree exp;
4933 HOST_WIDE_INT *pbitsize;
4934 HOST_WIDE_INT *pbitpos;
4935 tree *poffset;
4936 enum machine_mode *pmode;
4937 int *punsignedp;
4938 int *pvolatilep;
4939 unsigned int *palignment;
4940 {
4941 tree size_tree = 0;
4942 enum machine_mode mode = VOIDmode;
4943 tree offset = size_zero_node;
4944 tree bit_offset = bitsize_zero_node;
4945 unsigned int alignment = BIGGEST_ALIGNMENT;
4946 tree tem;
4947
4948 /* First get the mode, signedness, and size. We do this from just the
4949 outermost expression. */
4950 if (TREE_CODE (exp) == COMPONENT_REF)
4951 {
4952 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4953 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4954 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4955
4956 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4957 }
4958 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4959 {
4960 size_tree = TREE_OPERAND (exp, 1);
4961 *punsignedp = TREE_UNSIGNED (exp);
4962 }
4963 else
4964 {
4965 mode = TYPE_MODE (TREE_TYPE (exp));
4966 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4967
4968 if (mode == BLKmode)
4969 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4970 else
4971 *pbitsize = GET_MODE_BITSIZE (mode);
4972 }
4973
4974 if (size_tree != 0)
4975 {
4976 if (! host_integerp (size_tree, 1))
4977 mode = BLKmode, *pbitsize = -1;
4978 else
4979 *pbitsize = tree_low_cst (size_tree, 1);
4980 }
4981
4982 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4983 and find the ultimate containing object. */
4984 while (1)
4985 {
4986 if (TREE_CODE (exp) == BIT_FIELD_REF)
4987 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
4988 else if (TREE_CODE (exp) == COMPONENT_REF)
4989 {
4990 tree field = TREE_OPERAND (exp, 1);
4991 tree this_offset = DECL_FIELD_OFFSET (field);
4992
4993 /* If this field hasn't been filled in yet, don't go
4994 past it. This should only happen when folding expressions
4995 made during type construction. */
4996 if (this_offset == 0)
4997 break;
4998 else if (! TREE_CONSTANT (this_offset)
4999 && contains_placeholder_p (this_offset))
5000 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5001
5002 offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field));
5003 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5004 DECL_FIELD_BIT_OFFSET (field));
5005
5006 if (! host_integerp (offset, 0))
5007 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5008 }
5009 else if (TREE_CODE (exp) == ARRAY_REF)
5010 {
5011 tree index = TREE_OPERAND (exp, 1);
5012 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5013 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5014
5015 /* We assume all arrays have sizes that are a multiple of a byte.
5016 First subtract the lower bound, if any, in the type of the
5017 index, then convert to sizetype and multiply by the size of the
5018 array element. */
5019 if (low_bound != 0 && ! integer_zerop (low_bound))
5020 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5021 index, low_bound));
5022
5023 if (! TREE_CONSTANT (index)
5024 && contains_placeholder_p (index))
5025 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5026
5027 offset = size_binop (PLUS_EXPR, offset,
5028 size_binop (MULT_EXPR,
5029 convert (sizetype, index),
5030 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
5031 }
5032 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5033 && ! ((TREE_CODE (exp) == NOP_EXPR
5034 || TREE_CODE (exp) == CONVERT_EXPR)
5035 && (TYPE_MODE (TREE_TYPE (exp))
5036 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5037 break;
5038
5039 /* If any reference in the chain is volatile, the effect is volatile. */
5040 if (TREE_THIS_VOLATILE (exp))
5041 *pvolatilep = 1;
5042
5043 /* If the offset is non-constant already, then we can't assume any
5044 alignment more than the alignment here. */
5045 if (! TREE_CONSTANT (offset))
5046 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5047
5048 exp = TREE_OPERAND (exp, 0);
5049 }
5050
5051 if (DECL_P (exp))
5052 alignment = MIN (alignment, DECL_ALIGN (exp));
5053 else if (TREE_TYPE (exp) != 0)
5054 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5055
5056 /* If OFFSET is constant, see if we can return the whole thing as a
5057 constant bit position. Otherwise, split it up. */
5058 if (host_integerp (offset, 0)
5059 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5060 bitsize_unit_node))
5061 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5062 && host_integerp (tem, 0))
5063 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5064 else
5065 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5066
5067 *pmode = mode;
5068 *palignment = alignment / BITS_PER_UNIT;
5069 return exp;
5070 }
5071
5072 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5073
5074 static enum memory_use_mode
5075 get_memory_usage_from_modifier (modifier)
5076 enum expand_modifier modifier;
5077 {
5078 switch (modifier)
5079 {
5080 case EXPAND_NORMAL:
5081 case EXPAND_SUM:
5082 return MEMORY_USE_RO;
5083 break;
5084 case EXPAND_MEMORY_USE_WO:
5085 return MEMORY_USE_WO;
5086 break;
5087 case EXPAND_MEMORY_USE_RW:
5088 return MEMORY_USE_RW;
5089 break;
5090 case EXPAND_MEMORY_USE_DONT:
5091 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5092 MEMORY_USE_DONT, because they are modifiers to a call of
5093 expand_expr in the ADDR_EXPR case of expand_expr. */
5094 case EXPAND_CONST_ADDRESS:
5095 case EXPAND_INITIALIZER:
5096 return MEMORY_USE_DONT;
5097 case EXPAND_MEMORY_USE_BAD:
5098 default:
5099 abort ();
5100 }
5101 }
5102 \f
5103 /* Given an rtx VALUE that may contain additions and multiplications,
5104 return an equivalent value that just refers to a register or memory.
5105 This is done by generating instructions to perform the arithmetic
5106 and returning a pseudo-register containing the value.
5107
5108 The returned value may be a REG, SUBREG, MEM or constant. */
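/* Illustrative example (an assumption, not part of the original source):
   given VALUE = (plus:SI (reg:SI 100) (const_int 4)) and TARGET = 0,
   force_operand emits an add instruction and returns a pseudo register
   holding the sum, so the caller can use the result wherever a general
   operand is required.  */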
5109
5110 rtx
5111 force_operand (value, target)
5112 rtx value, target;
5113 {
5114 register optab binoptab = 0;
5115 /* Use a temporary to force order of execution of calls to
5116 `force_operand'. */
5117 rtx tmp;
5118 register rtx op2;
5119 /* Use subtarget as the target for operand 0 of a binary operation. */
5120 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5121
5122 /* Check for a PIC address load. */
5123 if (flag_pic
5124 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5125 && XEXP (value, 0) == pic_offset_table_rtx
5126 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5127 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5128 || GET_CODE (XEXP (value, 1)) == CONST))
5129 {
5130 if (!subtarget)
5131 subtarget = gen_reg_rtx (GET_MODE (value));
5132 emit_move_insn (subtarget, value);
5133 return subtarget;
5134 }
5135
5136 if (GET_CODE (value) == PLUS)
5137 binoptab = add_optab;
5138 else if (GET_CODE (value) == MINUS)
5139 binoptab = sub_optab;
5140 else if (GET_CODE (value) == MULT)
5141 {
5142 op2 = XEXP (value, 1);
5143 if (!CONSTANT_P (op2)
5144 && !(GET_CODE (op2) == REG && op2 != subtarget))
5145 subtarget = 0;
5146 tmp = force_operand (XEXP (value, 0), subtarget);
5147 return expand_mult (GET_MODE (value), tmp,
5148 force_operand (op2, NULL_RTX),
5149 target, 0);
5150 }
5151
5152 if (binoptab)
5153 {
5154 op2 = XEXP (value, 1);
5155 if (!CONSTANT_P (op2)
5156 && !(GET_CODE (op2) == REG && op2 != subtarget))
5157 subtarget = 0;
5158 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5159 {
5160 binoptab = add_optab;
5161 op2 = negate_rtx (GET_MODE (value), op2);
5162 }
5163
5164 /* Check for an addition with OP2 a constant integer and our first
5165 operand a PLUS of a virtual register and something else. In that
5166 case, we want to emit the sum of the virtual register and the
5167 constant first and then add the other value. This allows virtual
5168 register instantiation to simply modify the constant rather than
5169 creating another one around this addition. */
5170 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5171 && GET_CODE (XEXP (value, 0)) == PLUS
5172 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5173 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5174 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5175 {
5176 rtx temp = expand_binop (GET_MODE (value), binoptab,
5177 XEXP (XEXP (value, 0), 0), op2,
5178 subtarget, 0, OPTAB_LIB_WIDEN);
5179 return expand_binop (GET_MODE (value), binoptab, temp,
5180 force_operand (XEXP (XEXP (value, 0), 1), 0),
5181 target, 0, OPTAB_LIB_WIDEN);
5182 }
5183
5184 tmp = force_operand (XEXP (value, 0), subtarget);
5185 return expand_binop (GET_MODE (value), binoptab, tmp,
5186 force_operand (op2, NULL_RTX),
5187 target, 0, OPTAB_LIB_WIDEN);
5188 /* We give UNSIGNEDP = 0 to expand_binop
5189 because the only operations we are expanding here are signed ones. */
5190 }
5191 return value;
5192 }
5193 \f
5194 /* Subroutine of expand_expr:
5195 save the non-copied parts (LIST) of an expr (LHS), and return a list
5196 which can restore these values to their previous values,
5197 should something modify their storage. */
5198
5199 static tree
5200 save_noncopied_parts (lhs, list)
5201 tree lhs;
5202 tree list;
5203 {
5204 tree tail;
5205 tree parts = 0;
5206
5207 for (tail = list; tail; tail = TREE_CHAIN (tail))
5208 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5209 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5210 else
5211 {
5212 tree part = TREE_VALUE (tail);
5213 tree part_type = TREE_TYPE (part);
5214 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5215 rtx target = assign_temp (part_type, 0, 1, 1);
5216 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5217 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5218 parts = tree_cons (to_be_saved,
5219 build (RTL_EXPR, part_type, NULL_TREE,
5220 (tree) target),
5221 parts);
5222 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5223 }
5224 return parts;
5225 }
5226
5227 /* Subroutine of expand_expr:
5228 record the non-copied parts (LIST) of an expr (LHS), and return a list
5229 which specifies the initial values of these parts. */
5230
5231 static tree
5232 init_noncopied_parts (lhs, list)
5233 tree lhs;
5234 tree list;
5235 {
5236 tree tail;
5237 tree parts = 0;
5238
5239 for (tail = list; tail; tail = TREE_CHAIN (tail))
5240 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5241 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5242 else if (TREE_PURPOSE (tail))
5243 {
5244 tree part = TREE_VALUE (tail);
5245 tree part_type = TREE_TYPE (part);
5246 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5247 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5248 }
5249 return parts;
5250 }
5251
5252 /* Subroutine of expand_expr: return nonzero iff there is no way that
5253 EXP can reference X, which is being modified. TOP_P is nonzero if this
5254 call is going to be used to determine whether we need a temporary
5255 for EXP, as opposed to a recursive call to this function.
5256
5257 It is always safe for this routine to return zero since it merely
5258 searches for optimization opportunities. */
5259
5260 static int
5261 safe_from_p (x, exp, top_p)
5262 rtx x;
5263 tree exp;
5264 int top_p;
5265 {
5266 rtx exp_rtl = 0;
5267 int i, nops;
5268 static int save_expr_count;
5269 static int save_expr_size = 0;
5270 static tree *save_expr_rewritten;
5271 static tree save_expr_trees[256];
5272
5273 if (x == 0
5274 /* If EXP has varying size, we MUST use a target since we currently
5275 have no way of allocating temporaries of variable size
5276 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5277 So we assume here that something at a higher level has prevented a
5278 clash. This is somewhat bogus, but the best we can do. Only
5279 do this when X is BLKmode and when we are at the top level. */
5280 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5281 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5282 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5283 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5284 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5285 != INTEGER_CST)
5286 && GET_MODE (x) == BLKmode))
5287 return 1;
5288
5289 if (top_p && save_expr_size == 0)
5290 {
5291 int rtn;
5292
5293 save_expr_count = 0;
5294 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5295 save_expr_rewritten = &save_expr_trees[0];
5296
5297 rtn = safe_from_p (x, exp, 1);
5298
5299 for (i = 0; i < save_expr_count; ++i)
5300 {
5301 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5302 abort ();
5303 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5304 }
5305
5306 save_expr_size = 0;
5307
5308 return rtn;
5309 }
5310
5311 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5312 find the underlying pseudo. */
5313 if (GET_CODE (x) == SUBREG)
5314 {
5315 x = SUBREG_REG (x);
5316 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5317 return 0;
5318 }
5319
5320 /* If X is a location in the outgoing argument area, it is always safe. */
5321 if (GET_CODE (x) == MEM
5322 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5323 || (GET_CODE (XEXP (x, 0)) == PLUS
5324 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5325 return 1;
5326
5327 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5328 {
5329 case 'd':
5330 exp_rtl = DECL_RTL (exp);
5331 break;
5332
5333 case 'c':
5334 return 1;
5335
5336 case 'x':
5337 if (TREE_CODE (exp) == TREE_LIST)
5338 return ((TREE_VALUE (exp) == 0
5339 || safe_from_p (x, TREE_VALUE (exp), 0))
5340 && (TREE_CHAIN (exp) == 0
5341 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5342 else if (TREE_CODE (exp) == ERROR_MARK)
5343 return 1; /* An already-visited SAVE_EXPR? */
5344 else
5345 return 0;
5346
5347 case '1':
5348 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5349
5350 case '2':
5351 case '<':
5352 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5353 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5354
5355 case 'e':
5356 case 'r':
5357 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5358 the expression. If it is set, we conflict iff we are that rtx or
5359 both are in memory. Otherwise, we check all operands of the
5360 expression recursively. */
5361
5362 switch (TREE_CODE (exp))
5363 {
5364 case ADDR_EXPR:
5365 return (staticp (TREE_OPERAND (exp, 0))
5366 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5367 || TREE_STATIC (exp));
5368
5369 case INDIRECT_REF:
5370 if (GET_CODE (x) == MEM)
5371 return 0;
5372 break;
5373
5374 case CALL_EXPR:
5375 exp_rtl = CALL_EXPR_RTL (exp);
5376 if (exp_rtl == 0)
5377 {
5378 /* Assume that the call will clobber all hard registers and
5379 all of memory. */
5380 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5381 || GET_CODE (x) == MEM)
5382 return 0;
5383 }
5384
5385 break;
5386
5387 case RTL_EXPR:
5388 /* If a sequence exists, we would have to scan every instruction
5389 in the sequence to see if it was safe. This is probably not
5390 worthwhile. */
5391 if (RTL_EXPR_SEQUENCE (exp))
5392 return 0;
5393
5394 exp_rtl = RTL_EXPR_RTL (exp);
5395 break;
5396
5397 case WITH_CLEANUP_EXPR:
5398 exp_rtl = RTL_EXPR_RTL (exp);
5399 break;
5400
5401 case CLEANUP_POINT_EXPR:
5402 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5403
5404 case SAVE_EXPR:
5405 exp_rtl = SAVE_EXPR_RTL (exp);
5406 if (exp_rtl)
5407 break;
5408
5409 /* This SAVE_EXPR might appear many times in the top-level
5410 safe_from_p() expression, and if it has a complex
5411 subexpression, examining it multiple times could result
5412 in a combinatorial explosion. E.g. on an Alpha
5413 running at least 200MHz, a Fortran test case compiled with
5414 optimization took about 28 minutes to compile -- even though
5415 it was only a few lines long, and the complicated line causing
5416 so much time to be spent in the earlier version of safe_from_p()
5417 had only 293 or so unique nodes.
5418
5419 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5420 where it is so we can turn it back in the top-level safe_from_p()
5421 when we're done. */
5422
5423 /* For now, don't bother re-sizing the array. */
5424 if (save_expr_count >= save_expr_size)
5425 return 0;
5426 save_expr_rewritten[save_expr_count++] = exp;
5427
5428 nops = tree_code_length[(int) SAVE_EXPR];
5429 for (i = 0; i < nops; i++)
5430 {
5431 tree operand = TREE_OPERAND (exp, i);
5432 if (operand == NULL_TREE)
5433 continue;
5434 TREE_SET_CODE (exp, ERROR_MARK);
5435 if (!safe_from_p (x, operand, 0))
5436 return 0;
5437 TREE_SET_CODE (exp, SAVE_EXPR);
5438 }
5439 TREE_SET_CODE (exp, ERROR_MARK);
5440 return 1;
5441
5442 case BIND_EXPR:
5443 /* The only operand we look at is operand 1. The rest aren't
5444 part of the expression. */
5445 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5446
5447 case METHOD_CALL_EXPR:
5448 /* This takes a rtx argument, but shouldn't appear here. */
5449 abort ();
5450
5451 default:
5452 break;
5453 }
5454
5455 /* If we have an rtx, we do not need to scan our operands. */
5456 if (exp_rtl)
5457 break;
5458
5459 nops = tree_code_length[(int) TREE_CODE (exp)];
5460 for (i = 0; i < nops; i++)
5461 if (TREE_OPERAND (exp, i) != 0
5462 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5463 return 0;
5464 }
5465
5466 /* If we have an rtl, find any enclosed object. Then see if we conflict
5467 with it. */
5468 if (exp_rtl)
5469 {
5470 if (GET_CODE (exp_rtl) == SUBREG)
5471 {
5472 exp_rtl = SUBREG_REG (exp_rtl);
5473 if (GET_CODE (exp_rtl) == REG
5474 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5475 return 0;
5476 }
5477
5478 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5479 are memory and EXP is not readonly. */
5480 return ! (rtx_equal_p (x, exp_rtl)
5481 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5482 && ! TREE_READONLY (exp)));
5483 }
5484
5485 /* If we reach here, it is safe. */
5486 return 1;
5487 }
5488
5489 /* Subroutine of expand_expr: return nonzero iff EXP is an
5490 expression whose type is statically determinable. */
5491
5492 static int
5493 fixed_type_p (exp)
5494 tree exp;
5495 {
5496 if (TREE_CODE (exp) == PARM_DECL
5497 || TREE_CODE (exp) == VAR_DECL
5498 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5499 || TREE_CODE (exp) == COMPONENT_REF
5500 || TREE_CODE (exp) == ARRAY_REF)
5501 return 1;
5502 return 0;
5503 }
5504
5505 /* Subroutine of expand_expr: return rtx if EXP is a
5506 variable or parameter; else return 0. */
5507
5508 static rtx
5509 var_rtx (exp)
5510 tree exp;
5511 {
5512 STRIP_NOPS (exp);
5513 switch (TREE_CODE (exp))
5514 {
5515 case PARM_DECL:
5516 case VAR_DECL:
5517 return DECL_RTL (exp);
5518 default:
5519 return 0;
5520 }
5521 }
5522
5523 #ifdef MAX_INTEGER_COMPUTATION_MODE
5524 void
5525 check_max_integer_computation_mode (exp)
5526 tree exp;
5527 {
5528 enum tree_code code;
5529 enum machine_mode mode;
5530
5531 /* Strip any NOPs that don't change the mode. */
5532 STRIP_NOPS (exp);
5533 code = TREE_CODE (exp);
5534
5535 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5536 if (code == NOP_EXPR
5537 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5538 return;
5539
5540 /* First check the type of the overall operation. We need only look at
5541 unary, binary and relational operations. */
5542 if (TREE_CODE_CLASS (code) == '1'
5543 || TREE_CODE_CLASS (code) == '2'
5544 || TREE_CODE_CLASS (code) == '<')
5545 {
5546 mode = TYPE_MODE (TREE_TYPE (exp));
5547 if (GET_MODE_CLASS (mode) == MODE_INT
5548 && mode > MAX_INTEGER_COMPUTATION_MODE)
5549 fatal ("unsupported wide integer operation");
5550 }
5551
5552 /* Check operand of a unary op. */
5553 if (TREE_CODE_CLASS (code) == '1')
5554 {
5555 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5556 if (GET_MODE_CLASS (mode) == MODE_INT
5557 && mode > MAX_INTEGER_COMPUTATION_MODE)
5558 fatal ("unsupported wide integer operation");
5559 }
5560
5561 /* Check operands of a binary/comparison op. */
5562 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5563 {
5564 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5565 if (GET_MODE_CLASS (mode) == MODE_INT
5566 && mode > MAX_INTEGER_COMPUTATION_MODE)
5567 fatal ("unsupported wide integer operation");
5568
5569 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5570 if (GET_MODE_CLASS (mode) == MODE_INT
5571 && mode > MAX_INTEGER_COMPUTATION_MODE)
5572 fatal ("unsupported wide integer operation");
5573 }
5574 }
5575 #endif
5576
5577 \f
5578 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5579 has any readonly fields. If any of the fields have types that
5580 contain readonly fields, return true as well. */
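/* For example (types assumed for illustration):
      struct inner { const int id; };
      struct outer { struct inner i; char *name; };
   both record types yield 1 here -- `inner' because `id' is readonly, and
   `outer' because its field `i' has a record type containing a readonly
   field.  */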
5581
5582 static int
5583 readonly_fields_p (type)
5584 tree type;
5585 {
5586 tree field;
5587
5588 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5589 if (TREE_CODE (field) == FIELD_DECL
5590 && (TREE_READONLY (field)
5591 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5592 && readonly_fields_p (TREE_TYPE (field)))))
5593 return 1;
5594
5595 return 0;
5596 }
5597 \f
5598 /* expand_expr: generate code for computing expression EXP.
5599 An rtx for the computed value is returned. The value is never null.
5600 In the case of a void EXP, const0_rtx is returned.
5601
5602 The value may be stored in TARGET if TARGET is nonzero.
5603 TARGET is just a suggestion; callers must assume that
5604 the rtx returned may not be the same as TARGET.
5605
5606 If TARGET is CONST0_RTX, it means that the value will be ignored.
5607
5608 If TMODE is not VOIDmode, it suggests generating the
5609 result in mode TMODE. But this is done only when convenient.
5610 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5611 TMODE is just a suggestion; callers must assume that
5612 the rtx returned may not have mode TMODE.
5613
5614 Note that TARGET may have neither TMODE nor MODE. In that case, it
5615 probably will not be used.
5616
5617 If MODIFIER is EXPAND_SUM then when EXP is an addition
5618 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5619 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5620 products as above, or REG or MEM, or constant.
5621 Ordinarily in such cases we would output mul or add instructions
5622 and then return a pseudo reg containing the sum.
5623
5624 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5625 it also marks a label as absolutely required (it can't be dead).
5626 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5627 This is used for outputting expressions used in initializers.
5628
5629 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5630 with a constant address even if that address is not normally legitimate.
5631 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
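/* As a sketch of the EXPAND_SUM case (RTL and operands assumed for
   illustration): expanding `p + i*4' under EXPAND_SUM may yield the
   unreduced form
      (plus (reg p') (mult (reg i') (const_int 4)))
   instead of forcing the sum into a fresh pseudo, so the caller can fold
   the whole expression into a single memory address.  */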
5632
5633 rtx
5634 expand_expr (exp, target, tmode, modifier)
5635 register tree exp;
5636 rtx target;
5637 enum machine_mode tmode;
5638 enum expand_modifier modifier;
5639 {
5640 register rtx op0, op1, temp;
5641 tree type = TREE_TYPE (exp);
5642 int unsignedp = TREE_UNSIGNED (type);
5643 register enum machine_mode mode;
5644 register enum tree_code code = TREE_CODE (exp);
5645 optab this_optab;
5646 rtx subtarget, original_target;
5647 int ignore;
5648 tree context;
5649 /* Used by check-memory-usage to make modifier read only. */
5650 enum expand_modifier ro_modifier;
5651
5652 /* Handle ERROR_MARK before anybody tries to access its type. */
5653 if (TREE_CODE (exp) == ERROR_MARK)
5654 {
5655 op0 = CONST0_RTX (tmode);
5656 if (op0 != 0)
5657 return op0;
5658 return const0_rtx;
5659 }
5660
5661 mode = TYPE_MODE (type);
5662 /* Use subtarget as the target for operand 0 of a binary operation. */
5663 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5664 original_target = target;
5665 ignore = (target == const0_rtx
5666 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5667 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5668 || code == COND_EXPR)
5669 && TREE_CODE (type) == VOID_TYPE));
5670
5671 /* Make a read-only version of the modifier. */
5672 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5673 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5674 ro_modifier = modifier;
5675 else
5676 ro_modifier = EXPAND_NORMAL;
5677
5678 /* Don't use hard regs as subtargets, because the combiner
5679 can only handle pseudo regs. */
5680 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5681 subtarget = 0;
5682 /* Avoid subtargets inside loops,
5683 since they hide some invariant expressions. */
5684 if (preserve_subexpressions_p ())
5685 subtarget = 0;
5686
5687 /* If we are going to ignore this result, we need only do something
5688 if there is a side-effect somewhere in the expression. If there
5689 is, short-circuit the most common cases here. Note that we must
5690 not call expand_expr with anything but const0_rtx in case this
5691 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5692
5693 if (ignore)
5694 {
5695 if (! TREE_SIDE_EFFECTS (exp))
5696 return const0_rtx;
5697
5698 /* Ensure we reference a volatile object even if value is ignored, but
5699 don't do this if all we are doing is taking its address. */
5700 if (TREE_THIS_VOLATILE (exp)
5701 && TREE_CODE (exp) != FUNCTION_DECL
5702 && mode != VOIDmode && mode != BLKmode
5703 && modifier != EXPAND_CONST_ADDRESS)
5704 {
5705 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5706 if (GET_CODE (temp) == MEM)
5707 temp = copy_to_reg (temp);
5708 return const0_rtx;
5709 }
5710
5711 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5712 || code == INDIRECT_REF || code == BUFFER_REF)
5713 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5714 VOIDmode, ro_modifier);
5715 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5716 || code == ARRAY_REF)
5717 {
5718 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5719 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5720 return const0_rtx;
5721 }
5722 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5723 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5724 /* If the second operand has no side effects, just evaluate
5725 the first. */
5726 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5727 VOIDmode, ro_modifier);
5728 else if (code == BIT_FIELD_REF)
5729 {
5730 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5731 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5732 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5733 return const0_rtx;
5734 }
5735 ;
5736 target = 0;
5737 }
5738
5739 #ifdef MAX_INTEGER_COMPUTATION_MODE
5740 /* Only check stuff here if the mode we want is different from the mode
5741 of the expression; if it's the same, check_max_integer_computation_mode
5742 will handle it. Do we really need to check this stuff at all? */
5743
5744 if (target
5745 && GET_MODE (target) != mode
5746 && TREE_CODE (exp) != INTEGER_CST
5747 && TREE_CODE (exp) != PARM_DECL
5748 && TREE_CODE (exp) != ARRAY_REF
5749 && TREE_CODE (exp) != COMPONENT_REF
5750 && TREE_CODE (exp) != BIT_FIELD_REF
5751 && TREE_CODE (exp) != INDIRECT_REF
5752 && TREE_CODE (exp) != CALL_EXPR
5753 && TREE_CODE (exp) != VAR_DECL
5754 && TREE_CODE (exp) != RTL_EXPR)
5755 {
5756 enum machine_mode mode = GET_MODE (target);
5757
5758 if (GET_MODE_CLASS (mode) == MODE_INT
5759 && mode > MAX_INTEGER_COMPUTATION_MODE)
5760 fatal ("unsupported wide integer operation");
5761 }
5762
5763 if (tmode != mode
5764 && TREE_CODE (exp) != INTEGER_CST
5765 && TREE_CODE (exp) != PARM_DECL
5766 && TREE_CODE (exp) != ARRAY_REF
5767 && TREE_CODE (exp) != COMPONENT_REF
5768 && TREE_CODE (exp) != BIT_FIELD_REF
5769 && TREE_CODE (exp) != INDIRECT_REF
5770 && TREE_CODE (exp) != VAR_DECL
5771 && TREE_CODE (exp) != CALL_EXPR
5772 && TREE_CODE (exp) != RTL_EXPR
5773 && GET_MODE_CLASS (tmode) == MODE_INT
5774 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5775 fatal ("unsupported wide integer operation");
5776
5777 check_max_integer_computation_mode (exp);
5778 #endif
5779
5780 /* If will do cse, generate all results into pseudo registers
5781 since 1) that allows cse to find more things
5782 and 2) otherwise cse could produce an insn the machine
5783 cannot support. */
5784
5785 if (! cse_not_expected && mode != BLKmode && target
5786 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5787 target = subtarget;
5788
5789 switch (code)
5790 {
5791 case LABEL_DECL:
5792 {
5793 tree function = decl_function_context (exp);
5794 /* Handle using a label in a containing function. */
5795 if (function != current_function_decl
5796 && function != inline_function_decl && function != 0)
5797 {
5798 struct function *p = find_function_data (function);
5799 /* Allocate in the memory associated with the function
5800 that the label is in. */
5801 push_obstacks (p->function_obstack,
5802 p->function_maybepermanent_obstack);
5803
5804 p->expr->x_forced_labels
5805 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5806 p->expr->x_forced_labels);
5807 pop_obstacks ();
5808 }
5809 else
5810 {
5811 if (modifier == EXPAND_INITIALIZER)
5812 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5813 label_rtx (exp),
5814 forced_labels);
5815 }
5816
5817 temp = gen_rtx_MEM (FUNCTION_MODE,
5818 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5819 if (function != current_function_decl
5820 && function != inline_function_decl && function != 0)
5821 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5822 return temp;
5823 }
5824
5825 case PARM_DECL:
5826 if (DECL_RTL (exp) == 0)
5827 {
5828 error_with_decl (exp, "prior parameter's size depends on `%s'");
5829 return CONST0_RTX (mode);
5830 }
5831
5832 /* ... fall through ... */
5833
5834 case VAR_DECL:
5835 /* If a static var's type was incomplete when the decl was written,
5836 but the type is complete now, lay out the decl now. */
5837 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5838 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5839 {
5840 push_obstacks_nochange ();
5841 end_temporary_allocation ();
5842 layout_decl (exp, 0);
5843 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5844 pop_obstacks ();
5845 }
5846
5847 /* Although static-storage variables start off initialized, according to
5848 ANSI C, a memcpy could overwrite them with uninitialized values. So
5849 we check them too. This also lets us check for read-only variables
5850 accessed via a non-const declaration, in case it won't be detected
5851 any other way (e.g., in an embedded system or OS kernel without
5852 memory protection).
5853
5854 Aggregates are not checked here; they're handled elsewhere. */
5855 if (cfun && current_function_check_memory_usage
5856 && code == VAR_DECL
5857 && GET_CODE (DECL_RTL (exp)) == MEM
5858 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5859 {
5860 enum memory_use_mode memory_usage;
5861 memory_usage = get_memory_usage_from_modifier (modifier);
5862
5863 if (memory_usage != MEMORY_USE_DONT)
5864 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5865 XEXP (DECL_RTL (exp), 0), Pmode,
5866 GEN_INT (int_size_in_bytes (type)),
5867 TYPE_MODE (sizetype),
5868 GEN_INT (memory_usage),
5869 TYPE_MODE (integer_type_node));
5870 }
5871
5872 /* ... fall through ... */
5873
5874 case FUNCTION_DECL:
5875 case RESULT_DECL:
5876 if (DECL_RTL (exp) == 0)
5877 abort ();
5878
5879 /* Ensure the variable is marked as used even if it doesn't go through
5880 a parser. If it hasn't been used yet, write out an external
5881 definition. */
5882 if (! TREE_USED (exp))
5883 {
5884 assemble_external (exp);
5885 TREE_USED (exp) = 1;
5886 }
5887
5888 /* Show we haven't gotten RTL for this yet. */
5889 temp = 0;
5890
5891 /* Handle variables inherited from containing functions. */
5892 context = decl_function_context (exp);
5893
5894 /* We treat inline_function_decl as an alias for the current function
5895 because that is the inline function whose vars, types, etc.
5896 are being merged into the current function.
5897 See expand_inline_function. */
5898
5899 if (context != 0 && context != current_function_decl
5900 && context != inline_function_decl
5901 /* If var is static, we don't need a static chain to access it. */
5902 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5903 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5904 {
5905 rtx addr;
5906
5907 /* Mark as non-local and addressable. */
5908 DECL_NONLOCAL (exp) = 1;
5909 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5910 abort ();
5911 mark_addressable (exp);
5912 if (GET_CODE (DECL_RTL (exp)) != MEM)
5913 abort ();
5914 addr = XEXP (DECL_RTL (exp), 0);
5915 if (GET_CODE (addr) == MEM)
5916 addr = gen_rtx_MEM (Pmode,
5917 fix_lexical_addr (XEXP (addr, 0), exp));
5918 else
5919 addr = fix_lexical_addr (addr, exp);
5920 temp = change_address (DECL_RTL (exp), mode, addr);
5921 }
5922
5923 /* This is the case of an array whose size is to be determined
5924 from its initializer, while the initializer is still being parsed.
5925 See expand_decl. */
5926
5927 else if (GET_CODE (DECL_RTL (exp)) == MEM
5928 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5929 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5930 XEXP (DECL_RTL (exp), 0));
5931
5932 /* If DECL_RTL is memory, we are in the normal case. If the address
5933 is not valid, or it is not a register and -fforce-addr is specified,
5934 get the address into a register. */
5935
5936 else if (GET_CODE (DECL_RTL (exp)) == MEM
5937 && modifier != EXPAND_CONST_ADDRESS
5938 && modifier != EXPAND_SUM
5939 && modifier != EXPAND_INITIALIZER
5940 && (! memory_address_p (DECL_MODE (exp),
5941 XEXP (DECL_RTL (exp), 0))
5942 || (flag_force_addr
5943 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5944 temp = change_address (DECL_RTL (exp), VOIDmode,
5945 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5946
5947 /* If we got something, return it. But first, set the alignment
5948 if the address is a register. */
5949 if (temp != 0)
5950 {
5951 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5952 mark_reg_pointer (XEXP (temp, 0),
5953 DECL_ALIGN (exp) / BITS_PER_UNIT);
5954
5955 return temp;
5956 }
5957
5958 /* If the mode of DECL_RTL does not match that of the decl, it
5959 must be a promoted value. We return a SUBREG of the wanted mode,
5960 but mark it so that we know that it was already extended. */
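/* Illustration (modes assumed): on a target that promotes QImode locals to
   SImode registers, DECL_RTL is (reg:SI N) while MODE is QImode; the code
   below returns (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set so
   that later consumers know the upper bits already hold a valid extension.  */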
5961
5962 if (GET_CODE (DECL_RTL (exp)) == REG
5963 && GET_MODE (DECL_RTL (exp)) != mode)
5964 {
5965 /* Get the signedness used for this variable. Ensure we get the
5966 same mode we got when the variable was declared. */
5967 if (GET_MODE (DECL_RTL (exp))
5968 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5969 abort ();
5970
5971 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5972 SUBREG_PROMOTED_VAR_P (temp) = 1;
5973 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5974 return temp;
5975 }
5976
5977 return DECL_RTL (exp);
5978
5979 case INTEGER_CST:
5980 return immed_double_const (TREE_INT_CST_LOW (exp),
5981 TREE_INT_CST_HIGH (exp), mode);
5982
5983 case CONST_DECL:
5984 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5985 EXPAND_MEMORY_USE_BAD);
5986
5987 case REAL_CST:
5988 /* If optimized, generate immediate CONST_DOUBLE
5989 which will be turned into memory by reload if necessary.
5990
5991 We used to force a register so that loop.c could see it. But
5992 this does not allow gen_* patterns to perform optimizations with
5993 the constants. It also produces two insns in cases like "x = 1.0;".
5994 On most machines, floating-point constants are not permitted in
5995 many insns, so we'd end up copying it to a register in any case.
5996
5997 Now, we do the copying in expand_binop, if appropriate. */
5998 return immed_real_const (exp);
5999
6000 case COMPLEX_CST:
6001 case STRING_CST:
6002 if (! TREE_CST_RTL (exp))
6003 output_constant_def (exp);
6004
6005 /* TREE_CST_RTL probably contains a constant address.
6006 On RISC machines where a constant address isn't valid,
6007 make some insns to get that address into a register. */
6008 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6009 && modifier != EXPAND_CONST_ADDRESS
6010 && modifier != EXPAND_INITIALIZER
6011 && modifier != EXPAND_SUM
6012 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6013 || (flag_force_addr
6014 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6015 return change_address (TREE_CST_RTL (exp), VOIDmode,
6016 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6017 return TREE_CST_RTL (exp);
6018
6019 case EXPR_WITH_FILE_LOCATION:
6020 {
6021 rtx to_return;
6022 char *saved_input_filename = input_filename;
6023 int saved_lineno = lineno;
6024 input_filename = EXPR_WFL_FILENAME (exp);
6025 lineno = EXPR_WFL_LINENO (exp);
6026 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6027 emit_line_note (input_filename, lineno);
6028 /* Possibly avoid switching back and forth here. */
6029 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6030 input_filename = saved_input_filename;
6031 lineno = saved_lineno;
6032 return to_return;
6033 }
6034
6035 case SAVE_EXPR:
6036 context = decl_function_context (exp);
6037
6038 /* If this SAVE_EXPR was at global context, assume we are an
6039 initialization function and move it into our context. */
6040 if (context == 0)
6041 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6042
6043 /* We treat inline_function_decl as an alias for the current function
6044 because that is the inline function whose vars, types, etc.
6045 are being merged into the current function.
6046 See expand_inline_function. */
6047 if (context == current_function_decl || context == inline_function_decl)
6048 context = 0;
6049
6050 /* If this is non-local, handle it. */
6051 if (context)
6052 {
6053 /* The following call just exists to abort if the context is
6054 not of a containing function. */
6055 find_function_data (context);
6056
6057 temp = SAVE_EXPR_RTL (exp);
6058 if (temp && GET_CODE (temp) == REG)
6059 {
6060 put_var_into_stack (exp);
6061 temp = SAVE_EXPR_RTL (exp);
6062 }
6063 if (temp == 0 || GET_CODE (temp) != MEM)
6064 abort ();
6065 return change_address (temp, mode,
6066 fix_lexical_addr (XEXP (temp, 0), exp));
6067 }
6068 if (SAVE_EXPR_RTL (exp) == 0)
6069 {
6070 if (mode == VOIDmode)
6071 temp = const0_rtx;
6072 else
6073 temp = assign_temp (type, 3, 0, 0);
6074
6075 SAVE_EXPR_RTL (exp) = temp;
6076 if (!optimize && GET_CODE (temp) == REG)
6077 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6078 save_expr_regs);
6079
6080 /* If the mode of TEMP does not match that of the expression, it
6081 must be a promoted value. We pass store_expr a SUBREG of the
6082 wanted mode but mark it so that we know that it was already
6083 extended. Note that `unsignedp' was modified above in
6084 this case. */
6085
6086 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6087 {
6088 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6089 SUBREG_PROMOTED_VAR_P (temp) = 1;
6090 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6091 }
6092
6093 if (temp == const0_rtx)
6094 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6095 EXPAND_MEMORY_USE_BAD);
6096 else
6097 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6098
6099 TREE_USED (exp) = 1;
6100 }
6101
6102 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6103 must be a promoted value. We return a SUBREG of the wanted mode,
6104 but mark it so that we know that it was already extended. */
6105
6106 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6107 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6108 {
6109 /* Compute the signedness and make the proper SUBREG. */
6110 promote_mode (type, mode, &unsignedp, 0);
6111 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6112 SUBREG_PROMOTED_VAR_P (temp) = 1;
6113 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6114 return temp;
6115 }
6116
6117 return SAVE_EXPR_RTL (exp);
6118
6119 case UNSAVE_EXPR:
6120 {
6121 rtx temp;
6122 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6123 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6124 return temp;
6125 }
6126
6127 case PLACEHOLDER_EXPR:
6128 {
6129 tree placeholder_expr;
6130
6131 /* If there is an object on the head of the placeholder list,
6132 see if some object in it is of type TYPE or a pointer to it. For
6133 further information, see tree.def. */
6134 for (placeholder_expr = placeholder_list;
6135 placeholder_expr != 0;
6136 placeholder_expr = TREE_CHAIN (placeholder_expr))
6137 {
6138 tree need_type = TYPE_MAIN_VARIANT (type);
6139 tree object = 0;
6140 tree old_list = placeholder_list;
6141 tree elt;
6142
6143 /* Find the outermost reference that is of the type we want.
6144 If none, see if any object has a type that is a pointer to
6145 the type we want. */
6146 for (elt = TREE_PURPOSE (placeholder_expr);
6147 elt != 0 && object == 0;
6148 elt
6149 = ((TREE_CODE (elt) == COMPOUND_EXPR
6150 || TREE_CODE (elt) == COND_EXPR)
6151 ? TREE_OPERAND (elt, 1)
6152 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6153 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6154 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6155 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6156 ? TREE_OPERAND (elt, 0) : 0))
6157 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6158 object = elt;
6159
6160 for (elt = TREE_PURPOSE (placeholder_expr);
6161 elt != 0 && object == 0;
6162 elt
6163 = ((TREE_CODE (elt) == COMPOUND_EXPR
6164 || TREE_CODE (elt) == COND_EXPR)
6165 ? TREE_OPERAND (elt, 1)
6166 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6167 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6168 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6169 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6170 ? TREE_OPERAND (elt, 0) : 0))
6171 if (POINTER_TYPE_P (TREE_TYPE (elt))
6172 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6173 == need_type))
6174 object = build1 (INDIRECT_REF, need_type, elt);
6175
6176 if (object != 0)
6177 {
6178 /* Expand this object skipping the list entries before
6179 it was found in case it is also a PLACEHOLDER_EXPR.
6180 In that case, we want to translate it using subsequent
6181 entries. */
6182 placeholder_list = TREE_CHAIN (placeholder_expr);
6183 temp = expand_expr (object, original_target, tmode,
6184 ro_modifier);
6185 placeholder_list = old_list;
6186 return temp;
6187 }
6188 }
6189 }
6190
6191 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6192 abort ();
6193
6194 case WITH_RECORD_EXPR:
6195 /* Put the object on the placeholder list, expand our first operand,
6196 and pop the list. */
6197 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6198 placeholder_list);
6199 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6200 tmode, ro_modifier);
6201 placeholder_list = TREE_CHAIN (placeholder_list);
6202 return target;
6203
6204 case GOTO_EXPR:
6205 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6206 expand_goto (TREE_OPERAND (exp, 0));
6207 else
6208 expand_computed_goto (TREE_OPERAND (exp, 0));
6209 return const0_rtx;
6210
6211 case EXIT_EXPR:
6212 expand_exit_loop_if_false (NULL_PTR,
6213 invert_truthvalue (TREE_OPERAND (exp, 0)));
6214 return const0_rtx;
6215
6216 case LABELED_BLOCK_EXPR:
6217 if (LABELED_BLOCK_BODY (exp))
6218 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6219 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6220 return const0_rtx;
6221
6222 case EXIT_BLOCK_EXPR:
6223 if (EXIT_BLOCK_RETURN (exp))
6224 sorry ("returned value in block_exit_expr");
6225 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6226 return const0_rtx;
6227
6228 case LOOP_EXPR:
6229 push_temp_slots ();
6230 expand_start_loop (1);
6231 expand_expr_stmt (TREE_OPERAND (exp, 0));
6232 expand_end_loop ();
6233 pop_temp_slots ();
6234
6235 return const0_rtx;
6236
6237 case BIND_EXPR:
6238 {
6239 tree vars = TREE_OPERAND (exp, 0);
6240 int vars_need_expansion = 0;
6241
6242 /* Need to open a binding contour here because
6243 if there are any cleanups they must be contained here. */
6244 expand_start_bindings (2);
6245
6246 /* Mark the corresponding BLOCK for output in its proper place. */
6247 if (TREE_OPERAND (exp, 2) != 0
6248 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6249 insert_block (TREE_OPERAND (exp, 2));
6250
6251 /* If VARS have not yet been expanded, expand them now. */
6252 while (vars)
6253 {
6254 if (DECL_RTL (vars) == 0)
6255 {
6256 vars_need_expansion = 1;
6257 expand_decl (vars);
6258 }
6259 expand_decl_init (vars);
6260 vars = TREE_CHAIN (vars);
6261 }
6262
6263 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6264
6265 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6266
6267 return temp;
6268 }
6269
6270 case RTL_EXPR:
6271 if (RTL_EXPR_SEQUENCE (exp))
6272 {
6273 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6274 abort ();
6275 emit_insns (RTL_EXPR_SEQUENCE (exp));
6276 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6277 }
6278 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6279 free_temps_for_rtl_expr (exp);
6280 return RTL_EXPR_RTL (exp);
6281
6282 case CONSTRUCTOR:
6283 /* If we don't need the result, just ensure we evaluate any
6284 subexpressions. */
6285 if (ignore)
6286 {
6287 tree elt;
6288 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6289 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6290 EXPAND_MEMORY_USE_BAD);
6291 return const0_rtx;
6292 }
6293
6294 /* All elts simple constants => refer to a constant in memory. But
6295 if this is a non-BLKmode mode, let it store a field at a time
6296 since that should make a CONST_INT or CONST_DOUBLE when we
6297 fold. Likewise, if we have a target we can use, it is best to
6298 store directly into the target unless the type is large enough
6299 that memcpy will be used. If we are making an initializer and
6300 all operands are constant, put it in memory as well. */
6301 else if ((TREE_STATIC (exp)
6302 && ((mode == BLKmode
6303 && ! (target != 0 && safe_from_p (target, exp, 1)))
6304 || TREE_ADDRESSABLE (exp)
6305 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6306 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6307 && (! MOVE_BY_PIECES_P
6308 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
6309 TYPE_ALIGN (type) / BITS_PER_UNIT))
6310 && ! mostly_zeros_p (exp))))
6311 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6312 {
6313 rtx constructor = output_constant_def (exp);
6314 if (modifier != EXPAND_CONST_ADDRESS
6315 && modifier != EXPAND_INITIALIZER
6316 && modifier != EXPAND_SUM
6317 && (! memory_address_p (GET_MODE (constructor),
6318 XEXP (constructor, 0))
6319 || (flag_force_addr
6320 && GET_CODE (XEXP (constructor, 0)) != REG)))
6321 constructor = change_address (constructor, VOIDmode,
6322 XEXP (constructor, 0));
6323 return constructor;
6324 }
6325
6326 else
6327 {
6328 /* Handle calls that pass values in multiple non-contiguous
6329 locations. The Irix 6 ABI has examples of this. */
6330 if (target == 0 || ! safe_from_p (target, exp, 1)
6331 || GET_CODE (target) == PARALLEL)
6332 {
6333 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6334 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6335 else
6336 target = assign_temp (type, 0, 1, 1);
6337 }
6338
6339 if (TREE_READONLY (exp))
6340 {
6341 if (GET_CODE (target) == MEM)
6342 target = copy_rtx (target);
6343
6344 RTX_UNCHANGING_P (target) = 1;
6345 }
6346
6347 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6348 int_size_in_bytes (TREE_TYPE (exp)));
6349 return target;
6350 }
6351
6352 case INDIRECT_REF:
6353 {
6354 tree exp1 = TREE_OPERAND (exp, 0);
6355 tree exp2;
6356 tree index;
6357 tree string = string_constant (exp1, &index);
6358
6359 /* Try to optimize reads from const strings. */
6360 if (string
6361 && TREE_CODE (string) == STRING_CST
6362 && TREE_CODE (index) == INTEGER_CST
6363 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6364 && GET_MODE_CLASS (mode) == MODE_INT
6365 && GET_MODE_SIZE (mode) == 1
6366 && modifier != EXPAND_MEMORY_USE_WO)
6367 return
6368 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6369
6370 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6371 op0 = memory_address (mode, op0);
6372
6373 if (cfun && current_function_check_memory_usage
6374 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6375 {
6376 enum memory_use_mode memory_usage;
6377 memory_usage = get_memory_usage_from_modifier (modifier);
6378
6379 if (memory_usage != MEMORY_USE_DONT)
6380 {
6381 in_check_memory_usage = 1;
6382 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6383 op0, Pmode,
6384 GEN_INT (int_size_in_bytes (type)),
6385 TYPE_MODE (sizetype),
6386 GEN_INT (memory_usage),
6387 TYPE_MODE (integer_type_node));
6388 in_check_memory_usage = 0;
6389 }
6390 }
6391
6392 temp = gen_rtx_MEM (mode, op0);
6393 /* If address was computed by addition,
6394 mark this as an element of an aggregate. */
6395 if (TREE_CODE (exp1) == PLUS_EXPR
6396 || (TREE_CODE (exp1) == SAVE_EXPR
6397 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6398 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6399 || (TREE_CODE (exp1) == ADDR_EXPR
6400 && (exp2 = TREE_OPERAND (exp1, 0))
6401 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6402 MEM_SET_IN_STRUCT_P (temp, 1);
6403
6404 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6405 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6406
6407 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6408 here, because, in C and C++, the fact that a location is accessed
6409 through a pointer to const does not mean that the value there can
6410 never change. Languages where it can never change should
6411 also set TREE_STATIC. */
6412 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6413
6414 /* If we are writing to this object and its type is a record with
6415 readonly fields, we must mark it as readonly so it will
6416 conflict with readonly references to those fields. */
6417 if (modifier == EXPAND_MEMORY_USE_WO
6418 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6419 RTX_UNCHANGING_P (temp) = 1;
6420
6421 return temp;
6422 }
6423
6424 case ARRAY_REF:
6425 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6426 abort ();
6427
6428 {
6429 tree array = TREE_OPERAND (exp, 0);
6430 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6431 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6432 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6433 HOST_WIDE_INT i;
6434
6435 /* Optimize the special case of a zero lower bound.
6436
6437 We convert the low_bound to sizetype to avoid some problems
6438 with constant folding. (E.g. suppose the lower bound is 1,
6439 and its mode is QI. Without the conversion, (ARRAY
6440 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6441 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6442
6443 if (! integer_zerop (low_bound))
6444 index = size_diffop (index, convert (sizetype, low_bound));
6445
6446 /* Fold an expression like: "foo"[2].
6447 This is not done in fold so it won't happen inside &.
6448 Don't fold if this is for wide characters since it's too
6449 difficult to do correctly and this is a very rare case. */
6450
6451 if (TREE_CODE (array) == STRING_CST
6452 && TREE_CODE (index) == INTEGER_CST
6453 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6454 && GET_MODE_CLASS (mode) == MODE_INT
6455 && GET_MODE_SIZE (mode) == 1)
6456 return
6457 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6458
6459 /* If this is a constant index into a constant array,
6460 just get the value from the array. Handle both the case where
6461 we have an explicit constructor and the case where our operand is
6462 a variable that was declared const. */
6463
6464 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6465 && TREE_CODE (index) == INTEGER_CST
6466 && 0 > compare_tree_int (index,
6467 list_length (CONSTRUCTOR_ELTS
6468 (TREE_OPERAND (exp, 0)))))
6469 {
6470 tree elem;
6471
6472 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6473 i = TREE_INT_CST_LOW (index);
6474 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6475 ;
6476
6477 if (elem)
6478 return expand_expr (fold (TREE_VALUE (elem)), target,
6479 tmode, ro_modifier);
6480 }
6481
6482 else if (optimize >= 1
6483 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6484 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6485 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6486 {
6487 if (TREE_CODE (index) == INTEGER_CST)
6488 {
6489 tree init = DECL_INITIAL (array);
6490
6491 if (TREE_CODE (init) == CONSTRUCTOR)
6492 {
6493 tree elem;
6494
6495 for (elem = CONSTRUCTOR_ELTS (init);
6496 (elem
6497 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6498 elem = TREE_CHAIN (elem))
6499 ;
6500
6501 if (elem)
6502 return expand_expr (fold (TREE_VALUE (elem)), target,
6503 tmode, ro_modifier);
6504 }
6505 else if (TREE_CODE (init) == STRING_CST
6506 && 0 > compare_tree_int (index,
6507 TREE_STRING_LENGTH (init)))
6508 return (GEN_INT
6509 (TREE_STRING_POINTER
6510 (init)[TREE_INT_CST_LOW (index)]));
6511 }
6512 }
6513 }
6514
6515 /* ... fall through ... */
6516
6517 case COMPONENT_REF:
6518 case BIT_FIELD_REF:
6519 /* If the operand is a CONSTRUCTOR, we can just extract the
6520 appropriate field if it is present. Don't do this if we have
6521 already written the data since we want to refer to that copy
6522 and varasm.c assumes that's what we'll do. */
6523 if (code != ARRAY_REF
6524 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6525 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6526 {
6527 tree elt;
6528
6529 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6530 elt = TREE_CHAIN (elt))
6531 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6532 /* We can normally use the value of the field in the
6533 CONSTRUCTOR. However, if this is a bitfield in
6534 an integral mode that we can fit in a HOST_WIDE_INT,
6535 we must mask only the number of bits in the bitfield,
6536 since this is done implicitly by the constructor. If
6537 the bitfield does not meet either of those conditions,
6538 we can't do this optimization. */
6539 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6540 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6541 == MODE_INT)
6542 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6543 <= HOST_BITS_PER_WIDE_INT))))
6544 {
6545 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6546 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6547 {
6548 HOST_WIDE_INT bitsize
6549 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6550
6551 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6552 {
6553 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6554 op0 = expand_and (op0, op1, target);
6555 }
6556 else
6557 {
6558 enum machine_mode imode
6559 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6560 tree count
6561 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6562 0);
6563
6564 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6565 target, 0);
6566 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6567 target, 0);
6568 }
6569 }
6570
6571 return op0;
6572 }
6573 }
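/* Worked example for the bitfield handling above (widths assumed): for an
   unsigned 3-bit field the mask is (1 << 3) - 1 = 7, so only the low three
   bits of the constructor value survive; for a signed 3-bit field in a
   32-bit mode the value is shifted left by 32 - 3 = 29 and then back right
   arithmetically, reproducing the sign extension a real bitfield load
   would perform.  */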
6574
6575 {
6576 enum machine_mode mode1;
6577 HOST_WIDE_INT bitsize, bitpos;
6578 tree offset;
6579 int volatilep = 0;
6580 unsigned int alignment;
6581 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6582 &mode1, &unsignedp, &volatilep,
6583 &alignment);
6584
6585 /* If we got back the original object, something is wrong. Perhaps
6586 we are evaluating an expression too early. In any event, don't
6587 infinitely recurse. */
6588 if (tem == exp)
6589 abort ();
6590
6591 /* If TEM's type is a union of variable size, pass TARGET to the inner
6592 computation, since it will need a temporary and TARGET is known
6593 to be usable as one. This occurs in unchecked conversion in Ada. */
6594
6595 op0 = expand_expr (tem,
6596 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6597 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6598 != INTEGER_CST)
6599 ? target : NULL_RTX),
6600 VOIDmode,
6601 (modifier == EXPAND_INITIALIZER
6602 || modifier == EXPAND_CONST_ADDRESS)
6603 ? modifier : EXPAND_NORMAL);
6604
6605 /* If this is a constant, put it into a register if it is a
6606 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6607 if (CONSTANT_P (op0))
6608 {
6609 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6610 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6611 && offset == 0)
6612 op0 = force_reg (mode, op0);
6613 else
6614 op0 = validize_mem (force_const_mem (mode, op0));
6615 }
6616
6617 if (offset != 0)
6618 {
6619 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6620
6621 /* If this object is in memory, put it into a register.
6622 This case can't occur in C, but can in Ada if we have
6623 unchecked conversion of an expression from a scalar type to
6624 an array or record type. */
6625 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6626 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6627 {
6628 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6629
6630 mark_temp_addr_taken (memloc);
6631 emit_move_insn (memloc, op0);
6632 op0 = memloc;
6633 }
6634
6635 if (GET_CODE (op0) != MEM)
6636 abort ();
6637
6638 if (GET_MODE (offset_rtx) != ptr_mode)
6639 {
6640 #ifdef POINTERS_EXTEND_UNSIGNED
6641 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6642 #else
6643 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6644 #endif
6645 }
6646
6647 /* A constant address in OP0 can have VOIDmode; we must not try
6648 to call force_reg in that case, so avoid it. */
6649 if (GET_CODE (op0) == MEM
6650 && GET_MODE (op0) == BLKmode
6651 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6652 && bitsize != 0
6653 && (bitpos % bitsize) == 0
6654 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6655 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6656 {
6657 rtx temp = change_address (op0, mode1,
6658 plus_constant (XEXP (op0, 0),
6659 (bitpos /
6660 BITS_PER_UNIT)));
6661 if (GET_CODE (XEXP (temp, 0)) == REG)
6662 op0 = temp;
6663 else
6664 op0 = change_address (op0, mode1,
6665 force_reg (GET_MODE (XEXP (temp, 0)),
6666 XEXP (temp, 0)));
6667 bitpos = 0;
6668 }
6669
6670
6671 op0 = change_address (op0, VOIDmode,
6672 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6673 force_reg (ptr_mode,
6674 offset_rtx)));
6675 }
6676
6677 /* Don't forget about volatility even if this is a bitfield. */
6678 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6679 {
6680 op0 = copy_rtx (op0);
6681 MEM_VOLATILE_P (op0) = 1;
6682 }
6683
6684 /* Check the access. */
6685 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6686 {
6687 enum memory_use_mode memory_usage;
6688 memory_usage = get_memory_usage_from_modifier (modifier);
6689
6690 if (memory_usage != MEMORY_USE_DONT)
6691 {
6692 rtx to;
6693 int size;
6694
6695 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6696 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6697
6698 /* Check the access right of the pointer. */
6699 if (size > BITS_PER_UNIT)
6700 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6701 to, Pmode,
6702 GEN_INT (size / BITS_PER_UNIT),
6703 TYPE_MODE (sizetype),
6704 GEN_INT (memory_usage),
6705 TYPE_MODE (integer_type_node));
6706 }
6707 }
6708
6709 /* In cases where an aligned union has an unaligned object
6710 as a field, we might be extracting a BLKmode value from
6711 an integer-mode (e.g., SImode) object. Handle this case
6712 by doing the extract into an object as wide as the field
6713 (which we know to be the width of a basic mode), then
6714 storing into memory, and changing the mode to BLKmode.
6715 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6716 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6717 if (mode1 == VOIDmode
6718 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6719 || (modifier != EXPAND_CONST_ADDRESS
6720 && modifier != EXPAND_INITIALIZER
6721 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6722 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6723 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6724 /* If the field isn't aligned enough to fetch as a memref,
6725 fetch it as a bit field. */
6726 || (mode1 != BLKmode
6727 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6728 && ((TYPE_ALIGN (TREE_TYPE (tem))
6729 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6730 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6731 /* If the type and the field are a constant size and the
6732 size of the type isn't the same size as the bitfield,
6733 we must use bitfield operations. */
6734 || ((bitsize >= 0
6735 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6736 == INTEGER_CST)
6737 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6738 bitsize)))))
6739 || (modifier != EXPAND_CONST_ADDRESS
6740 && modifier != EXPAND_INITIALIZER
6741 && mode == BLKmode
6742 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6743 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6744 || bitpos % TYPE_ALIGN (type) != 0)))
6745 {
6746 enum machine_mode ext_mode = mode;
6747
6748 if (ext_mode == BLKmode
6749 && ! (target != 0 && GET_CODE (op0) == MEM
6750 && GET_CODE (target) == MEM
6751 && bitpos % BITS_PER_UNIT == 0))
6752 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6753
6754 if (ext_mode == BLKmode)
6755 {
6756 /* In this case, BITPOS must start at a byte boundary and
6757 TARGET, if specified, must be a MEM. */
6758 if (GET_CODE (op0) != MEM
6759 || (target != 0 && GET_CODE (target) != MEM)
6760 || bitpos % BITS_PER_UNIT != 0)
6761 abort ();
6762
6763 op0 = change_address (op0, VOIDmode,
6764 plus_constant (XEXP (op0, 0),
6765 bitpos / BITS_PER_UNIT));
6766 if (target == 0)
6767 target = assign_temp (type, 0, 1, 1);
6768
6769 emit_block_move (target, op0,
6770 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6771 / BITS_PER_UNIT),
6772 1);
6773
6774 return target;
6775 }
6776
6777 op0 = validize_mem (op0);
6778
6779 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6780 mark_reg_pointer (XEXP (op0, 0), alignment);
6781
6782 op0 = extract_bit_field (op0, bitsize, bitpos,
6783 unsignedp, target, ext_mode, ext_mode,
6784 alignment,
6785 int_size_in_bytes (TREE_TYPE (tem)));
6786
6787 /* If the result is a record type and BITSIZE is narrower than
6788 the mode of OP0, an integral mode, and this is a big endian
6789 machine, we must put the field into the high-order bits. */
6790 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6791 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6792 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6793 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6794 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6795 - bitsize),
6796 op0, 1);
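/* Illustration (sizes assumed): extracting a 5-bit record field when OP0
   ended up in a 32-bit integer mode on a big-endian machine leaves the bits
   at the low end, so the shift above moves them left by 32 - 5 = 27 places
   into the high-order position the record layout expects.  */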
6797
6798 if (mode == BLKmode)
6799 {
6800 rtx new = assign_stack_temp (ext_mode,
6801 bitsize / BITS_PER_UNIT, 0);
6802
6803 emit_move_insn (new, op0);
6804 op0 = copy_rtx (new);
6805 PUT_MODE (op0, BLKmode);
6806 MEM_SET_IN_STRUCT_P (op0, 1);
6807 }
6808
6809 return op0;
6810 }
6811
6812 /* If the result is BLKmode, use that to access the object
6813 now as well. */
6814 if (mode == BLKmode)
6815 mode1 = BLKmode;
6816
6817 /* Get a reference to just this component. */
6818 if (modifier == EXPAND_CONST_ADDRESS
6819 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6820 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6821 (bitpos / BITS_PER_UNIT)));
6822 else
6823 op0 = change_address (op0, mode1,
6824 plus_constant (XEXP (op0, 0),
6825 (bitpos / BITS_PER_UNIT)));
6826
6827 if (GET_CODE (op0) == MEM)
6828 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6829
6830 if (GET_CODE (XEXP (op0, 0)) == REG)
6831 mark_reg_pointer (XEXP (op0, 0), alignment);
6832
6833 MEM_SET_IN_STRUCT_P (op0, 1);
6834 MEM_VOLATILE_P (op0) |= volatilep;
6835 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6836 || modifier == EXPAND_CONST_ADDRESS
6837 || modifier == EXPAND_INITIALIZER)
6838 return op0;
6839 else if (target == 0)
6840 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6841
6842 convert_move (target, op0, unsignedp);
6843 return target;
6844 }
6845
6846 /* Intended for a reference to a buffer of a file-object in Pascal.
6847 But it's not certain that a special tree code will really be
6848 necessary for these. INDIRECT_REF might work for them. */
6849 case BUFFER_REF:
6850 abort ();
6851
6852 case IN_EXPR:
6853 {
6854 /* Pascal set IN expression.
6855
6856 Algorithm:
6857 rlo = set_low - (set_low%bits_per_word);
6858 the_word = set [ (index - rlo)/bits_per_word ];
6859 bit_index = index % bits_per_word;
6860 bitmask = 1 << bit_index;
6861 return !!(the_word & bitmask); */
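/* Worked example (bounds and index assumed): with bits_per_word = 8,
   set_low = 3 and index = 13, rlo = 3 - (3 % 8) = 0,
   the_word = set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5 and
   bitmask = 1 << 5 = 32, so the result is nonzero exactly when bit 5 of
   the second byte of the set is set.  */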
6862
6863 tree set = TREE_OPERAND (exp, 0);
6864 tree index = TREE_OPERAND (exp, 1);
6865 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6866 tree set_type = TREE_TYPE (set);
6867 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6868 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6869 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6870 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6871 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6872 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6873 rtx setaddr = XEXP (setval, 0);
6874 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6875 rtx rlow;
6876 rtx diff, quo, rem, addr, bit, result;
6877
6878 preexpand_calls (exp);
6879
6880 /* If domain is empty, answer is no. Likewise if index is constant
6881 and out of bounds. */
6882 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6883 && TREE_CODE (set_low_bound) == INTEGER_CST
6884 && tree_int_cst_lt (set_high_bound, set_low_bound))
6885 || (TREE_CODE (index) == INTEGER_CST
6886 && TREE_CODE (set_low_bound) == INTEGER_CST
6887 && tree_int_cst_lt (index, set_low_bound))
6888 || (TREE_CODE (set_high_bound) == INTEGER_CST
6889 && TREE_CODE (index) == INTEGER_CST
6890 && tree_int_cst_lt (set_high_bound, index))))
6891 return const0_rtx;
6892
6893 if (target == 0)
6894 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6895
6896 /* If we get here, we have to generate the code for both cases
6897 (in range and out of range). */
6898
6899 op0 = gen_label_rtx ();
6900 op1 = gen_label_rtx ();
6901
6902 if (! (GET_CODE (index_val) == CONST_INT
6903 && GET_CODE (lo_r) == CONST_INT))
6904 {
6905 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6906 GET_MODE (index_val), iunsignedp, 0, op1);
6907 }
6908
6909 if (! (GET_CODE (index_val) == CONST_INT
6910 && GET_CODE (hi_r) == CONST_INT))
6911 {
6912 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6913 GET_MODE (index_val), iunsignedp, 0, op1);
6914 }
6915
6916 /* Calculate the element number of bit zero in the first word
6917 of the set. */
6918 if (GET_CODE (lo_r) == CONST_INT)
6919 rlow = GEN_INT (INTVAL (lo_r)
6920 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6921 else
6922 rlow = expand_binop (index_mode, and_optab, lo_r,
6923 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6924 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6925
6926 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6927 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6928
6929 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6930 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6931 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6932 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6933
6934 addr = memory_address (byte_mode,
6935 expand_binop (index_mode, add_optab, diff,
6936 setaddr, NULL_RTX, iunsignedp,
6937 OPTAB_LIB_WIDEN));
6938
6939 /* Extract the bit we want to examine. */
6940 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6941 gen_rtx_MEM (byte_mode, addr),
6942 make_tree (TREE_TYPE (index), rem),
6943 NULL_RTX, 1);
6944 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6945 GET_MODE (target) == byte_mode ? target : 0,
6946 1, OPTAB_LIB_WIDEN);
6947
6948 if (result != target)
6949 convert_move (target, result, 1);
6950
6951 /* Output the code to handle the out-of-range case. */
6952 emit_jump (op0);
6953 emit_label (op1);
6954 emit_move_insn (target, const0_rtx);
6955 emit_label (op0);
6956 return target;
6957 }
6958
6959 case WITH_CLEANUP_EXPR:
6960 if (RTL_EXPR_RTL (exp) == 0)
6961 {
6962 RTL_EXPR_RTL (exp)
6963 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6964 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6965
6966 /* That's it for this cleanup. */
6967 TREE_OPERAND (exp, 2) = 0;
6968 }
6969 return RTL_EXPR_RTL (exp);
6970
6971 case CLEANUP_POINT_EXPR:
6972 {
6973 /* Start a new binding layer that will keep track of all cleanup
6974 actions to be performed. */
6975 expand_start_bindings (2);
6976
6977 target_temp_slot_level = temp_slot_level;
6978
6979 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6980 /* If we're going to use this value, load it up now. */
6981 if (! ignore)
6982 op0 = force_not_mem (op0);
6983 preserve_temp_slots (op0);
6984 expand_end_bindings (NULL_TREE, 0, 0);
6985 }
6986 return op0;
6987
6988 case CALL_EXPR:
6989 /* Check for a built-in function. */
6990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6991 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6992 == FUNCTION_DECL)
6993 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6994 return expand_builtin (exp, target, subtarget, tmode, ignore);
6995
6996 /* If this call was expanded already by preexpand_calls,
6997 just return the result we got. */
6998 if (CALL_EXPR_RTL (exp) != 0)
6999 return CALL_EXPR_RTL (exp);
7000
7001 return expand_call (exp, target, ignore);
7002
7003 case NON_LVALUE_EXPR:
7004 case NOP_EXPR:
7005 case CONVERT_EXPR:
7006 case REFERENCE_EXPR:
7007 if (TREE_CODE (type) == UNION_TYPE)
7008 {
7009 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7010
7011 /* If both input and output are BLKmode, this conversion
7012 isn't actually doing anything unless we need to make the
7013 alignment stricter. */
7014 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7015 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7016 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7017 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7018 modifier);
7019
7020 if (target == 0)
7021 {
7022 if (mode != BLKmode)
7023 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7024 else
7025 target = assign_temp (type, 0, 1, 1);
7026 }
7027
7028 if (GET_CODE (target) == MEM)
7029 /* Store data into beginning of memory target. */
7030 store_expr (TREE_OPERAND (exp, 0),
7031 change_address (target, TYPE_MODE (valtype), 0), 0);
7032
7033 else if (GET_CODE (target) == REG)
7034 /* Store this field into a union of the proper type. */
7035 store_field (target,
7036 MIN ((int_size_in_bytes (TREE_TYPE
7037 (TREE_OPERAND (exp, 0)))
7038 * BITS_PER_UNIT),
7039 GET_MODE_BITSIZE (mode)),
7040 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7041 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7042 else
7043 abort ();
7044
7045 /* Return the entire union. */
7046 return target;
7047 }
7048
7049 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7050 {
7051 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7052 ro_modifier);
7053
7054 /* If the signedness of the conversion differs and OP0 is
7055 a promoted SUBREG, clear that indication since we now
7056 have to do the proper extension. */
7057 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7058 && GET_CODE (op0) == SUBREG)
7059 SUBREG_PROMOTED_VAR_P (op0) = 0;
7060
7061 return op0;
7062 }
7063
7064 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7065 if (GET_MODE (op0) == mode)
7066 return op0;
7067
7068 /* If OP0 is a constant, just convert it into the proper mode. */
7069 if (CONSTANT_P (op0))
7070 return
7071 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7072 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7073
7074 if (modifier == EXPAND_INITIALIZER)
7075 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7076
7077 if (target == 0)
7078 return
7079 convert_to_mode (mode, op0,
7080 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7081 else
7082 convert_move (target, op0,
7083 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7084 return target;
7085
7086 case PLUS_EXPR:
7087 /* We come here from MINUS_EXPR when the second operand is a
7088 constant. */
7089 plus_expr:
7090 this_optab = add_optab;
7091
7092 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7093 something else, make sure we add the register to the constant and
7094 then to the other thing. This case can occur during strength
7095 reduction and doing it this way will produce better code if the
7096 frame pointer or argument pointer is eliminated.
7097
7098 fold-const.c will ensure that the constant is always in the inner
7099 PLUS_EXPR, so the only case we need to do anything about is if
7100 sp, ap, or fp is our second argument, in which case we must swap
7101 the innermost first argument and our second argument. */
7102
7103 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7104 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7105 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7106 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7107 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7108 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7109 {
7110 tree t = TREE_OPERAND (exp, 1);
7111
7112 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7113 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7114 }
7115
7116 /* If the result is to be ptr_mode and we are adding an integer to
7117 something, we might be forming a constant. So try to use
7118 plus_constant. If it produces a sum and we can't accept it,
7119 use force_operand. This allows P = &ARR[const] to generate
7120 efficient code on machines where a SYMBOL_REF is not a valid
7121 address.
7122
7123 If this is an EXPAND_SUM call, always return the sum. */
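	 /* E.g. for P = &ARR[2] with 4-byte elements, the address of ARR
	    expands to (symbol_ref ARR) and plus_constant typically folds the
	    offset into a single (const (plus (symbol_ref ARR) (const_int 8))).  */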
7124 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7125 || mode == ptr_mode)
7126 {
7127 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7128 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7129 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7130 {
7131 rtx constant_part;
7132
7133 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7134 EXPAND_SUM);
7135 /* Use immed_double_const to ensure that the constant is
7136 truncated according to the mode of OP1, then sign extended
7137 to a HOST_WIDE_INT. Using the constant directly can result
7138 in non-canonical RTL in a 64x32 cross compile. */
7139 constant_part
7140 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7141 (HOST_WIDE_INT) 0,
7142 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7143 op1 = plus_constant (op1, INTVAL (constant_part));
7144 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7145 op1 = force_operand (op1, target);
7146 return op1;
7147 }
7148
7149 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7150 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7151 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7152 {
7153 rtx constant_part;
7154
7155 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7156 EXPAND_SUM);
7157 if (! CONSTANT_P (op0))
7158 {
7159 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7160 VOIDmode, modifier);
7161 /* Don't go to both_summands if modifier
7162 says it's not right to return a PLUS. */
7163 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7164 goto binop2;
7165 goto both_summands;
7166 }
7167 /* Use immed_double_const to ensure that the constant is
7168 		 truncated according to the mode of OP0, then sign extended
7169 to a HOST_WIDE_INT. Using the constant directly can result
7170 in non-canonical RTL in a 64x32 cross compile. */
7171 constant_part
7172 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7173 (HOST_WIDE_INT) 0,
7174 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7175 op0 = plus_constant (op0, INTVAL (constant_part));
7176 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7177 op0 = force_operand (op0, target);
7178 return op0;
7179 }
7180 }
7181
7182 /* No sense saving up arithmetic to be done
7183 if it's all in the wrong mode to form part of an address.
7184 And force_operand won't know whether to sign-extend or
7185 zero-extend. */
7186 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7187 || mode != ptr_mode)
7188 goto binop;
7189
7190 preexpand_calls (exp);
7191 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7192 subtarget = 0;
7193
7194 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7195 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7196
7197 both_summands:
7198 /* Make sure any term that's a sum with a constant comes last. */
7199 if (GET_CODE (op0) == PLUS
7200 && CONSTANT_P (XEXP (op0, 1)))
7201 {
7202 temp = op0;
7203 op0 = op1;
7204 op1 = temp;
7205 }
7206 /* If adding to a sum including a constant,
7207 associate it to put the constant outside. */
7208 if (GET_CODE (op1) == PLUS
7209 && CONSTANT_P (XEXP (op1, 1)))
7210 {
7211 rtx constant_term = const0_rtx;
7212
7213 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7214 if (temp != 0)
7215 op0 = temp;
7216 /* Ensure that MULT comes first if there is one. */
7217 else if (GET_CODE (op0) == MULT)
7218 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7219 else
7220 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7221
7222 /* Let's also eliminate constants from op0 if possible. */
7223 op0 = eliminate_constant_term (op0, &constant_term);
7224
7225 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7226 their sum should be a constant. Form it into OP1, since the
7227 result we want will then be OP0 + OP1. */
7228
7229 temp = simplify_binary_operation (PLUS, mode, constant_term,
7230 XEXP (op1, 1));
7231 if (temp != 0)
7232 op1 = temp;
7233 else
7234 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7235 }
7236
7237 /* Put a constant term last and put a multiplication first. */
7238 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7239 temp = op1, op1 = op0, op0 = temp;
7240
7241 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7242 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7243
7244 case MINUS_EXPR:
7245 /* For initializers, we are allowed to return a MINUS of two
7246 symbolic constants. Here we handle all cases when both operands
7247 are constant. */
7248 /* Handle difference of two symbolic constants,
7249 for the sake of an initializer. */
7250 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7251 && really_constant_p (TREE_OPERAND (exp, 0))
7252 && really_constant_p (TREE_OPERAND (exp, 1)))
7253 {
7254 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7255 VOIDmode, ro_modifier);
7256 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7257 VOIDmode, ro_modifier);
7258
7259 /* If the last operand is a CONST_INT, use plus_constant of
7260 the negated constant. Else make the MINUS. */
7261 if (GET_CODE (op1) == CONST_INT)
7262 return plus_constant (op0, - INTVAL (op1));
7263 else
7264 return gen_rtx_MINUS (mode, op0, op1);
7265 }
7266 /* Convert A - const to A + (-const). */
7267 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7268 {
7269 tree negated = fold (build1 (NEGATE_EXPR, type,
7270 TREE_OPERAND (exp, 1)));
7271
7272 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7273 /* If we can't negate the constant in TYPE, leave it alone and
7274 expand_binop will negate it for us. We used to try to do it
7275 here in the signed version of TYPE, but that doesn't work
7276 on POINTER_TYPEs. */;
7277 else
7278 {
7279 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7280 goto plus_expr;
7281 }
7282 }
7283 this_optab = sub_optab;
7284 goto binop;
7285
7286 case MULT_EXPR:
7287 preexpand_calls (exp);
7288 /* If first operand is constant, swap them.
7289 Thus the following special case checks need only
7290 check the second operand. */
7291 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7292 {
7293 register tree t1 = TREE_OPERAND (exp, 0);
7294 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7295 TREE_OPERAND (exp, 1) = t1;
7296 }
7297
7298 /* Attempt to return something suitable for generating an
7299 indexed address, for machines that support that. */
7300
7301 if (modifier == EXPAND_SUM && mode == ptr_mode
7302 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7303 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7304 {
7305 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7306 EXPAND_SUM);
7307
7308 /* Apply distributive law if OP0 is x+c. */
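	      /* E.g. (x + 4) * 8 becomes (plus (mult x 8) 32).  */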
7309 if (GET_CODE (op0) == PLUS
7310 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7311 return
7312 gen_rtx_PLUS
7313 (mode,
7314 gen_rtx_MULT
7315 (mode, XEXP (op0, 0),
7316 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7317 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7318 * INTVAL (XEXP (op0, 1))));
7319
7320 if (GET_CODE (op0) != REG)
7321 op0 = force_operand (op0, NULL_RTX);
7322 if (GET_CODE (op0) != REG)
7323 op0 = copy_to_mode_reg (mode, op0);
7324
7325 return
7326 gen_rtx_MULT (mode, op0,
7327 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7328 }
7329
7330 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7331 subtarget = 0;
7332
7333 /* Check for multiplying things that have been extended
7334 from a narrower type. If this machine supports multiplying
7335 in that narrower type with a result in the desired type,
7336 do it that way, and avoid the explicit type-conversion. */
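	 /* E.g. (int) s1 * (int) s2, where S1 and S2 are shorts, can use a
	    short-by-short widening multiply that produces an int result.  */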
7337 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7338 && TREE_CODE (type) == INTEGER_TYPE
7339 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7340 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7341 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7342 && int_fits_type_p (TREE_OPERAND (exp, 1),
7343 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7344 /* Don't use a widening multiply if a shift will do. */
7345 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7346 > HOST_BITS_PER_WIDE_INT)
7347 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7348 ||
7349 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7350 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7351 ==
7352 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7353 /* If both operands are extended, they must either both
7354 be zero-extended or both be sign-extended. */
7355 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7356 ==
7357 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7358 {
7359 enum machine_mode innermode
7360 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7361 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7362 ? smul_widen_optab : umul_widen_optab);
7363 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7364 ? umul_widen_optab : smul_widen_optab);
7365 if (mode == GET_MODE_WIDER_MODE (innermode))
7366 {
7367 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7368 {
7369 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7370 NULL_RTX, VOIDmode, 0);
7371 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7372 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7373 VOIDmode, 0);
7374 else
7375 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7376 NULL_RTX, VOIDmode, 0);
7377 goto binop2;
7378 }
7379 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7380 && innermode == word_mode)
7381 {
7382 rtx htem;
7383 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7384 NULL_RTX, VOIDmode, 0);
7385 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7386 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7387 VOIDmode, 0);
7388 else
7389 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7390 NULL_RTX, VOIDmode, 0);
7391 temp = expand_binop (mode, other_optab, op0, op1, target,
7392 unsignedp, OPTAB_LIB_WIDEN);
7393 htem = expand_mult_highpart_adjust (innermode,
7394 gen_highpart (innermode, temp),
7395 op0, op1,
7396 gen_highpart (innermode, temp),
7397 unsignedp);
7398 emit_move_insn (gen_highpart (innermode, temp), htem);
7399 return temp;
7400 }
7401 }
7402 }
7403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7405 return expand_mult (mode, op0, op1, target, unsignedp);
7406
7407 case TRUNC_DIV_EXPR:
7408 case FLOOR_DIV_EXPR:
7409 case CEIL_DIV_EXPR:
7410 case ROUND_DIV_EXPR:
7411 case EXACT_DIV_EXPR:
7412 preexpand_calls (exp);
7413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7414 subtarget = 0;
7415 /* Possible optimization: compute the dividend with EXPAND_SUM
7416 	 then, if the divisor is constant, we can optimize the case
7417 	 where some terms of the dividend have coefficients divisible by it.  */
7418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7419 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7420 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7421
7422 case RDIV_EXPR:
7423 this_optab = flodiv_optab;
7424 goto binop;
7425
7426 case TRUNC_MOD_EXPR:
7427 case FLOOR_MOD_EXPR:
7428 case CEIL_MOD_EXPR:
7429 case ROUND_MOD_EXPR:
7430 preexpand_calls (exp);
7431 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7432 subtarget = 0;
7433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7435 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7436
7437 case FIX_ROUND_EXPR:
7438 case FIX_FLOOR_EXPR:
7439 case FIX_CEIL_EXPR:
7440 abort (); /* Not used for C. */
7441
7442 case FIX_TRUNC_EXPR:
7443 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7444 if (target == 0)
7445 target = gen_reg_rtx (mode);
7446 expand_fix (target, op0, unsignedp);
7447 return target;
7448
7449 case FLOAT_EXPR:
7450 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7451 if (target == 0)
7452 target = gen_reg_rtx (mode);
7453 /* expand_float can't figure out what to do if FROM has VOIDmode.
7454 So give it the correct mode. With -O, cse will optimize this. */
7455 if (GET_MODE (op0) == VOIDmode)
7456 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7457 op0);
7458 expand_float (target, op0,
7459 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7460 return target;
7461
7462 case NEGATE_EXPR:
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7464 temp = expand_unop (mode, neg_optab, op0, target, 0);
7465 if (temp == 0)
7466 abort ();
7467 return temp;
7468
7469 case ABS_EXPR:
7470 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7471
7472 /* Handle complex values specially. */
7473 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7474 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7475 return expand_complex_abs (mode, op0, target, unsignedp);
7476
7477 /* Unsigned abs is simply the operand. Testing here means we don't
7478 risk generating incorrect code below. */
7479 if (TREE_UNSIGNED (type))
7480 return op0;
7481
7482 return expand_abs (mode, op0, target,
7483 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7484
7485 case MAX_EXPR:
7486 case MIN_EXPR:
7487 target = original_target;
7488 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7489 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7490 || GET_MODE (target) != mode
7491 || (GET_CODE (target) == REG
7492 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7493 target = gen_reg_rtx (mode);
7494 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7496
7497 /* First try to do it with a special MIN or MAX instruction.
7498 If that does not win, use a conditional jump to select the proper
7499 value. */
7500 this_optab = (TREE_UNSIGNED (type)
7501 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7502 : (code == MIN_EXPR ? smin_optab : smax_optab));
7503
7504 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7505 OPTAB_WIDEN);
7506 if (temp != 0)
7507 return temp;
7508
7509 /* At this point, a MEM target is no longer useful; we will get better
7510 code without it. */
7511
7512 if (GET_CODE (target) == MEM)
7513 target = gen_reg_rtx (mode);
7514
7515 if (target != op0)
7516 emit_move_insn (target, op0);
7517
7518 op0 = gen_label_rtx ();
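      /* Branch to OP0, keeping the value already in TARGET, when TARGET is
	 already the desired extreme; otherwise fall through and overwrite
	 TARGET with OP1.  */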
7519
7520 /* If this mode is an integer too wide to compare properly,
7521 compare word by word. Rely on cse to optimize constant cases. */
7522 if (GET_MODE_CLASS (mode) == MODE_INT
7523 && ! can_compare_p (GE, mode, ccp_jump))
7524 {
7525 if (code == MAX_EXPR)
7526 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7527 target, op1, NULL_RTX, op0);
7528 else
7529 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7530 op1, target, NULL_RTX, op0);
7531 }
7532 else
7533 {
7534 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7535 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7536 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7537 op0);
7538 }
7539 emit_move_insn (target, op1);
7540 emit_label (op0);
7541 return target;
7542
7543 case BIT_NOT_EXPR:
7544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7545 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7546 if (temp == 0)
7547 abort ();
7548 return temp;
7549
7550 case FFS_EXPR:
7551 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7552 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7553 if (temp == 0)
7554 abort ();
7555 return temp;
7556
7557 /* ??? Can optimize bitwise operations with one arg constant.
7558 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7559 and (a bitwise1 b) bitwise2 b (etc)
7560 	 but that is probably not worthwhile.  */
7561
7562 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7563 boolean values when we want in all cases to compute both of them. In
7564 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7565 as actual zero-or-1 values and then bitwise anding. In cases where
7566 there cannot be any side effects, better code would be made by
7567 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7568 how to recognize those cases. */
7569
7570 case TRUTH_AND_EXPR:
7571 case BIT_AND_EXPR:
7572 this_optab = and_optab;
7573 goto binop;
7574
7575 case TRUTH_OR_EXPR:
7576 case BIT_IOR_EXPR:
7577 this_optab = ior_optab;
7578 goto binop;
7579
7580 case TRUTH_XOR_EXPR:
7581 case BIT_XOR_EXPR:
7582 this_optab = xor_optab;
7583 goto binop;
7584
7585 case LSHIFT_EXPR:
7586 case RSHIFT_EXPR:
7587 case LROTATE_EXPR:
7588 case RROTATE_EXPR:
7589 preexpand_calls (exp);
7590 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7591 subtarget = 0;
7592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7593 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7594 unsignedp);
7595
7596 /* Could determine the answer when only additive constants differ. Also,
7597 the addition of one can be handled by changing the condition. */
7598 case LT_EXPR:
7599 case LE_EXPR:
7600 case GT_EXPR:
7601 case GE_EXPR:
7602 case EQ_EXPR:
7603 case NE_EXPR:
7604 case UNORDERED_EXPR:
7605 case ORDERED_EXPR:
7606 case UNLT_EXPR:
7607 case UNLE_EXPR:
7608 case UNGT_EXPR:
7609 case UNGE_EXPR:
7610 case UNEQ_EXPR:
7611 preexpand_calls (exp);
7612 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7613 if (temp != 0)
7614 return temp;
7615
7616 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7617 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7618 && original_target
7619 && GET_CODE (original_target) == REG
7620 && (GET_MODE (original_target)
7621 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7622 {
7623 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7624 VOIDmode, 0);
7625
7626 if (temp != original_target)
7627 temp = copy_to_reg (temp);
7628
7629 op1 = gen_label_rtx ();
7630 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7631 GET_MODE (temp), unsignedp, 0, op1);
7632 emit_move_insn (temp, const1_rtx);
7633 emit_label (op1);
7634 return temp;
7635 }
7636
7637 /* If no set-flag instruction, must generate a conditional
7638 store into a temporary variable. Drop through
7639 and handle this like && and ||. */
7640
7641 case TRUTH_ANDIF_EXPR:
7642 case TRUTH_ORIF_EXPR:
7643 if (! ignore
7644 && (target == 0 || ! safe_from_p (target, exp, 1)
7645 /* Make sure we don't have a hard reg (such as function's return
7646 value) live across basic blocks, if not optimizing. */
7647 || (!optimize && GET_CODE (target) == REG
7648 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7649 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7650
7651 if (target)
7652 emit_clr_insn (target);
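      /* TARGET starts out zero; if EXP is true we fall through and store 1,
	 otherwise the jump to OP1 skips that store.  */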
7653
7654 op1 = gen_label_rtx ();
7655 jumpifnot (exp, op1);
7656
7657 if (target)
7658 emit_0_to_1_insn (target);
7659
7660 emit_label (op1);
7661 return ignore ? const0_rtx : target;
7662
7663 case TRUTH_NOT_EXPR:
7664 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7665 /* The parser is careful to generate TRUTH_NOT_EXPR
7666 only with operands that are always zero or one. */
7667 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7668 target, 1, OPTAB_LIB_WIDEN);
7669 if (temp == 0)
7670 abort ();
7671 return temp;
7672
7673 case COMPOUND_EXPR:
7674 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7675 emit_queue ();
7676 return expand_expr (TREE_OPERAND (exp, 1),
7677 (ignore ? const0_rtx : target),
7678 VOIDmode, 0);
7679
7680 case COND_EXPR:
7681 /* If we would have a "singleton" (see below) were it not for a
7682 conversion in each arm, bring that conversion back out. */
7683 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7684 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7685 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7686 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7687 {
7688 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7689 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7690
7691 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7692 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7693 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7694 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7695 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7696 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7697 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7698 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7699 return expand_expr (build1 (NOP_EXPR, type,
7700 build (COND_EXPR, TREE_TYPE (true),
7701 TREE_OPERAND (exp, 0),
7702 true, false)),
7703 target, tmode, modifier);
7704 }
7705
7706 {
7707 /* Note that COND_EXPRs whose type is a structure or union
7708 are required to be constructed to contain assignments of
7709 a temporary variable, so that we can evaluate them here
7710 for side effect only. If type is void, we must do likewise. */
7711
7712 /* If an arm of the branch requires a cleanup,
7713 only that cleanup is performed. */
7714
7715 tree singleton = 0;
7716 tree binary_op = 0, unary_op = 0;
7717
7718 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7719 convert it to our mode, if necessary. */
7720 if (integer_onep (TREE_OPERAND (exp, 1))
7721 && integer_zerop (TREE_OPERAND (exp, 2))
7722 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7723 {
7724 if (ignore)
7725 {
7726 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7727 ro_modifier);
7728 return const0_rtx;
7729 }
7730
7731 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7732 if (GET_MODE (op0) == mode)
7733 return op0;
7734
7735 if (target == 0)
7736 target = gen_reg_rtx (mode);
7737 convert_move (target, op0, unsignedp);
7738 return target;
7739 }
7740
7741 /* Check for X ? A + B : A. If we have this, we can copy A to the
7742 output and conditionally add B. Similarly for unary operations.
7743 Don't do this if X has side-effects because those side effects
7744 might affect A or B and the "?" operation is a sequence point in
7745 ANSI. (operand_equal_p tests for side effects.) */
7746
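	/* E.g. for X ? A + 1 : A, SINGLETON is A and BINARY_OP is A + 1;
	   for X ? -A : A, SINGLETON is A and UNARY_OP is -A.  */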
7747 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7748 && operand_equal_p (TREE_OPERAND (exp, 2),
7749 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7750 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7751 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7752 && operand_equal_p (TREE_OPERAND (exp, 1),
7753 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7754 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7755 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7756 && operand_equal_p (TREE_OPERAND (exp, 2),
7757 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7758 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7759 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7760 && operand_equal_p (TREE_OPERAND (exp, 1),
7761 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7762 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7763
7764 /* If we are not to produce a result, we have no target. Otherwise,
7765 if a target was specified use it; it will not be used as an
7766 intermediate target unless it is safe. If no target, use a
7767 temporary. */
7768
7769 if (ignore)
7770 temp = 0;
7771 else if (original_target
7772 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7773 || (singleton && GET_CODE (original_target) == REG
7774 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7775 && original_target == var_rtx (singleton)))
7776 && GET_MODE (original_target) == mode
7777 #ifdef HAVE_conditional_move
7778 && (! can_conditionally_move_p (mode)
7779 || GET_CODE (original_target) == REG
7780 || TREE_ADDRESSABLE (type))
7781 #endif
7782 && ! (GET_CODE (original_target) == MEM
7783 && MEM_VOLATILE_P (original_target)))
7784 temp = original_target;
7785 else if (TREE_ADDRESSABLE (type))
7786 abort ();
7787 else
7788 temp = assign_temp (type, 0, 0, 1);
7789
7790 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7791 do the test of X as a store-flag operation, do this as
7792 A + ((X != 0) << log C). Similarly for other simple binary
7793 operators. Only do for C == 1 if BRANCH_COST is low. */
7794 if (temp && singleton && binary_op
7795 && (TREE_CODE (binary_op) == PLUS_EXPR
7796 || TREE_CODE (binary_op) == MINUS_EXPR
7797 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7798 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7799 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7800 : integer_onep (TREE_OPERAND (binary_op, 1)))
7801 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7802 {
7803 rtx result;
7804 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7805 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7806 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7807 : xor_optab);
7808
7809 /* If we had X ? A : A + 1, do this as A + (X == 0).
7810
7811 We have to invert the truth value here and then put it
7812 back later if do_store_flag fails. We cannot simply copy
7813 TREE_OPERAND (exp, 0) to another variable and modify that
7814 because invert_truthvalue can modify the tree pointed to
7815 by its argument. */
7816 if (singleton == TREE_OPERAND (exp, 1))
7817 TREE_OPERAND (exp, 0)
7818 = invert_truthvalue (TREE_OPERAND (exp, 0));
7819
7820 result = do_store_flag (TREE_OPERAND (exp, 0),
7821 (safe_from_p (temp, singleton, 1)
7822 ? temp : NULL_RTX),
7823 mode, BRANCH_COST <= 1);
7824
7825 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7826 result = expand_shift (LSHIFT_EXPR, mode, result,
7827 build_int_2 (tree_log2
7828 (TREE_OPERAND
7829 (binary_op, 1)),
7830 0),
7831 (safe_from_p (temp, singleton, 1)
7832 ? temp : NULL_RTX), 0);
7833
7834 if (result)
7835 {
7836 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7837 return expand_binop (mode, boptab, op1, result, temp,
7838 unsignedp, OPTAB_LIB_WIDEN);
7839 }
7840 else if (singleton == TREE_OPERAND (exp, 1))
7841 TREE_OPERAND (exp, 0)
7842 = invert_truthvalue (TREE_OPERAND (exp, 0));
7843 }
7844
7845 do_pending_stack_adjust ();
7846 NO_DEFER_POP;
7847 op0 = gen_label_rtx ();
7848
7849 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7850 {
7851 if (temp != 0)
7852 {
7853 /* If the target conflicts with the other operand of the
7854 binary op, we can't use it. Also, we can't use the target
7855 if it is a hard register, because evaluating the condition
7856 might clobber it. */
7857 if ((binary_op
7858 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7859 || (GET_CODE (temp) == REG
7860 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7861 temp = gen_reg_rtx (mode);
7862 store_expr (singleton, temp, 0);
7863 }
7864 else
7865 expand_expr (singleton,
7866 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7867 if (singleton == TREE_OPERAND (exp, 1))
7868 jumpif (TREE_OPERAND (exp, 0), op0);
7869 else
7870 jumpifnot (TREE_OPERAND (exp, 0), op0);
7871
7872 start_cleanup_deferral ();
7873 if (binary_op && temp == 0)
7874 /* Just touch the other operand. */
7875 expand_expr (TREE_OPERAND (binary_op, 1),
7876 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7877 else if (binary_op)
7878 store_expr (build (TREE_CODE (binary_op), type,
7879 make_tree (type, temp),
7880 TREE_OPERAND (binary_op, 1)),
7881 temp, 0);
7882 else
7883 store_expr (build1 (TREE_CODE (unary_op), type,
7884 make_tree (type, temp)),
7885 temp, 0);
7886 op1 = op0;
7887 }
7888 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7889 comparison operator. If we have one of these cases, set the
7890 output to A, branch on A (cse will merge these two references),
7891 then set the output to FOO. */
7892 else if (temp
7893 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7894 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7895 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7896 TREE_OPERAND (exp, 1), 0)
7897 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7898 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7899 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7900 {
7901 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7902 temp = gen_reg_rtx (mode);
7903 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7904 jumpif (TREE_OPERAND (exp, 0), op0);
7905
7906 start_cleanup_deferral ();
7907 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7908 op1 = op0;
7909 }
7910 else if (temp
7911 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7912 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7913 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7914 TREE_OPERAND (exp, 2), 0)
7915 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7916 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7917 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7918 {
7919 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7920 temp = gen_reg_rtx (mode);
7921 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7922 jumpifnot (TREE_OPERAND (exp, 0), op0);
7923
7924 start_cleanup_deferral ();
7925 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7926 op1 = op0;
7927 }
7928 else
7929 {
7930 op1 = gen_label_rtx ();
7931 jumpifnot (TREE_OPERAND (exp, 0), op0);
7932
7933 start_cleanup_deferral ();
7934
7935 /* One branch of the cond can be void, if it never returns. For
7936 	       example A ? throw : E.  */
7937 if (temp != 0
7938 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7939 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7940 else
7941 expand_expr (TREE_OPERAND (exp, 1),
7942 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7943 end_cleanup_deferral ();
7944 emit_queue ();
7945 emit_jump_insn (gen_jump (op1));
7946 emit_barrier ();
7947 emit_label (op0);
7948 start_cleanup_deferral ();
7949 if (temp != 0
7950 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7951 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7952 else
7953 expand_expr (TREE_OPERAND (exp, 2),
7954 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7955 }
7956
7957 end_cleanup_deferral ();
7958
7959 emit_queue ();
7960 emit_label (op1);
7961 OK_DEFER_POP;
7962
7963 return temp;
7964 }
7965
7966 case TARGET_EXPR:
7967 {
7968 /* Something needs to be initialized, but we didn't know
7969 where that thing was when building the tree. For example,
7970 it could be the return value of a function, or a parameter
7971 	     to a function which is laid out on the stack, or a temporary
7972 variable which must be passed by reference.
7973
7974 We guarantee that the expression will either be constructed
7975 or copied into our original target. */
7976
7977 tree slot = TREE_OPERAND (exp, 0);
7978 tree cleanups = NULL_TREE;
7979 tree exp1;
7980
7981 if (TREE_CODE (slot) != VAR_DECL)
7982 abort ();
7983
7984 if (! ignore)
7985 target = original_target;
7986
7987 /* Set this here so that if we get a target that refers to a
7988 register variable that's already been used, put_reg_into_stack
7989 knows that it should fix up those uses. */
7990 TREE_USED (slot) = 1;
7991
7992 if (target == 0)
7993 {
7994 if (DECL_RTL (slot) != 0)
7995 {
7996 target = DECL_RTL (slot);
7997 		/* If we have already expanded the slot, don't do
7998 it again. (mrs) */
7999 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8000 return target;
8001 }
8002 else
8003 {
8004 target = assign_temp (type, 2, 0, 1);
8005 /* All temp slots at this level must not conflict. */
8006 preserve_temp_slots (target);
8007 DECL_RTL (slot) = target;
8008 if (TREE_ADDRESSABLE (slot))
8009 {
8010 TREE_ADDRESSABLE (slot) = 0;
8011 mark_addressable (slot);
8012 }
8013
8014 /* Since SLOT is not known to the called function
8015 to belong to its stack frame, we must build an explicit
8016 cleanup. This case occurs when we must build up a reference
8017 to pass the reference as an argument. In this case,
8018 it is very likely that such a reference need not be
8019 built here. */
8020
8021 if (TREE_OPERAND (exp, 2) == 0)
8022 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8023 cleanups = TREE_OPERAND (exp, 2);
8024 }
8025 }
8026 else
8027 {
8028 	    /* This case does occur when expanding a parameter which
8029 needs to be constructed on the stack. The target
8030 is the actual stack address that we want to initialize.
8031 The function we call will perform the cleanup in this case. */
8032
8033 /* If we have already assigned it space, use that space,
8034 	       not the target that we were passed in, as our target
8035 parameter is only a hint. */
8036 if (DECL_RTL (slot) != 0)
8037 {
8038 target = DECL_RTL (slot);
8039 		/* If we have already expanded the slot, don't do
8040 it again. (mrs) */
8041 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8042 return target;
8043 }
8044 else
8045 {
8046 DECL_RTL (slot) = target;
8047 /* If we must have an addressable slot, then make sure that
8048 the RTL that we just stored in slot is OK. */
8049 if (TREE_ADDRESSABLE (slot))
8050 {
8051 TREE_ADDRESSABLE (slot) = 0;
8052 mark_addressable (slot);
8053 }
8054 }
8055 }
8056
8057 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8058 /* Mark it as expanded. */
8059 TREE_OPERAND (exp, 1) = NULL_TREE;
8060
8061 store_expr (exp1, target, 0);
8062
8063 expand_decl_cleanup (NULL_TREE, cleanups);
8064
8065 return target;
8066 }
8067
8068 case INIT_EXPR:
8069 {
8070 tree lhs = TREE_OPERAND (exp, 0);
8071 tree rhs = TREE_OPERAND (exp, 1);
8072 tree noncopied_parts = 0;
8073 tree lhs_type = TREE_TYPE (lhs);
8074
8075 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8076 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8077 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8078 TYPE_NONCOPIED_PARTS (lhs_type));
8079 while (noncopied_parts != 0)
8080 {
8081 expand_assignment (TREE_VALUE (noncopied_parts),
8082 TREE_PURPOSE (noncopied_parts), 0, 0);
8083 noncopied_parts = TREE_CHAIN (noncopied_parts);
8084 }
8085 return temp;
8086 }
8087
8088 case MODIFY_EXPR:
8089 {
8090 /* If lhs is complex, expand calls in rhs before computing it.
8091 That's so we don't compute a pointer and save it over a call.
8092 If lhs is simple, compute it first so we can give it as a
8093 target if the rhs is just a call. This avoids an extra temp and copy
8094 and that prevents a partial-subsumption which makes bad code.
8095 Actually we could treat component_ref's of vars like vars. */
8096
8097 tree lhs = TREE_OPERAND (exp, 0);
8098 tree rhs = TREE_OPERAND (exp, 1);
8099 tree noncopied_parts = 0;
8100 tree lhs_type = TREE_TYPE (lhs);
8101
8102 temp = 0;
8103
8104 if (TREE_CODE (lhs) != VAR_DECL
8105 && TREE_CODE (lhs) != RESULT_DECL
8106 && TREE_CODE (lhs) != PARM_DECL
8107 && ! (TREE_CODE (lhs) == INDIRECT_REF
8108 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8109 preexpand_calls (exp);
8110
8111 /* Check for |= or &= of a bitfield of size one into another bitfield
8112 of size 1. In this case, (unless we need the result of the
8113 assignment) we can do this more efficiently with a
8114 test followed by an assignment, if necessary.
8115
8116 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8117 things change so we do, this code should be enhanced to
8118 support it. */
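	/* E.g. for A.X |= B.Y we branch over the store when B.Y is zero
	   (A.X is unchanged) and otherwise set A.X to 1; for &= we branch
	   when B.Y is nonzero and otherwise clear A.X.  */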
8119 if (ignore
8120 && TREE_CODE (lhs) == COMPONENT_REF
8121 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8122 || TREE_CODE (rhs) == BIT_AND_EXPR)
8123 && TREE_OPERAND (rhs, 0) == lhs
8124 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8125 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8126 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8127 {
8128 rtx label = gen_label_rtx ();
8129
8130 do_jump (TREE_OPERAND (rhs, 1),
8131 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8132 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8133 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8134 (TREE_CODE (rhs) == BIT_IOR_EXPR
8135 ? integer_one_node
8136 : integer_zero_node)),
8137 0, 0);
8138 do_pending_stack_adjust ();
8139 emit_label (label);
8140 return const0_rtx;
8141 }
8142
8143 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8144 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8145 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8146 TYPE_NONCOPIED_PARTS (lhs_type));
8147
8148 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8149 while (noncopied_parts != 0)
8150 {
8151 expand_assignment (TREE_PURPOSE (noncopied_parts),
8152 TREE_VALUE (noncopied_parts), 0, 0);
8153 noncopied_parts = TREE_CHAIN (noncopied_parts);
8154 }
8155 return temp;
8156 }
8157
8158 case RETURN_EXPR:
8159 if (!TREE_OPERAND (exp, 0))
8160 expand_null_return ();
8161 else
8162 expand_return (TREE_OPERAND (exp, 0));
8163 return const0_rtx;
8164
8165 case PREINCREMENT_EXPR:
8166 case PREDECREMENT_EXPR:
8167 return expand_increment (exp, 0, ignore);
8168
8169 case POSTINCREMENT_EXPR:
8170 case POSTDECREMENT_EXPR:
8171 /* Faster to treat as pre-increment if result is not used. */
8172 return expand_increment (exp, ! ignore, ignore);
8173
8174 case ADDR_EXPR:
8175 /* If nonzero, TEMP will be set to the address of something that might
8176 be a MEM corresponding to a stack slot. */
8177 temp = 0;
8178
8179 /* Are we taking the address of a nested function? */
8180 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8181 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8182 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8183 && ! TREE_STATIC (exp))
8184 {
8185 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8186 op0 = force_operand (op0, target);
8187 }
8188 /* If we are taking the address of something erroneous, just
8189 return a zero. */
8190 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8191 return const0_rtx;
8192 else
8193 {
8194 /* We make sure to pass const0_rtx down if we came in with
8195 ignore set, to avoid doing the cleanups twice for something. */
8196 op0 = expand_expr (TREE_OPERAND (exp, 0),
8197 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8198 (modifier == EXPAND_INITIALIZER
8199 ? modifier : EXPAND_CONST_ADDRESS));
8200
8201 /* If we are going to ignore the result, OP0 will have been set
8202 to const0_rtx, so just return it. Don't get confused and
8203 think we are taking the address of the constant. */
8204 if (ignore)
8205 return op0;
8206
8207 op0 = protect_from_queue (op0, 0);
8208
8209 /* We would like the object in memory. If it is a constant, we can
8210 have it be statically allocated into memory. For a non-constant,
8211 we need to allocate some memory and store the value into it. */
8212
8213 if (CONSTANT_P (op0))
8214 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8215 op0);
8216 else if (GET_CODE (op0) == MEM)
8217 {
8218 mark_temp_addr_taken (op0);
8219 temp = XEXP (op0, 0);
8220 }
8221
8222 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8223 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8224 {
8225 	    /* If this object is in a register, it must not
8226 	       be BLKmode.  */
8227 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8228 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8229
8230 mark_temp_addr_taken (memloc);
8231 emit_move_insn (memloc, op0);
8232 op0 = memloc;
8233 }
8234
8235 if (GET_CODE (op0) != MEM)
8236 abort ();
8237
8238 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8239 {
8240 temp = XEXP (op0, 0);
8241 #ifdef POINTERS_EXTEND_UNSIGNED
8242 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8243 && mode == ptr_mode)
8244 temp = convert_memory_address (ptr_mode, temp);
8245 #endif
8246 return temp;
8247 }
8248
8249 op0 = force_operand (XEXP (op0, 0), target);
8250 }
8251
8252 if (flag_force_addr && GET_CODE (op0) != REG)
8253 op0 = force_reg (Pmode, op0);
8254
8255 if (GET_CODE (op0) == REG
8256 && ! REG_USERVAR_P (op0))
8257 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8258
8259 /* If we might have had a temp slot, add an equivalent address
8260 for it. */
8261 if (temp != 0)
8262 update_temp_slot_address (temp, op0);
8263
8264 #ifdef POINTERS_EXTEND_UNSIGNED
8265 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8266 && mode == ptr_mode)
8267 op0 = convert_memory_address (ptr_mode, op0);
8268 #endif
8269
8270 return op0;
8271
8272 case ENTRY_VALUE_EXPR:
8273 abort ();
8274
8275 /* COMPLEX type for Extended Pascal & Fortran */
8276 case COMPLEX_EXPR:
8277 {
8278 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8279 rtx insns;
8280
8281 /* Get the rtx code of the operands. */
8282 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8283 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8284
8285 if (! target)
8286 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8287
8288 start_sequence ();
8289
8290 /* Move the real (op0) and imaginary (op1) parts to their location. */
8291 emit_move_insn (gen_realpart (mode, target), op0);
8292 emit_move_insn (gen_imagpart (mode, target), op1);
8293
8294 insns = get_insns ();
8295 end_sequence ();
8296
8297 /* Complex construction should appear as a single unit. */
8298 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8299 each with a separate pseudo as destination.
8300 It's not correct for flow to treat them as a unit. */
8301 if (GET_CODE (target) != CONCAT)
8302 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8303 else
8304 emit_insns (insns);
8305
8306 return target;
8307 }
8308
8309 case REALPART_EXPR:
8310 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8311 return gen_realpart (mode, op0);
8312
8313 case IMAGPART_EXPR:
8314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8315 return gen_imagpart (mode, op0);
8316
8317 case CONJ_EXPR:
8318 {
8319 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8320 rtx imag_t;
8321 rtx insns;
8322
8323 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8324
8325 if (! target)
8326 target = gen_reg_rtx (mode);
8327
8328 start_sequence ();
8329
8330 /* Store the realpart and the negated imagpart to target. */
8331 emit_move_insn (gen_realpart (partmode, target),
8332 gen_realpart (partmode, op0));
8333
8334 imag_t = gen_imagpart (partmode, target);
8335 temp = expand_unop (partmode, neg_optab,
8336 gen_imagpart (partmode, op0), imag_t, 0);
8337 if (temp != imag_t)
8338 emit_move_insn (imag_t, temp);
8339
8340 insns = get_insns ();
8341 end_sequence ();
8342
8343 	/* Conjugate should appear as a single unit.
8344 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8345 each with a separate pseudo as destination.
8346 It's not correct for flow to treat them as a unit. */
8347 if (GET_CODE (target) != CONCAT)
8348 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8349 else
8350 emit_insns (insns);
8351
8352 return target;
8353 }
8354
8355 case TRY_CATCH_EXPR:
8356 {
8357 tree handler = TREE_OPERAND (exp, 1);
8358
8359 expand_eh_region_start ();
8360
8361 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8362
8363 expand_eh_region_end (handler);
8364
8365 return op0;
8366 }
8367
8368 case TRY_FINALLY_EXPR:
8369 {
8370 tree try_block = TREE_OPERAND (exp, 0);
8371 tree finally_block = TREE_OPERAND (exp, 1);
8372 rtx finally_label = gen_label_rtx ();
8373 rtx done_label = gen_label_rtx ();
8374 rtx return_link = gen_reg_rtx (Pmode);
8375 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8376 (tree) finally_label, (tree) return_link);
8377 TREE_SIDE_EFFECTS (cleanup) = 1;
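	/* The cleanup is a subroutine call to the FINALLY block: it jumps to
	   FINALLY_LABEL and comes back through RETURN_LINK (see the
	   GOTO_SUBROUTINE_EXPR case below).  */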
8378
8379 /* Start a new binding layer that will keep track of all cleanup
8380 actions to be performed. */
8381 expand_start_bindings (2);
8382
8383 target_temp_slot_level = temp_slot_level;
8384
8385 expand_decl_cleanup (NULL_TREE, cleanup);
8386 op0 = expand_expr (try_block, target, tmode, modifier);
8387
8388 preserve_temp_slots (op0);
8389 expand_end_bindings (NULL_TREE, 0, 0);
8390 emit_jump (done_label);
8391 emit_label (finally_label);
8392 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8393 emit_indirect_jump (return_link);
8394 emit_label (done_label);
8395 return op0;
8396 }
8397
8398 case GOTO_SUBROUTINE_EXPR:
8399 {
8400 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8401 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8402 rtx return_address = gen_label_rtx ();
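	/* Record where to come back to in RETURN_LINK, jump to the
	   subroutine, and emit the label we will return through.  */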
8403 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8404 emit_jump (subr);
8405 emit_label (return_address);
8406 return const0_rtx;
8407 }
8408
8409 case POPDCC_EXPR:
8410 {
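	/* Pop the top record off the dynamic cleanup chain.  */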
8411 rtx dcc = get_dynamic_cleanup_chain ();
8412 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8413 return const0_rtx;
8414 }
8415
8416 case POPDHC_EXPR:
8417 {
8418 rtx dhc = get_dynamic_handler_chain ();
8419 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8420 return const0_rtx;
8421 }
8422
8423 case VA_ARG_EXPR:
8424 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8425
8426 default:
8427 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8428 }
8429
8430 /* Here to do an ordinary binary operator, generating an instruction
8431 from the optab already placed in `this_optab'. */
8432 binop:
8433 preexpand_calls (exp);
8434 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8435 subtarget = 0;
8436 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8437 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8438 binop2:
8439 temp = expand_binop (mode, this_optab, op0, op1, target,
8440 unsignedp, OPTAB_LIB_WIDEN);
8441 if (temp == 0)
8442 abort ();
8443 return temp;
8444 }
8445 \f
8446 /* Similar to expand_expr, except that we don't specify a target, target
8447 mode, or modifier and we return the alignment of the inner type. This is
8448 used in cases where it is not necessary to align the result to the
8449 alignment of its type as long as we know the alignment of the result, for
8450 example for comparisons of BLKmode values. */
8451
8452 static rtx
8453 expand_expr_unaligned (exp, palign)
8454 register tree exp;
8455 unsigned int *palign;
8456 {
8457 register rtx op0;
8458 tree type = TREE_TYPE (exp);
8459 register enum machine_mode mode = TYPE_MODE (type);
8460
8461 /* Default the alignment we return to that of the type. */
8462 *palign = TYPE_ALIGN (type);
8463
8464 /* The only cases in which we do anything special is if the resulting mode
8465 is BLKmode. */
8466 if (mode != BLKmode)
8467 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8468
8469 switch (TREE_CODE (exp))
8470 {
8471 case CONVERT_EXPR:
8472 case NOP_EXPR:
8473 case NON_LVALUE_EXPR:
8474 /* Conversions between BLKmode values don't change the underlying
8475 alignment or value. */
8476 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8477 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8478 break;
8479
8480 case ARRAY_REF:
8481 /* Much of the code for this case is copied directly from expand_expr.
8482 We need to duplicate it here because we will do something different
8483 in the fall-through case, so we need to handle the same exceptions
8484 it does. */
8485 {
8486 tree array = TREE_OPERAND (exp, 0);
8487 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8488 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8489 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8490 HOST_WIDE_INT i;
8491
8492 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8493 abort ();
8494
8495 /* Optimize the special-case of a zero lower bound.
8496
8497 We convert the low_bound to sizetype to avoid some problems
8498 with constant folding. (E.g. suppose the lower bound is 1,
8499 and its mode is QI. Without the conversion, (ARRAY
8500 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8501 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8502
8503 if (! integer_zerop (low_bound))
8504 index = size_diffop (index, convert (sizetype, low_bound));
8505
8506 /* If this is a constant index into a constant array,
8507 just get the value from the array. Handle both the cases when
8508 we have an explicit constructor and when our operand is a variable
8509 that was declared const. */
8510
8511 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8512 && 0 > compare_tree_int (index,
8513 list_length (CONSTRUCTOR_ELTS
8514 (TREE_OPERAND (exp, 0)))))
8515 {
8516 tree elem;
8517
8518 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8519 i = TREE_INT_CST_LOW (index);
8520 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8521 ;
8522
8523 if (elem)
8524 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8525 }
8526
8527 else if (optimize >= 1
8528 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8529 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8530 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8531 {
8532 if (TREE_CODE (index) == INTEGER_CST)
8533 {
8534 tree init = DECL_INITIAL (array);
8535
8536 if (TREE_CODE (init) == CONSTRUCTOR)
8537 {
8538 tree elem;
8539
8540 for (elem = CONSTRUCTOR_ELTS (init);
8541 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8542 elem = TREE_CHAIN (elem))
8543 ;
8544
8545 if (elem)
8546 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8547 palign);
8548 }
8549 }
8550 }
8551 }
8552
8553 /* ... fall through ... */
8554
8555 case COMPONENT_REF:
8556 case BIT_FIELD_REF:
8557 /* If the operand is a CONSTRUCTOR, we can just extract the
8558 appropriate field if it is present. Don't do this if we have
8559 already written the data since we want to refer to that copy
8560 and varasm.c assumes that's what we'll do. */
8561 if (TREE_CODE (exp) != ARRAY_REF
8562 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8563 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8564 {
8565 tree elt;
8566
8567 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8568 elt = TREE_CHAIN (elt))
8569 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8570 /* Note that unlike the case in expand_expr, we know this is
8571 BLKmode and hence not an integer. */
8572 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8573 }
8574
8575 {
8576 enum machine_mode mode1;
8577 HOST_WIDE_INT bitsize, bitpos;
8578 tree offset;
8579 int volatilep = 0;
8580 unsigned int alignment;
8581 int unsignedp;
8582 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8583 &mode1, &unsignedp, &volatilep,
8584 &alignment);
8585
8586 /* If we got back the original object, something is wrong. Perhaps
8587 we are evaluating an expression too early. In any event, don't
8588 infinitely recurse. */
8589 if (tem == exp)
8590 abort ();
8591
8592 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8593
8594 /* If this is a constant, put it into a register if it is a
8595 	 legitimate constant and OFFSET is 0; otherwise put it into memory.  */
8596 if (CONSTANT_P (op0))
8597 {
8598 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8599
8600 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8601 && offset == 0)
8602 op0 = force_reg (inner_mode, op0);
8603 else
8604 op0 = validize_mem (force_const_mem (inner_mode, op0));
8605 }
8606
8607 if (offset != 0)
8608 {
8609 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8610
8611 /* If this object is in a register, put it into memory.
8612 This case can't occur in C, but can in Ada if we have
8613 unchecked conversion of an expression from a scalar type to
8614 an array or record type. */
8615 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8616 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8617 {
8618 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8619
8620 mark_temp_addr_taken (memloc);
8621 emit_move_insn (memloc, op0);
8622 op0 = memloc;
8623 }
8624
8625 if (GET_CODE (op0) != MEM)
8626 abort ();
8627
8628 if (GET_MODE (offset_rtx) != ptr_mode)
8629 {
8630 #ifdef POINTERS_EXTEND_UNSIGNED
8631 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8632 #else
8633 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8634 #endif
8635 }
8636
8637 op0 = change_address (op0, VOIDmode,
8638 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8639 force_reg (ptr_mode,
8640 offset_rtx)));
8641 }
8642
8643 /* Don't forget about volatility even if this is a bitfield. */
8644 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8645 {
8646 op0 = copy_rtx (op0);
8647 MEM_VOLATILE_P (op0) = 1;
8648 }
8649
8650 /* Check the access. */
8651 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8652 {
8653 rtx to;
8654 int size;
8655
8656 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8657 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8658
8659 /* Check the access right of the pointer. */
8660 if (size > BITS_PER_UNIT)
8661 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8662 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8663 TYPE_MODE (sizetype),
8664 GEN_INT (MEMORY_USE_RO),
8665 TYPE_MODE (integer_type_node));
8666 }
8667
8668 /* In cases where an aligned union has an unaligned object
8669 as a field, we might be extracting a BLKmode value from
8670 an integer-mode (e.g., SImode) object. Handle this case
8671 by doing the extract into an object as wide as the field
8672 (which we know to be the width of a basic mode), then
8673 storing into memory, and changing the mode to BLKmode.
8674 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8675 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8676 if (mode1 == VOIDmode
8677 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8678 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8679 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8680 || bitpos % TYPE_ALIGN (type) != 0)))
8681 {
8682 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8683
8684 if (ext_mode == BLKmode)
8685 {
8686 /* In this case, BITPOS must start at a byte boundary. */
8687 if (GET_CODE (op0) != MEM
8688 || bitpos % BITS_PER_UNIT != 0)
8689 abort ();
8690
8691 op0 = change_address (op0, VOIDmode,
8692 plus_constant (XEXP (op0, 0),
8693 bitpos / BITS_PER_UNIT));
8694 }
8695 else
8696 {
8697 rtx new = assign_stack_temp (ext_mode,
8698 bitsize / BITS_PER_UNIT, 0);
8699
8700 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8701 unsignedp, NULL_RTX, ext_mode,
8702 ext_mode, alignment,
8703 int_size_in_bytes (TREE_TYPE (tem)));
8704
8705 /* If the result is a record type and BITSIZE is narrower than
8706 the mode of OP0, an integral mode, and this is a big endian
8707 machine, we must put the field into the high-order bits. */
8708 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8709 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8710 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8711 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8712 size_int (GET_MODE_BITSIZE
8713 (GET_MODE (op0))
8714 - bitsize),
8715 op0, 1);
8716
8717
8718 emit_move_insn (new, op0);
8719 op0 = copy_rtx (new);
8720 PUT_MODE (op0, BLKmode);
8721 }
8722 }
8723 else
8724 /* Get a reference to just this component. */
8725 op0 = change_address (op0, mode1,
8726 plus_constant (XEXP (op0, 0),
8727 (bitpos / BITS_PER_UNIT)));
8728
8729 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8730
8731 /* Adjust the alignment in case the bit position is not
8732 a multiple of the alignment of the inner object. */
8733 while (bitpos % alignment != 0)
8734 alignment >>= 1;
8735
8736 if (GET_CODE (XEXP (op0, 0)) == REG)
8737 mark_reg_pointer (XEXP (op0, 0), alignment);
8738
8739 MEM_IN_STRUCT_P (op0) = 1;
8740 MEM_VOLATILE_P (op0) |= volatilep;
8741
8742 *palign = alignment;
8743 return op0;
8744 }
8745
8746 default:
8747 break;
8748
8749 }
8750
8751 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8752 }
8753 \f
8754 /* Return the tree node if ARG corresponds to a string constant, or zero
8755 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8756 in bytes within the string that ARG is accessing. The type of the
8757 offset will be `sizetype'. */
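/* For instance (an illustrative sketch; the exact tree shape depends on
   the front end): for an ARG built as the address of "hello" plus 2,
   i.e. a PLUS_EXPR of an ADDR_EXPR of the STRING_CST "hello" and the
   constant 2, we return that STRING_CST and set *PTR_OFFSET to a
   sizetype constant 2.  */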
8758
8759 tree
8760 string_constant (arg, ptr_offset)
8761 tree arg;
8762 tree *ptr_offset;
8763 {
8764 STRIP_NOPS (arg);
8765
8766 if (TREE_CODE (arg) == ADDR_EXPR
8767 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8768 {
8769 *ptr_offset = size_zero_node;
8770 return TREE_OPERAND (arg, 0);
8771 }
8772 else if (TREE_CODE (arg) == PLUS_EXPR)
8773 {
8774 tree arg0 = TREE_OPERAND (arg, 0);
8775 tree arg1 = TREE_OPERAND (arg, 1);
8776
8777 STRIP_NOPS (arg0);
8778 STRIP_NOPS (arg1);
8779
8780 if (TREE_CODE (arg0) == ADDR_EXPR
8781 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8782 {
8783 *ptr_offset = convert (sizetype, arg1);
8784 return TREE_OPERAND (arg0, 0);
8785 }
8786 else if (TREE_CODE (arg1) == ADDR_EXPR
8787 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8788 {
8789 *ptr_offset = convert (sizetype, arg0);
8790 return TREE_OPERAND (arg1, 0);
8791 }
8792 }
8793
8794 return 0;
8795 }
8796 \f
8797 /* Expand code for a post- or pre-increment or decrement
8798 and return the RTX for the result.
8799 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
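/* For example, for I++ the value returned is the old value of I, while
   for ++I it is the incremented value; IGNORE is nonzero when the caller
   does not actually use the value at all.  */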
8800
8801 static rtx
8802 expand_increment (exp, post, ignore)
8803 register tree exp;
8804 int post, ignore;
8805 {
8806 register rtx op0, op1;
8807 register rtx temp, value;
8808 register tree incremented = TREE_OPERAND (exp, 0);
8809 optab this_optab = add_optab;
8810 int icode;
8811 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8812 int op0_is_copy = 0;
8813 int single_insn = 0;
8814 /* 1 means we can't store into OP0 directly,
8815 because it is a subreg narrower than a word,
8816 and we don't dare clobber the rest of the word. */
8817 int bad_subreg = 0;
8818
8819 /* Stabilize any component ref that might need to be
8820 evaluated more than once below. */
8821 if (!post
8822 || TREE_CODE (incremented) == BIT_FIELD_REF
8823 || (TREE_CODE (incremented) == COMPONENT_REF
8824 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8825 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8826 incremented = stabilize_reference (incremented);
8827 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8828 ones into save exprs so that they don't accidentally get evaluated
8829 more than once by the code below. */
8830 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8831 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8832 incremented = save_expr (incremented);
8833
8834 /* Compute the operands as RTX.
8835 Note whether OP0 is the actual lvalue or a copy of it:
8836 I believe it is a copy iff it is a register or subreg
8837 and insns were generated in computing it. */
8838
8839 temp = get_last_insn ();
8840 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8841
8842 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8843 in place but instead must do sign- or zero-extension during assignment,
8844 so we copy it into a new register and let the code below use it as
8845 a copy.
8846
8847 Note that we can safely modify this SUBREG since it is known not to be
8848 shared (it was made by the expand_expr call above). */
8849
8850 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8851 {
8852 if (post)
8853 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8854 else
8855 bad_subreg = 1;
8856 }
8857 else if (GET_CODE (op0) == SUBREG
8858 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8859 {
8860 /* We cannot increment this SUBREG in place. If we are
8861 post-incrementing, get a copy of the old value. Otherwise,
8862 just mark that we cannot increment in place. */
8863 if (post)
8864 op0 = copy_to_reg (op0);
8865 else
8866 bad_subreg = 1;
8867 }
8868
8869 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8870 && temp != get_last_insn ());
8871 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8872 EXPAND_MEMORY_USE_BAD);
8873
8874 /* Decide whether incrementing or decrementing. */
8875 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8876 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8877 this_optab = sub_optab;
8878
8879 /* Convert decrement by a constant into a negative increment. */
8880 if (this_optab == sub_optab
8881 && GET_CODE (op1) == CONST_INT)
8882 {
8883 op1 = GEN_INT (- INTVAL (op1));
8884 this_optab = add_optab;
8885 }
8886
8887 /* For a preincrement, see if we can do this with a single instruction. */
8888 if (!post)
8889 {
8890 icode = (int) this_optab->handlers[(int) mode].insn_code;
8891 if (icode != (int) CODE_FOR_nothing
8892 /* Make sure that OP0 is valid for operands 0 and 1
8893 of the insn we want to queue. */
8894 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8895 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8896 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8897 single_insn = 1;
8898 }
8899
8900 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8901 then we cannot just increment OP0. We must therefore contrive to
8902 increment the original value. Then, for postincrement, we can return
8903 OP0 since it is a copy of the old value. For preincrement, expand here
8904 unless we can do it with a single insn.
8905
8906 Likewise if storing directly into OP0 would clobber high bits
8907 we need to preserve (bad_subreg). */
8908 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8909 {
8910 /* This is the easiest way to increment the value wherever it is.
8911 Problems with multiple evaluation of INCREMENTED are prevented
8912 because either (1) it is a component_ref or preincrement,
8913 in which case it was stabilized above, or (2) it is an array_ref
8914 with constant index in an array in a register, which is
8915 safe to reevaluate. */
8916 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8917 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8918 ? MINUS_EXPR : PLUS_EXPR),
8919 TREE_TYPE (exp),
8920 incremented,
8921 TREE_OPERAND (exp, 1));
8922
8923 while (TREE_CODE (incremented) == NOP_EXPR
8924 || TREE_CODE (incremented) == CONVERT_EXPR)
8925 {
8926 newexp = convert (TREE_TYPE (incremented), newexp);
8927 incremented = TREE_OPERAND (incremented, 0);
8928 }
8929
8930 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8931 return post ? op0 : temp;
8932 }
8933
8934 if (post)
8935 {
8936 /* We have a true reference to the value in OP0.
8937 If there is an insn to add or subtract in this mode, queue it.
8938 Queueing the increment insn avoids the register shuffling
8939 that often results if we must increment now and first save
8940 the old value for subsequent use. */
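/* E.g., for an I++ buried in a larger expression, queueing the add lets
   the rest of the expression use the old value of I in place instead of
   copying it to a scratch register first; the add itself comes out when
   emit_queue is called.  */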
8941
8942 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8943 op0 = stabilize (op0);
8944 #endif
8945
8946 icode = (int) this_optab->handlers[(int) mode].insn_code;
8947 if (icode != (int) CODE_FOR_nothing
8948 /* Make sure that OP0 is valid for operands 0 and 1
8949 of the insn we want to queue. */
8950 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8951 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8952 {
8953 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8954 op1 = force_reg (mode, op1);
8955
8956 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8957 }
8958 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8959 {
8960 rtx addr = (general_operand (XEXP (op0, 0), mode)
8961 ? force_reg (Pmode, XEXP (op0, 0))
8962 : copy_to_reg (XEXP (op0, 0)));
8963 rtx temp, result;
8964
8965 op0 = change_address (op0, VOIDmode, addr);
8966 temp = force_reg (GET_MODE (op0), op0);
8967 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8968 op1 = force_reg (mode, op1);
8969
8970 /* The increment queue is LIFO, thus we have to `queue'
8971 the instructions in reverse order. */
8972 enqueue_insn (op0, gen_move_insn (op0, temp));
8973 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8974 return result;
8975 }
8976 }
8977
8978 /* Preincrement, or we can't increment with one simple insn. */
8979 if (post)
8980 /* Save a copy of the value before inc or dec, to return it later. */
8981 temp = value = copy_to_reg (op0);
8982 else
8983 /* Arrange to return the incremented value. */
8984 /* Copy the rtx because expand_binop will protect from the queue,
8985 and the results of that would be invalid for us to return
8986 if our caller does emit_queue before using our result. */
8987 temp = copy_rtx (value = op0);
8988
8989 /* Increment however we can. */
8990 op1 = expand_binop (mode, this_optab, value, op1,
8991 current_function_check_memory_usage ? NULL_RTX : op0,
8992 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8993 /* Make sure the value is stored into OP0. */
8994 if (op1 != op0)
8995 emit_move_insn (op0, op1);
8996
8997 return temp;
8998 }
8999 \f
9000 /* Expand all function calls contained within EXP, innermost ones first.
9001 But don't look within expressions that have sequence points.
9002 For each CALL_EXPR, record the rtx for its value
9003 in the CALL_EXPR_RTL field. */
9004
9005 static void
9006 preexpand_calls (exp)
9007 tree exp;
9008 {
9009 register int nops, i;
9010 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9011
9012 if (! do_preexpand_calls)
9013 return;
9014
9015 /* Only expressions and references can contain calls. */
9016
9017 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9018 return;
9019
9020 switch (TREE_CODE (exp))
9021 {
9022 case CALL_EXPR:
9023 /* Do nothing if already expanded. */
9024 if (CALL_EXPR_RTL (exp) != 0
9025 /* Do nothing if the call returns a variable-sized object. */
9026 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9027 /* Do nothing to built-in functions. */
9028 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9029 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9030 == FUNCTION_DECL)
9031 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9032 return;
9033
9034 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9035 return;
9036
9037 case COMPOUND_EXPR:
9038 case COND_EXPR:
9039 case TRUTH_ANDIF_EXPR:
9040 case TRUTH_ORIF_EXPR:
9041 /* If we find one of these, then we can be sure
9042 the stack adjustment will be done for it (since it makes jumps).
9043 Do it now, so that if this is inside an argument
9044 of a function, we don't get the stack adjustment
9045 after some other args have already been pushed. */
9046 do_pending_stack_adjust ();
9047 return;
9048
9049 case BLOCK:
9050 case RTL_EXPR:
9051 case WITH_CLEANUP_EXPR:
9052 case CLEANUP_POINT_EXPR:
9053 case TRY_CATCH_EXPR:
9054 return;
9055
9056 case SAVE_EXPR:
9057 if (SAVE_EXPR_RTL (exp) != 0)
9058 return;
9059
9060 default:
9061 break;
9062 }
9063
9064 nops = tree_code_length[(int) TREE_CODE (exp)];
9065 for (i = 0; i < nops; i++)
9066 if (TREE_OPERAND (exp, i) != 0)
9067 {
9068 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9069 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9070 It doesn't happen before the call is made. */
9071 ;
9072 else
9073 {
9074 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9075 if (type == 'e' || type == '<' || type == '1' || type == '2'
9076 || type == 'r')
9077 preexpand_calls (TREE_OPERAND (exp, i));
9078 }
9079 }
9080 }
9081 \f
9082 /* At the start of a function, record that we have no previously-pushed
9083 arguments waiting to be popped. */
9084
9085 void
9086 init_pending_stack_adjust ()
9087 {
9088 pending_stack_adjust = 0;
9089 }
9090
9091 /* When exiting from function, if safe, clear out any pending stack adjust
9092 so the adjustment won't get done.
9093
9094 Note, if the current function calls alloca, then it must have a
9095 frame pointer regardless of the value of flag_omit_frame_pointer. */
9096
9097 void
9098 clear_pending_stack_adjust ()
9099 {
9100 #ifdef EXIT_IGNORE_STACK
9101 if (optimize > 0
9102 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9103 && EXIT_IGNORE_STACK
9104 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9105 && ! flag_inline_functions)
9106 pending_stack_adjust = 0;
9107 #endif
9108 }
9109
9110 /* Pop any previously-pushed arguments that have not been popped yet. */
9111
9112 void
9113 do_pending_stack_adjust ()
9114 {
9115 if (inhibit_defer_pop == 0)
9116 {
9117 if (pending_stack_adjust != 0)
9118 adjust_stack (GEN_INT (pending_stack_adjust));
9119 pending_stack_adjust = 0;
9120 }
9121 }
9122 \f
9123 /* Expand conditional expressions. */
9124
9125 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9126 LABEL is an rtx of code CODE_LABEL, in this function and all the
9127 functions here. */
9128
9129 void
9130 jumpifnot (exp, label)
9131 tree exp;
9132 rtx label;
9133 {
9134 do_jump (exp, label, NULL_RTX);
9135 }
9136
9137 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9138
9139 void
9140 jumpif (exp, label)
9141 tree exp;
9142 rtx label;
9143 {
9144 do_jump (exp, NULL_RTX, label);
9145 }
9146
9147 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9148 the result is zero, or IF_TRUE_LABEL if the result is nonzero.
9149 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9150 meaning fall through in that case.
9151
9152 do_jump always does any pending stack adjust except when it does not
9153 actually perform a jump. An example where there is no jump
9154 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9155
9156 This function is responsible for optimizing cases such as
9157 &&, || and comparison operators in EXP. */
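/* For instance, for EXP `a && b' with only IF_FALSE_LABEL set we emit a
   conditional jump to IF_FALSE_LABEL if A is zero, then the code for B
   with another such jump, and never materialize a boolean value for the
   whole expression.  */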
9158
9159 void
9160 do_jump (exp, if_false_label, if_true_label)
9161 tree exp;
9162 rtx if_false_label, if_true_label;
9163 {
9164 register enum tree_code code = TREE_CODE (exp);
9165 /* Some cases need to create a label to jump to
9166 in order to properly fall through.
9167 These cases set DROP_THROUGH_LABEL nonzero. */
9168 rtx drop_through_label = 0;
9169 rtx temp;
9170 int i;
9171 tree type;
9172 enum machine_mode mode;
9173
9174 #ifdef MAX_INTEGER_COMPUTATION_MODE
9175 check_max_integer_computation_mode (exp);
9176 #endif
9177
9178 emit_queue ();
9179
9180 switch (code)
9181 {
9182 case ERROR_MARK:
9183 break;
9184
9185 case INTEGER_CST:
9186 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9187 if (temp)
9188 emit_jump (temp);
9189 break;
9190
9191 #if 0
9192 /* This is not true with #pragma weak */
9193 case ADDR_EXPR:
9194 /* The address of something can never be zero. */
9195 if (if_true_label)
9196 emit_jump (if_true_label);
9197 break;
9198 #endif
9199
9200 case NOP_EXPR:
9201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9202 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9203 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9204 goto normal;
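/* ... fall through ... */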
9205 case CONVERT_EXPR:
9206 /* If we are narrowing the operand, we have to do the compare in the
9207 narrower mode. */
9208 if ((TYPE_PRECISION (TREE_TYPE (exp))
9209 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9210 goto normal;
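/* ... fall through ... */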
9211 case NON_LVALUE_EXPR:
9212 case REFERENCE_EXPR:
9213 case ABS_EXPR:
9214 case NEGATE_EXPR:
9215 case LROTATE_EXPR:
9216 case RROTATE_EXPR:
9217 /* These cannot change zero->non-zero or vice versa. */
9218 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9219 break;
9220
9221 case WITH_RECORD_EXPR:
9222 /* Put the object on the placeholder list, recurse through our first
9223 operand, and pop the list. */
9224 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9225 placeholder_list);
9226 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9227 placeholder_list = TREE_CHAIN (placeholder_list);
9228 break;
9229
9230 #if 0
9231 /* This is never less insns than evaluating the PLUS_EXPR followed by
9232 a test and can be longer if the test is eliminated. */
9233 case PLUS_EXPR:
9234 /* Reduce to minus. */
9235 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9236 TREE_OPERAND (exp, 0),
9237 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9238 TREE_OPERAND (exp, 1))));
9239 /* Process as MINUS. */
9240 #endif
9241
9242 case MINUS_EXPR:
9243 /* Non-zero iff operands of minus differ. */
9244 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9245 TREE_OPERAND (exp, 0),
9246 TREE_OPERAND (exp, 1)),
9247 NE, NE, if_false_label, if_true_label);
9248 break;
9249
9250 case BIT_AND_EXPR:
9251 /* If we are AND'ing with a small constant, do this comparison in the
9252 smallest type that fits. If the machine doesn't have comparisons
9253 that small, it will be converted back to the wider comparison.
9254 This helps if we are testing the sign bit of a narrower object.
9255 combine can't do this for us because it can't know whether a
9256 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
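/* E.g., a test of (X & 0x80) != 0 where X is a full int can be done as a
   QImode comparison of (unsigned char) (X & 0x80) against zero, provided
   the target has QImode compare insns.  */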
9257
9258 if (! SLOW_BYTE_ACCESS
9259 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9260 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9261 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9262 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9263 && (type = type_for_mode (mode, 1)) != 0
9264 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9265 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9266 != CODE_FOR_nothing))
9267 {
9268 do_jump (convert (type, exp), if_false_label, if_true_label);
9269 break;
9270 }
9271 goto normal;
9272
9273 case TRUTH_NOT_EXPR:
9274 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9275 break;
9276
9277 case TRUTH_ANDIF_EXPR:
9278 if (if_false_label == 0)
9279 if_false_label = drop_through_label = gen_label_rtx ();
9280 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9281 start_cleanup_deferral ();
9282 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9283 end_cleanup_deferral ();
9284 break;
9285
9286 case TRUTH_ORIF_EXPR:
9287 if (if_true_label == 0)
9288 if_true_label = drop_through_label = gen_label_rtx ();
9289 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9290 start_cleanup_deferral ();
9291 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9292 end_cleanup_deferral ();
9293 break;
9294
9295 case COMPOUND_EXPR:
9296 push_temp_slots ();
9297 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9298 preserve_temp_slots (NULL_RTX);
9299 free_temp_slots ();
9300 pop_temp_slots ();
9301 emit_queue ();
9302 do_pending_stack_adjust ();
9303 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9304 break;
9305
9306 case COMPONENT_REF:
9307 case BIT_FIELD_REF:
9308 case ARRAY_REF:
9309 {
9310 HOST_WIDE_INT bitsize, bitpos;
9311 int unsignedp;
9312 enum machine_mode mode;
9313 tree type;
9314 tree offset;
9315 int volatilep = 0;
9316 unsigned int alignment;
9317
9318 /* Get description of this reference. We don't actually care
9319 about the underlying object here. */
9320 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9321 &mode, &unsignedp, &volatilep,
9322 &alignment);
9323
9324 type = type_for_size (bitsize, unsignedp);
9325 if (! SLOW_BYTE_ACCESS
9326 && type != 0 && bitsize >= 0
9327 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9328 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9329 != CODE_FOR_nothing))
9330 {
9331 do_jump (convert (type, exp), if_false_label, if_true_label);
9332 break;
9333 }
9334 goto normal;
9335 }
9336
9337 case COND_EXPR:
9338 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9339 if (integer_onep (TREE_OPERAND (exp, 1))
9340 && integer_zerop (TREE_OPERAND (exp, 2)))
9341 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9342
9343 else if (integer_zerop (TREE_OPERAND (exp, 1))
9344 && integer_onep (TREE_OPERAND (exp, 2)))
9345 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9346
9347 else
9348 {
9349 register rtx label1 = gen_label_rtx ();
9350 drop_through_label = gen_label_rtx ();
9351
9352 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9353
9354 start_cleanup_deferral ();
9355 /* Now the THEN-expression. */
9356 do_jump (TREE_OPERAND (exp, 1),
9357 if_false_label ? if_false_label : drop_through_label,
9358 if_true_label ? if_true_label : drop_through_label);
9359 /* In case the do_jump just above never jumps. */
9360 do_pending_stack_adjust ();
9361 emit_label (label1);
9362
9363 /* Now the ELSE-expression. */
9364 do_jump (TREE_OPERAND (exp, 2),
9365 if_false_label ? if_false_label : drop_through_label,
9366 if_true_label ? if_true_label : drop_through_label);
9367 end_cleanup_deferral ();
9368 }
9369 break;
9370
9371 case EQ_EXPR:
9372 {
9373 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9374
9375 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9376 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9377 {
9378 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9379 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9380 do_jump
9381 (fold
9382 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9383 fold (build (EQ_EXPR, TREE_TYPE (exp),
9384 fold (build1 (REALPART_EXPR,
9385 TREE_TYPE (inner_type),
9386 exp0)),
9387 fold (build1 (REALPART_EXPR,
9388 TREE_TYPE (inner_type),
9389 exp1)))),
9390 fold (build (EQ_EXPR, TREE_TYPE (exp),
9391 fold (build1 (IMAGPART_EXPR,
9392 TREE_TYPE (inner_type),
9393 exp0)),
9394 fold (build1 (IMAGPART_EXPR,
9395 TREE_TYPE (inner_type),
9396 exp1)))))),
9397 if_false_label, if_true_label);
9398 }
9399
9400 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9401 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9402
9403 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9404 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9405 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9406 else
9407 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9408 break;
9409 }
9410
9411 case NE_EXPR:
9412 {
9413 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9414
9415 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9416 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9417 {
9418 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9419 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9420 do_jump
9421 (fold
9422 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9423 fold (build (NE_EXPR, TREE_TYPE (exp),
9424 fold (build1 (REALPART_EXPR,
9425 TREE_TYPE (inner_type),
9426 exp0)),
9427 fold (build1 (REALPART_EXPR,
9428 TREE_TYPE (inner_type),
9429 exp1)))),
9430 fold (build (NE_EXPR, TREE_TYPE (exp),
9431 fold (build1 (IMAGPART_EXPR,
9432 TREE_TYPE (inner_type),
9433 exp0)),
9434 fold (build1 (IMAGPART_EXPR,
9435 TREE_TYPE (inner_type),
9436 exp1)))))),
9437 if_false_label, if_true_label);
9438 }
9439
9440 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9441 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9442
9443 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9444 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9445 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9446 else
9447 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9448 break;
9449 }
9450
9451 case LT_EXPR:
9452 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9453 if (GET_MODE_CLASS (mode) == MODE_INT
9454 && ! can_compare_p (LT, mode, ccp_jump))
9455 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9456 else
9457 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9458 break;
9459
9460 case LE_EXPR:
9461 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9462 if (GET_MODE_CLASS (mode) == MODE_INT
9463 && ! can_compare_p (LE, mode, ccp_jump))
9464 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9465 else
9466 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9467 break;
9468
9469 case GT_EXPR:
9470 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9471 if (GET_MODE_CLASS (mode) == MODE_INT
9472 && ! can_compare_p (GT, mode, ccp_jump))
9473 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9474 else
9475 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9476 break;
9477
9478 case GE_EXPR:
9479 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9480 if (GET_MODE_CLASS (mode) == MODE_INT
9481 && ! can_compare_p (GE, mode, ccp_jump))
9482 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9483 else
9484 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9485 break;
9486
9487 case UNORDERED_EXPR:
9488 case ORDERED_EXPR:
9489 {
9490 enum rtx_code cmp, rcmp;
9491 int do_rev;
9492
9493 if (code == UNORDERED_EXPR)
9494 cmp = UNORDERED, rcmp = ORDERED;
9495 else
9496 cmp = ORDERED, rcmp = UNORDERED;
9497 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9498
9499 do_rev = 0;
9500 if (! can_compare_p (cmp, mode, ccp_jump)
9501 && (can_compare_p (rcmp, mode, ccp_jump)
9502 /* If the target doesn't provide either UNORDERED or ORDERED
9503 comparisons, canonicalize on UNORDERED for the library. */
9504 || rcmp == UNORDERED))
9505 do_rev = 1;
9506
9507 if (! do_rev)
9508 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9509 else
9510 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9511 }
9512 break;
9513
9514 {
9515 enum rtx_code rcode1;
9516 enum tree_code tcode2;
9517
9518 case UNLT_EXPR:
9519 rcode1 = UNLT;
9520 tcode2 = LT_EXPR;
9521 goto unordered_bcc;
9522 case UNLE_EXPR:
9523 rcode1 = UNLE;
9524 tcode2 = LE_EXPR;
9525 goto unordered_bcc;
9526 case UNGT_EXPR:
9527 rcode1 = UNGT;
9528 tcode2 = GT_EXPR;
9529 goto unordered_bcc;
9530 case UNGE_EXPR:
9531 rcode1 = UNGE;
9532 tcode2 = GE_EXPR;
9533 goto unordered_bcc;
9534 case UNEQ_EXPR:
9535 rcode1 = UNEQ;
9536 tcode2 = EQ_EXPR;
9537 goto unordered_bcc;
9538
9539 unordered_bcc:
9540 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9541 if (can_compare_p (rcode1, mode, ccp_jump))
9542 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9543 if_true_label);
9544 else
9545 {
9546 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9547 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9548 tree cmp0, cmp1;
9549
9550 /* If the target doesn't support combined unordered
9551 compares, decompose into UNORDERED + comparison. */
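/* E.g., UNLT (a, b) becomes UNORDERED (a, b) || a < b.  */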
9552 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9553 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9554 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9555 do_jump (exp, if_false_label, if_true_label);
9556 }
9557 }
9558 break;
9559
9560 default:
9561 normal:
9562 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9563 #if 0
9564 /* This is not needed any more and causes poor code since it causes
9565 comparisons and tests from non-SI objects to have different code
9566 sequences. */
9567 /* Copy to register to avoid generating bad insns by cse
9568 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9569 if (!cse_not_expected && GET_CODE (temp) == MEM)
9570 temp = copy_to_reg (temp);
9571 #endif
9572 do_pending_stack_adjust ();
9573 /* Do any postincrements in the expression that was tested. */
9574 emit_queue ();
9575
9576 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9577 {
9578 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9579 if (target)
9580 emit_jump (target);
9581 }
9582 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9583 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9584 /* Note swapping the labels gives us not-equal. */
9585 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9586 else if (GET_MODE (temp) != VOIDmode)
9587 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9588 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9589 GET_MODE (temp), NULL_RTX, 0,
9590 if_false_label, if_true_label);
9591 else
9592 abort ();
9593 }
9594
9595 if (drop_through_label)
9596 {
9597 /* If do_jump produces code that might be jumped around,
9598 do any stack adjusts from that code, before the place
9599 where control merges in. */
9600 do_pending_stack_adjust ();
9601 emit_label (drop_through_label);
9602 }
9603 }
9604 \f
9605 /* Given a comparison expression EXP for values too wide to be compared
9606 with one insn, test the comparison and jump to the appropriate label.
9607 The code of EXP is ignored; we always test GT if SWAP is 0,
9608 and LT if SWAP is 1. */
9609
9610 static void
9611 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9612 tree exp;
9613 int swap;
9614 rtx if_false_label, if_true_label;
9615 {
9616 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9617 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9618 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9619 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9620
9621 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9622 }
9623
9624 /* Compare OP0 with OP1, word at a time, in mode MODE.
9625 UNSIGNEDP says to do unsigned comparison.
9626 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
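/* E.g., a DImode comparison on a 32-bit target looks at the high-order
   words first: jump to IF_TRUE_LABEL if they compare GT, to
   IF_FALSE_LABEL if they compare NE, and otherwise decide on the
   low-order words, which are always compared unsigned.  */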
9627
9628 void
9629 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9630 enum machine_mode mode;
9631 int unsignedp;
9632 rtx op0, op1;
9633 rtx if_false_label, if_true_label;
9634 {
9635 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9636 rtx drop_through_label = 0;
9637 int i;
9638
9639 if (! if_true_label || ! if_false_label)
9640 drop_through_label = gen_label_rtx ();
9641 if (! if_true_label)
9642 if_true_label = drop_through_label;
9643 if (! if_false_label)
9644 if_false_label = drop_through_label;
9645
9646 /* Compare a word at a time, high order first. */
9647 for (i = 0; i < nwords; i++)
9648 {
9649 rtx op0_word, op1_word;
9650
9651 if (WORDS_BIG_ENDIAN)
9652 {
9653 op0_word = operand_subword_force (op0, i, mode);
9654 op1_word = operand_subword_force (op1, i, mode);
9655 }
9656 else
9657 {
9658 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9659 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9660 }
9661
9662 /* All but the high-order word must be compared as unsigned. */
9663 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9664 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9665 NULL_RTX, if_true_label);
9666
9667 /* Consider lower words only if these are equal. */
9668 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9669 NULL_RTX, 0, NULL_RTX, if_false_label);
9670 }
9671
9672 if (if_false_label)
9673 emit_jump (if_false_label);
9674 if (drop_through_label)
9675 emit_label (drop_through_label);
9676 }
9677
9678 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9679 with one insn, test the comparison and jump to the appropriate label. */
9680
9681 static void
9682 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9683 tree exp;
9684 rtx if_false_label, if_true_label;
9685 {
9686 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9687 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9688 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9689 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9690 int i;
9691 rtx drop_through_label = 0;
9692
9693 if (! if_false_label)
9694 drop_through_label = if_false_label = gen_label_rtx ();
9695
9696 for (i = 0; i < nwords; i++)
9697 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9698 operand_subword_force (op1, i, mode),
9699 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9700 word_mode, NULL_RTX, 0, if_false_label,
9701 NULL_RTX);
9702
9703 if (if_true_label)
9704 emit_jump (if_true_label);
9705 if (drop_through_label)
9706 emit_label (drop_through_label);
9707 }
9708 \f
9709 /* Jump to IF_TRUE_LABEL if OP0 is zero, to IF_FALSE_LABEL otherwise.
9710 We assume that OP0 has an integer mode that is too wide
9711 for the available compare insns. */
9712
9713 void
9714 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9715 rtx op0;
9716 rtx if_false_label, if_true_label;
9717 {
9718 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9719 rtx part;
9720 int i;
9721 rtx drop_through_label = 0;
9722
9723 /* The fastest way of doing this comparison on almost any machine is to
9724 "or" all the words and compare the result. If all have to be loaded
9725 from memory and this is a very wide item, this may be slower,
9726 but that's highly unlikely. */
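/* E.g., a DImode OP0 on a 32-bit target is tested by OR-ing its two words
   into one word_mode register and comparing that single result with zero.  */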
9727
9728 part = gen_reg_rtx (word_mode);
9729 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9730 for (i = 1; i < nwords && part != 0; i++)
9731 part = expand_binop (word_mode, ior_optab, part,
9732 operand_subword_force (op0, i, GET_MODE (op0)),
9733 part, 1, OPTAB_WIDEN);
9734
9735 if (part != 0)
9736 {
9737 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9738 NULL_RTX, 0, if_false_label, if_true_label);
9739
9740 return;
9741 }
9742
9743 /* If we couldn't do the "or" simply, do this with a series of compares. */
9744 if (! if_false_label)
9745 drop_through_label = if_false_label = gen_label_rtx ();
9746
9747 for (i = 0; i < nwords; i++)
9748 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9749 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9750 if_false_label, NULL_RTX);
9751
9752 if (if_true_label)
9753 emit_jump (if_true_label);
9754
9755 if (drop_through_label)
9756 emit_label (drop_through_label);
9757 }
9758 \f
9759 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9760 (including code to compute the values to be compared)
9761 and set (CC0) according to the result.
9762 The decision as to signed or unsigned comparison must be made by the caller.
9763
9764 We force a stack adjustment unless there are currently
9765 things pushed on the stack that aren't yet used.
9766
9767 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9768 compared.
9769
9770 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9771 size of MODE should be used. */
9772
9773 rtx
9774 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9775 register rtx op0, op1;
9776 enum rtx_code code;
9777 int unsignedp;
9778 enum machine_mode mode;
9779 rtx size;
9780 unsigned int align;
9781 {
9782 rtx tem;
9783
9784 /* If one operand is constant, make it the second one. Only do this
9785 if the other operand is not constant as well. */
9786
9787 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9788 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9789 {
9790 tem = op0;
9791 op0 = op1;
9792 op1 = tem;
9793 code = swap_condition (code);
9794 }
9795
9796 if (flag_force_mem)
9797 {
9798 op0 = force_not_mem (op0);
9799 op1 = force_not_mem (op1);
9800 }
9801
9802 do_pending_stack_adjust ();
9803
9804 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9805 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9806 return tem;
9807
9808 #if 0
9809 /* There's no need to do this now that combine.c can eliminate lots of
9810 sign extensions. This can be less efficient in certain cases on other
9811 machines. */
9812
9813 /* If this is a signed equality comparison, we can do it as an
9814 unsigned comparison since zero-extension is cheaper than sign
9815 extension and comparisons with zero are done as unsigned. This is
9816 the case even on machines that can do fast sign extension, since
9817 zero-extension is easier to combine with other operations than
9818 sign-extension is. If we are comparing against a constant, we must
9819 convert it to what it would look like unsigned. */
9820 if ((code == EQ || code == NE) && ! unsignedp
9821 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9822 {
9823 if (GET_CODE (op1) == CONST_INT
9824 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9825 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9826 unsignedp = 1;
9827 }
9828 #endif
9829
9830 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9831
9832 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9833 }
9834
9835 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9836 The decision as to signed or unsigned comparison must be made by the caller.
9837
9838 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9839 compared.
9840
9841 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9842 size of MODE should be used. */
9843
9844 void
9845 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9846 if_false_label, if_true_label)
9847 register rtx op0, op1;
9848 enum rtx_code code;
9849 int unsignedp;
9850 enum machine_mode mode;
9851 rtx size;
9852 unsigned int align;
9853 rtx if_false_label, if_true_label;
9854 {
9855 rtx tem;
9856 int dummy_true_label = 0;
9857
9858 /* Reverse the comparison if that is safe and we want to jump if it is
9859 false. */
9860 if (! if_true_label && ! FLOAT_MODE_P (mode))
9861 {
9862 if_true_label = if_false_label;
9863 if_false_label = 0;
9864 code = reverse_condition (code);
9865 }
9866
9867 /* If one operand is constant, make it the second one. Only do this
9868 if the other operand is not constant as well. */
9869
9870 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9871 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9872 {
9873 tem = op0;
9874 op0 = op1;
9875 op1 = tem;
9876 code = swap_condition (code);
9877 }
9878
9879 if (flag_force_mem)
9880 {
9881 op0 = force_not_mem (op0);
9882 op1 = force_not_mem (op1);
9883 }
9884
9885 do_pending_stack_adjust ();
9886
9887 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9888 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9889 {
9890 if (tem == const_true_rtx)
9891 {
9892 if (if_true_label)
9893 emit_jump (if_true_label);
9894 }
9895 else
9896 {
9897 if (if_false_label)
9898 emit_jump (if_false_label);
9899 }
9900 return;
9901 }
9902
9903 #if 0
9904 /* There's no need to do this now that combine.c can eliminate lots of
9905 sign extensions. This can be less efficient in certain cases on other
9906 machines. */
9907
9908 /* If this is a signed equality comparison, we can do it as an
9909 unsigned comparison since zero-extension is cheaper than sign
9910 extension and comparisons with zero are done as unsigned. This is
9911 the case even on machines that can do fast sign extension, since
9912 zero-extension is easier to combine with other operations than
9913 sign-extension is. If we are comparing against a constant, we must
9914 convert it to what it would look like unsigned. */
9915 if ((code == EQ || code == NE) && ! unsignedp
9916 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9917 {
9918 if (GET_CODE (op1) == CONST_INT
9919 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9920 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9921 unsignedp = 1;
9922 }
9923 #endif
9924
9925 if (! if_true_label)
9926 {
9927 dummy_true_label = 1;
9928 if_true_label = gen_label_rtx ();
9929 }
9930
9931 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9932 if_true_label);
9933
9934 if (if_false_label)
9935 emit_jump (if_false_label);
9936 if (dummy_true_label)
9937 emit_label (if_true_label);
9938 }
9939
9940 /* Generate code for a comparison expression EXP (including code to compute
9941 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9942 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9943 generated code will drop through.
9944 SIGNED_CODE should be the rtx operation for this comparison for
9945 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9946
9947 We force a stack adjustment unless there are currently
9948 things pushed on the stack that aren't yet used. */
9949
9950 static void
9951 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9952 if_true_label)
9953 register tree exp;
9954 enum rtx_code signed_code, unsigned_code;
9955 rtx if_false_label, if_true_label;
9956 {
9957 unsigned int align0, align1;
9958 register rtx op0, op1;
9959 register tree type;
9960 register enum machine_mode mode;
9961 int unsignedp;
9962 enum rtx_code code;
9963
9964 /* Don't crash if the comparison was erroneous. */
9965 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9966 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9967 return;
9968
9969 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9970 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9971 mode = TYPE_MODE (type);
9972 unsignedp = TREE_UNSIGNED (type);
9973 code = unsignedp ? unsigned_code : signed_code;
9974
9975 #ifdef HAVE_canonicalize_funcptr_for_compare
9976 /* If function pointers need to be "canonicalized" before they can
9977 be reliably compared, then canonicalize them. */
9978 if (HAVE_canonicalize_funcptr_for_compare
9979 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9980 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9981 == FUNCTION_TYPE))
9982 {
9983 rtx new_op0 = gen_reg_rtx (mode);
9984
9985 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9986 op0 = new_op0;
9987 }
9988
9989 if (HAVE_canonicalize_funcptr_for_compare
9990 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9991 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9992 == FUNCTION_TYPE))
9993 {
9994 rtx new_op1 = gen_reg_rtx (mode);
9995
9996 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9997 op1 = new_op1;
9998 }
9999 #endif
10000
10001 /* Do any postincrements in the expression that was tested. */
10002 emit_queue ();
10003
10004 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10005 ((mode == BLKmode)
10006 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10007 MIN (align0, align1) / BITS_PER_UNIT,
10008 if_false_label, if_true_label);
10009 }
10010 \f
10011 /* Generate code to calculate EXP using a store-flag instruction
10012 and return an rtx for the result. EXP is either a comparison
10013 or a TRUTH_NOT_EXPR whose operand is a comparison.
10014
10015 If TARGET is nonzero, store the result there if convenient.
10016
10017 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10018 cheap.
10019
10020 Return zero if there is no suitable set-flag instruction
10021 available on this machine.
10022
10023 Once expand_expr has been called on the arguments of the comparison,
10024 we are committed to doing the store flag, since it is not safe to
10025 re-evaluate the expression. We emit the store-flag insn by calling
10026 emit_store_flag, but only expand the arguments if we have a reason
10027 to believe that emit_store_flag will be successful. If we think that
10028 it will, but it isn't, we have to simulate the store-flag with a
10029 set/jump/set sequence. */
10030
10031 static rtx
10032 do_store_flag (exp, target, mode, only_cheap)
10033 tree exp;
10034 rtx target;
10035 enum machine_mode mode;
10036 int only_cheap;
10037 {
10038 enum rtx_code code;
10039 tree arg0, arg1, type;
10040 tree tem;
10041 enum machine_mode operand_mode;
10042 int invert = 0;
10043 int unsignedp;
10044 rtx op0, op1;
10045 enum insn_code icode;
10046 rtx subtarget = target;
10047 rtx result, label;
10048
10049 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10050 result at the end. We can't simply invert the test since it would
10051 have already been inverted if it were valid. This case occurs for
10052 some floating-point comparisons. */
10053
10054 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10055 invert = 1, exp = TREE_OPERAND (exp, 0);
10056
10057 arg0 = TREE_OPERAND (exp, 0);
10058 arg1 = TREE_OPERAND (exp, 1);
10059 type = TREE_TYPE (arg0);
10060 operand_mode = TYPE_MODE (type);
10061 unsignedp = TREE_UNSIGNED (type);
10062
10063 /* We won't bother with BLKmode store-flag operations because it would mean
10064 passing a lot of information to emit_store_flag. */
10065 if (operand_mode == BLKmode)
10066 return 0;
10067
10068 /* We won't bother with store-flag operations involving function pointers
10069 when function pointers must be canonicalized before comparisons. */
10070 #ifdef HAVE_canonicalize_funcptr_for_compare
10071 if (HAVE_canonicalize_funcptr_for_compare
10072 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10073 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10074 == FUNCTION_TYPE))
10075 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10076 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10077 == FUNCTION_TYPE))))
10078 return 0;
10079 #endif
10080
10081 STRIP_NOPS (arg0);
10082 STRIP_NOPS (arg1);
10083
10084 /* Get the rtx comparison code to use. We know that EXP is a comparison
10085 operation of some type. Some comparisons against 1 and -1 can be
10086 converted to comparisons with zero. Do so here so that the tests
10087 below will be aware that we have a comparison with zero. These
10088 tests will not catch constants in the first operand, but constants
10089 are rarely passed as the first operand. */
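/* E.g., a signed X < 1 is rewritten as X <= 0 and a signed X <= -1 as
   X < 0, so the code below only has to recognize comparisons with zero.  */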
10090
10091 switch (TREE_CODE (exp))
10092 {
10093 case EQ_EXPR:
10094 code = EQ;
10095 break;
10096 case NE_EXPR:
10097 code = NE;
10098 break;
10099 case LT_EXPR:
10100 if (integer_onep (arg1))
10101 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10102 else
10103 code = unsignedp ? LTU : LT;
10104 break;
10105 case LE_EXPR:
10106 if (! unsignedp && integer_all_onesp (arg1))
10107 arg1 = integer_zero_node, code = LT;
10108 else
10109 code = unsignedp ? LEU : LE;
10110 break;
10111 case GT_EXPR:
10112 if (! unsignedp && integer_all_onesp (arg1))
10113 arg1 = integer_zero_node, code = GE;
10114 else
10115 code = unsignedp ? GTU : GT;
10116 break;
10117 case GE_EXPR:
10118 if (integer_onep (arg1))
10119 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10120 else
10121 code = unsignedp ? GEU : GE;
10122 break;
10123
10124 case UNORDERED_EXPR:
10125 code = UNORDERED;
10126 break;
10127 case ORDERED_EXPR:
10128 code = ORDERED;
10129 break;
10130 case UNLT_EXPR:
10131 code = UNLT;
10132 break;
10133 case UNLE_EXPR:
10134 code = UNLE;
10135 break;
10136 case UNGT_EXPR:
10137 code = UNGT;
10138 break;
10139 case UNGE_EXPR:
10140 code = UNGE;
10141 break;
10142 case UNEQ_EXPR:
10143 code = UNEQ;
10144 break;
10145
10146 default:
10147 abort ();
10148 }
10149
10150 /* Put a constant second. */
10151 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10152 {
10153 tem = arg0; arg0 = arg1; arg1 = tem;
10154 code = swap_condition (code);
10155 }
10156
10157 /* If this is an equality or inequality test of a single bit, we can
10158 do this by shifting the bit being tested to the low-order bit and
10159 masking the result with the constant 1. If the condition was EQ,
10160 we xor it with 1. This does not require an scc insn and is faster
10161 than an scc insn even if we have it. */
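/* E.g., (X & 8) != 0 is computed as (X >> 3) & 1, and the == 0 form gets
   an extra XOR with 1; when the bit tested is the sign bit, the trailing
   AND is omitted because the unsigned shift already leaves only that
   bit.  */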
10162
10163 if ((code == NE || code == EQ)
10164 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10165 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10166 {
10167 tree inner = TREE_OPERAND (arg0, 0);
10168 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10169 int ops_unsignedp;
10170
10171 /* If INNER is a right shift of a constant and it plus BITNUM does
10172 not overflow, adjust BITNUM and INNER. */
10173
10174 if (TREE_CODE (inner) == RSHIFT_EXPR
10175 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10176 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10177 && bitnum < TYPE_PRECISION (type)
10178 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10179 bitnum - TYPE_PRECISION (type)))
10180 {
10181 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10182 inner = TREE_OPERAND (inner, 0);
10183 }
10184
10185 /* If we are going to be able to omit the AND below, we must do our
10186 operations as unsigned. If we must use the AND, we have a choice.
10187 Normally unsigned is faster, but for some machines signed is. */
10188 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10189 #ifdef LOAD_EXTEND_OP
10190 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10191 #else
10192 : 1
10193 #endif
10194 );
10195
10196 if (subtarget == 0 || GET_CODE (subtarget) != REG
10197 || GET_MODE (subtarget) != operand_mode
10198 || ! safe_from_p (subtarget, inner, 1))
10199 subtarget = 0;
10200
10201 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10202
10203 if (bitnum != 0)
10204 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10205 size_int (bitnum), subtarget, ops_unsignedp);
10206
10207 if (GET_MODE (op0) != mode)
10208 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10209
10210 if ((code == EQ && ! invert) || (code == NE && invert))
10211 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10212 ops_unsignedp, OPTAB_LIB_WIDEN);
10213
10214 /* Put the AND last so it can combine with more things. */
10215 if (bitnum != TYPE_PRECISION (type) - 1)
10216 op0 = expand_and (op0, const1_rtx, subtarget);
10217
10218 return op0;
10219 }
10220
10221 /* Now see if we are likely to be able to do this. Return if not. */
10222 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10223 return 0;
10224
10225 icode = setcc_gen_code[(int) code];
10226 if (icode == CODE_FOR_nothing
10227 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10228 {
10229 /* We can only do this if it is one of the special cases that
10230 can be handled without an scc insn. */
10231 if ((code == LT && integer_zerop (arg1))
10232 || (! only_cheap && code == GE && integer_zerop (arg1)))
10233 ;
10234 else if (BRANCH_COST >= 0
10235 && ! only_cheap && (code == NE || code == EQ)
10236 && TREE_CODE (type) != REAL_TYPE
10237 && ((abs_optab->handlers[(int) operand_mode].insn_code
10238 != CODE_FOR_nothing)
10239 || (ffs_optab->handlers[(int) operand_mode].insn_code
10240 != CODE_FOR_nothing)))
10241 ;
10242 else
10243 return 0;
10244 }
10245
10246 preexpand_calls (exp);
10247 if (subtarget == 0 || GET_CODE (subtarget) != REG
10248 || GET_MODE (subtarget) != operand_mode
10249 || ! safe_from_p (subtarget, arg1, 1))
10250 subtarget = 0;
10251
10252 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10253 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10254
10255 if (target == 0)
10256 target = gen_reg_rtx (mode);
10257
10258 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10259 because, if emit_store_flag does anything, it will succeed and
10260 OP0 and OP1 will not be used subsequently. */
10261
10262 result = emit_store_flag (target, code,
10263 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10264 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10265 operand_mode, unsignedp, 1);
10266
10267 if (result)
10268 {
10269 if (invert)
10270 result = expand_binop (mode, xor_optab, result, const1_rtx,
10271 result, 0, OPTAB_LIB_WIDEN);
10272 return result;
10273 }
10274
10275 /* If this failed, we have to do this with set/compare/jump/set code. */
10276 if (GET_CODE (target) != REG
10277 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10278 target = gen_reg_rtx (GET_MODE (target));
10279
10280 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10281 result = compare_from_rtx (op0, op1, code, unsignedp,
10282 operand_mode, NULL_RTX, 0);
10283 if (GET_CODE (result) == CONST_INT)
10284 return (((result == const0_rtx && ! invert)
10285 || (result != const0_rtx && invert))
10286 ? const0_rtx : const1_rtx);
10287
10288 label = gen_label_rtx ();
10289 if (bcc_gen_fctn[(int) code] == 0)
10290 abort ();
10291
10292 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10293 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10294 emit_label (label);
10295
10296 return target;
10297 }
10298 \f
10299 /* Generate a tablejump instruction (used for switch statements). */
10300
10301 #ifdef HAVE_tablejump
10302
10303 /* INDEX is the value being switched on, with the lowest value
10304 in the table already subtracted.
10305 MODE is its expected mode (needed if INDEX is constant).
10306 RANGE is the length of the jump table.
10307 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10308
10309 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10310 index value is out of range. */
10311
10312 void
10313 do_tablejump (index, mode, range, table_label, default_label)
10314 rtx index, range, table_label, default_label;
10315 enum machine_mode mode;
10316 {
10317 register rtx temp, vector;
10318
10319 /* Do an unsigned comparison (in the proper mode) between the index
10320 expression and the value which represents the length of the range.
10321 Since we just finished subtracting the lower bound of the range
10322 from the index expression, this comparison allows us to simultaneously
10323 check that the original index expression value is both greater than
10324 or equal to the minimum value of the range and less than or equal to
10325 the maximum value of the range. */
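/* E.g., for case values 5 through 10 the caller has already subtracted 5,
   so the single unsigned test INDEX > 5 also rejects original values below
   5, which have wrapped around to very large unsigned numbers.  */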
10326
10327 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10328 0, default_label);
10329
10330 /* If index is in range, it must fit in Pmode.
10331 Convert to Pmode so we can index with it. */
10332 if (mode != Pmode)
10333 index = convert_to_mode (Pmode, index, 1);
10334
10335 /* Don't let a MEM slip through, because then INDEX that comes
10336 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10337 and break_out_memory_refs will go to work on it and mess it up. */
10338 #ifdef PIC_CASE_VECTOR_ADDRESS
10339 if (flag_pic && GET_CODE (index) != REG)
10340 index = copy_to_mode_reg (Pmode, index);
10341 #endif
10342
10343 /* If flag_force_addr were to affect this address
10344 it could interfere with the tricky assumptions made
10345 about addresses that contain label-refs,
10346 which may be valid only very near the tablejump itself. */
10347 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10348 GET_MODE_SIZE, because this indicates how large insns are. The other
10349 uses should all be Pmode, because they are addresses. This code
10350 could fail if addresses and insns are not the same size. */
10351 index = gen_rtx_PLUS (Pmode,
10352 gen_rtx_MULT (Pmode, index,
10353 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10354 gen_rtx_LABEL_REF (Pmode, table_label));
10355 #ifdef PIC_CASE_VECTOR_ADDRESS
10356 if (flag_pic)
10357 index = PIC_CASE_VECTOR_ADDRESS (index);
10358 else
10359 #endif
10360 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10361 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10362 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10363 RTX_UNCHANGING_P (vector) = 1;
10364 convert_move (temp, vector, 0);
10365
10366 emit_jump_insn (gen_tablejump (temp, table_label));
10367
10368 /* If we are generating PIC code or if the table is PC-relative, the
10369 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10370 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10371 emit_barrier ();
10372 }
10373
10374 #endif /* HAVE_tablejump */