Index: ChangeLog
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
83
84
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
111 };
112
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
115
116 struct store_by_pieces
117 {
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
125 PTR constfundata;
126 int reverse;
127 };
128
129 static rtx enqueue_insn PARAMS ((rtx, rtx));
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
131 PARAMS ((unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
134 struct move_by_pieces *));
135 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
136 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
137 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
138 static tree emit_block_move_libcall_fn PARAMS ((int));
139 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
150 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
151 static tree clear_storage_libcall_fn PARAMS ((int));
152 static rtx compress_float_constant PARAMS ((rtx, rtx));
153 static rtx get_subtarget PARAMS ((rtx));
154 static int is_zeros_p PARAMS ((tree));
155 static int mostly_zeros_p PARAMS ((tree));
156 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
157 HOST_WIDE_INT, enum machine_mode,
158 tree, tree, int, int));
159 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int, tree,
163 int));
164 static rtx var_rtx PARAMS ((tree));
165 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
166 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
167 static int is_aligning_offset PARAMS ((tree, tree));
168 static rtx expand_increment PARAMS ((tree, int, int));
169 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
170 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
171 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
172 rtx, rtx));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 #ifdef PUSH_ROUNDING
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 #endif
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
182
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
185
186 /* Record for each mode whether we can float-extend from memory. */
187
188 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
189
190 /* If a memory-to-memory move would take MOVE_RATIO or more simple
191 move-instruction sequences, we will do a movstr or libcall instead. */
192
193 #ifndef MOVE_RATIO
194 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
195 #define MOVE_RATIO 2
196 #else
197 /* If we are optimizing for space (-Os), cut down the default move ratio. */
198 #define MOVE_RATIO (optimize_size ? 3 : 15)
199 #endif
200 #endif
201
202 /* This macro is used to determine whether move_by_pieces should be called
203 to perform a structure copy. */
204 #ifndef MOVE_BY_PIECES_P
205 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
206 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
207 #endif
208
209 /* If a clear memory operation would take CLEAR_RATIO or more simple
210 move-instruction sequences, we will do a clrstr or libcall instead. */
211
212 #ifndef CLEAR_RATIO
213 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
214 #define CLEAR_RATIO 2
215 #else
216 /* If we are optimizing for space, cut down the default clear ratio. */
217 #define CLEAR_RATIO (optimize_size ? 3 : 15)
218 #endif
219 #endif
220
221 /* This macro is used to determine whether clear_by_pieces should be
222 called to clear storage. */
223 #ifndef CLEAR_BY_PIECES_P
224 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
225 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
226 #endif
227
228 /* This array records the insn_code of insns to perform block moves. */
229 enum insn_code movstr_optab[NUM_MACHINE_MODES];
230
231 /* This array records the insn_code of insns to perform block clears. */
232 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
233
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
239 \f
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
242
243 void
244 init_expr_once ()
245 {
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
251
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
261
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
265
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
268 {
269 int regno;
270
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
275
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
278
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
283 {
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
286
287 REGNO (reg) = regno;
288
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
293
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
298
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
303
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
308 }
309 }
310
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
315 {
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 {
320 enum insn_code ic;
321
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
325
326 PUT_MODE (mem, srcmode);
327
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
330 }
331 }
332 }
333
334 /* This is run at the start of compiling a function. */
335
336 void
337 init_expr ()
338 {
339 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
340
341 pending_chain = 0;
342 pending_stack_adjust = 0;
343 stack_pointer_delta = 0;
344 inhibit_defer_pop = 0;
345 saveregs_value = 0;
346 apply_args_value = 0;
347 forced_labels = 0;
348 }
349
350 /* Small sanity check that the queue is empty at the end of a function. */
351
352 void
353 finish_expr_for_function ()
354 {
355 if (pending_chain)
356 abort ();
357 }
358 \f
359 /* Manage the queue of increment instructions to be output
360 for POSTINCREMENT_EXPR expressions, etc. */
361
362 /* Queue up to increment (or change) VAR later. BODY says how:
363 BODY should be the same thing you would pass to emit_insn
364 to increment right away. It will go to emit_insn later on.
365
366 The value is a QUEUED expression to be used in place of VAR
367 where you want to guarantee the pre-incrementation value of VAR. */
368
369 static rtx
370 enqueue_insn (var, body)
371 rtx var, body;
372 {
373 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
374 body, pending_chain);
375 return pending_chain;
376 }
377
378 /* Use protect_from_queue to convert a QUEUED expression
379 into something that you can put immediately into an instruction.
380 If the queued incrementation has not happened yet,
381 protect_from_queue returns the variable itself.
382 If the incrementation has happened, protect_from_queue returns a temp
383 that contains a copy of the old value of the variable.
384
385 Any time an rtx which might possibly be a QUEUED is to be put
386 into an instruction, it must be passed through protect_from_queue first.
387 QUEUED expressions are not meaningful in instructions.
388
389 Do not pass a value through protect_from_queue and then hold
390 on to it for a while before putting it in an instruction!
391 If the queue is flushed in between, incorrect code will result. */
392
393 rtx
394 protect_from_queue (x, modify)
395 rtx x;
396 int modify;
397 {
398 RTX_CODE code = GET_CODE (x);
399
400 #if 0 /* A QUEUED can hang around after the queue is forced out. */
401 /* Shortcut for most common case. */
402 if (pending_chain == 0)
403 return x;
404 #endif
405
406 if (code != QUEUED)
407 {
408 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
409 use of autoincrement. Make a copy of the contents of the memory
410 location rather than a copy of the address, but not if the value is
411 of mode BLKmode. Don't modify X in place since it might be
412 shared. */
413 if (code == MEM && GET_MODE (x) != BLKmode
414 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
415 {
416 rtx y = XEXP (x, 0);
417 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
418
419 if (QUEUED_INSN (y))
420 {
421 rtx temp = gen_reg_rtx (GET_MODE (x));
422
423 emit_insn_before (gen_move_insn (temp, new),
424 QUEUED_INSN (y));
425 return temp;
426 }
427
428 /* Copy the address into a pseudo, so that the returned value
429 remains correct across calls to emit_queue. */
430 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
431 }
432
433 /* Otherwise, recursively protect the subexpressions of all
434 the kinds of rtx's that can contain a QUEUED. */
435 if (code == MEM)
436 {
437 rtx tem = protect_from_queue (XEXP (x, 0), 0);
438 if (tem != XEXP (x, 0))
439 {
440 x = copy_rtx (x);
441 XEXP (x, 0) = tem;
442 }
443 }
444 else if (code == PLUS || code == MULT)
445 {
446 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
447 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
448 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
449 {
450 x = copy_rtx (x);
451 XEXP (x, 0) = new0;
452 XEXP (x, 1) = new1;
453 }
454 }
455 return x;
456 }
457 /* If the increment has not happened, use the variable itself. Copy it
458 into a new pseudo so that the value remains correct across calls to
459 emit_queue. */
460 if (QUEUED_INSN (x) == 0)
461 return copy_to_reg (QUEUED_VAR (x));
462 /* If the increment has happened and a pre-increment copy exists,
463 use that copy. */
464 if (QUEUED_COPY (x) != 0)
465 return QUEUED_COPY (x);
466 /* The increment has happened but we haven't set up a pre-increment copy.
467 Set one up now, and use it. */
468 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
469 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
470 QUEUED_INSN (x));
471 return QUEUED_COPY (x);
472 }
473
474 /* Return nonzero if X contains a QUEUED expression:
475 if it contains anything that will be altered by a queued increment.
476 We handle only combinations of MEM, PLUS, MINUS and MULT operators
477 since memory addresses generally contain only those. */
478
479 int
480 queued_subexp_p (x)
481 rtx x;
482 {
483 enum rtx_code code = GET_CODE (x);
484 switch (code)
485 {
486 case QUEUED:
487 return 1;
488 case MEM:
489 return queued_subexp_p (XEXP (x, 0));
490 case MULT:
491 case PLUS:
492 case MINUS:
493 return (queued_subexp_p (XEXP (x, 0))
494 || queued_subexp_p (XEXP (x, 1)));
495 default:
496 return 0;
497 }
498 }
499
500 /* Perform all the pending incrementations. */
501
502 void
503 emit_queue ()
504 {
505 rtx p;
506 while ((p = pending_chain))
507 {
508 rtx body = QUEUED_BODY (p);
509
510 switch (GET_CODE (body))
511 {
512 case INSN:
513 case JUMP_INSN:
514 case CALL_INSN:
515 case CODE_LABEL:
516 case BARRIER:
517 case NOTE:
518 QUEUED_INSN (p) = body;
519 emit_insn (body);
520 break;
521
522 #ifdef ENABLE_CHECKING
523 case SEQUENCE:
524 abort ();
525 break;
526 #endif
527
528 default:
529 QUEUED_INSN (p) = emit_insn (body);
530 break;
531 }
532
533 pending_chain = QUEUED_NEXT (p);
534 }
535 }
536 \f
537 /* Copy data from FROM to TO, where the machine modes are not the same.
538 Both modes may be integer, or both may be floating.
539 UNSIGNEDP should be nonzero if FROM is an unsigned type.
540 This causes zero-extension instead of sign-extension. */
541
542 void
543 convert_move (to, from, unsignedp)
544 rtx to, from;
545 int unsignedp;
546 {
547 enum machine_mode to_mode = GET_MODE (to);
548 enum machine_mode from_mode = GET_MODE (from);
549 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
550 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
551 enum insn_code code;
552 rtx libcall;
553
554 /* rtx code for making an equivalent value. */
555 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
556 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
557
558 to = protect_from_queue (to, 1);
559 from = protect_from_queue (from, 0);
560
561 if (to_real != from_real)
562 abort ();
563
564 /* If FROM is a SUBREG that indicates that we have already done at least
565 the required extension, strip it. We don't handle such SUBREGs as
566 TO here. */
567
568 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
569 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
570 >= GET_MODE_SIZE (to_mode))
571 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
572 from = gen_lowpart (to_mode, from), from_mode = to_mode;
573
574 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
575 abort ();
576
577 if (to_mode == from_mode
578 || (from_mode == VOIDmode && CONSTANT_P (from)))
579 {
580 emit_move_insn (to, from);
581 return;
582 }
583
584 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
585 {
586 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
587 abort ();
588
589 if (VECTOR_MODE_P (to_mode))
590 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
591 else
592 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
593
594 emit_move_insn (to, from);
595 return;
596 }
597
598 if (to_real != from_real)
599 abort ();
600
601 if (to_real)
602 {
603 rtx value, insns;
604
605 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
606 {
607 /* Try converting directly if the insn is supported. */
608 if ((code = can_extend_p (to_mode, from_mode, 0))
609 != CODE_FOR_nothing)
610 {
611 emit_unop_insn (code, to, from, UNKNOWN);
612 return;
613 }
614 }
615
616 #ifdef HAVE_trunchfqf2
617 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
618 {
619 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
620 return;
621 }
622 #endif
623 #ifdef HAVE_trunctqfqf2
624 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
625 {
626 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncsfqf2
631 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncdfqf2
638 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncxfqf2
645 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_trunctfqf2
652 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
653 {
654 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658
659 #ifdef HAVE_trunctqfhf2
660 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
661 {
662 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncsfhf2
667 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncdfhf2
674 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfhf2
681 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfhf2
688 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694
695 #ifdef HAVE_truncsftqf2
696 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
697 {
698 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
699 return;
700 }
701 #endif
702 #ifdef HAVE_truncdftqf2
703 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
704 {
705 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_truncxftqf2
710 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
711 {
712 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716 #ifdef HAVE_trunctftqf2
717 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
718 {
719 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
720 return;
721 }
722 #endif
723
724 #ifdef HAVE_truncdfsf2
725 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
726 {
727 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
728 return;
729 }
730 #endif
731 #ifdef HAVE_truncxfsf2
732 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
733 {
734 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
735 return;
736 }
737 #endif
738 #ifdef HAVE_trunctfsf2
739 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
740 {
741 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
742 return;
743 }
744 #endif
745 #ifdef HAVE_truncxfdf2
746 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
747 {
748 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
749 return;
750 }
751 #endif
752 #ifdef HAVE_trunctfdf2
753 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
754 {
755 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
756 return;
757 }
758 #endif
759
760 libcall = (rtx) 0;
761 switch (from_mode)
762 {
763 case SFmode:
764 switch (to_mode)
765 {
766 case DFmode:
767 libcall = extendsfdf2_libfunc;
768 break;
769
770 case XFmode:
771 libcall = extendsfxf2_libfunc;
772 break;
773
774 case TFmode:
775 libcall = extendsftf2_libfunc;
776 break;
777
778 default:
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797
798 default:
799 break;
800 }
801 break;
802
803 case XFmode:
804 switch (to_mode)
805 {
806 case SFmode:
807 libcall = truncxfsf2_libfunc;
808 break;
809
810 case DFmode:
811 libcall = truncxfdf2_libfunc;
812 break;
813
814 default:
815 break;
816 }
817 break;
818
819 case TFmode:
820 switch (to_mode)
821 {
822 case SFmode:
823 libcall = trunctfsf2_libfunc;
824 break;
825
826 case DFmode:
827 libcall = trunctfdf2_libfunc;
828 break;
829
830 default:
831 break;
832 }
833 break;
834
835 default:
836 break;
837 }
838
839 if (libcall == (rtx) 0)
840 /* This conversion is not implemented yet. */
841 abort ();
842
843 start_sequence ();
844 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
845 1, from, from_mode);
846 insns = get_insns ();
847 end_sequence ();
848 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
849 from));
850 return;
851 }
852
853 /* Now both modes are integers. */
854
855 /* Handle expanding beyond a word. */
856 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
857 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
858 {
859 rtx insns;
860 rtx lowpart;
861 rtx fill_value;
862 rtx lowfrom;
863 int i;
864 enum machine_mode lowpart_mode;
865 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
866
867 /* Try converting directly if the insn is supported. */
868 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
869 != CODE_FOR_nothing)
870 {
871 /* If FROM is a SUBREG, put it into a register. Do this
872 so that we always generate the same set of insns for
873 better cse'ing; if an intermediate assignment occurred,
874 we won't be doing the operation directly on the SUBREG. */
875 if (optimize > 0 && GET_CODE (from) == SUBREG)
876 from = force_reg (from_mode, from);
877 emit_unop_insn (code, to, from, equiv_code);
878 return;
879 }
880 /* Next, try converting via full word. */
881 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
882 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
883 != CODE_FOR_nothing))
884 {
885 if (GET_CODE (to) == REG)
886 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
887 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
888 emit_unop_insn (code, to,
889 gen_lowpart (word_mode, to), equiv_code);
890 return;
891 }
892
893 /* No special multiword conversion insn; do it by hand. */
894 start_sequence ();
895
896 /* Since we will turn this into a no conflict block, we must ensure
897 that the source does not overlap the target. */
898
899 if (reg_overlap_mentioned_p (to, from))
900 from = force_reg (from_mode, from);
901
902 /* Get a copy of FROM widened to a word, if necessary. */
903 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
904 lowpart_mode = word_mode;
905 else
906 lowpart_mode = from_mode;
907
908 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
909
910 lowpart = gen_lowpart (lowpart_mode, to);
911 emit_move_insn (lowpart, lowfrom);
912
913 /* Compute the value to put in each remaining word. */
914 if (unsignedp)
915 fill_value = const0_rtx;
916 else
917 {
918 #ifdef HAVE_slt
919 if (HAVE_slt
920 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
921 && STORE_FLAG_VALUE == -1)
922 {
923 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
924 lowpart_mode, 0);
925 fill_value = gen_reg_rtx (word_mode);
926 emit_insn (gen_slt (fill_value));
927 }
928 else
929 #endif
930 {
931 fill_value
932 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
933 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
934 NULL_RTX, 0);
935 fill_value = convert_to_mode (word_mode, fill_value, 1);
936 }
937 }
938
939 /* Fill the remaining words. */
940 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
941 {
942 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
943 rtx subword = operand_subword (to, index, 1, to_mode);
944
945 if (subword == 0)
946 abort ();
947
948 if (fill_value != subword)
949 emit_move_insn (subword, fill_value);
950 }
951
952 insns = get_insns ();
953 end_sequence ();
954
955 emit_no_conflict_block (insns, to, from, NULL_RTX,
956 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
957 return;
958 }
959
960 /* Truncating multi-word to a word or less. */
961 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
962 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
963 {
964 if (!((GET_CODE (from) == MEM
965 && ! MEM_VOLATILE_P (from)
966 && direct_load[(int) to_mode]
967 && ! mode_dependent_address_p (XEXP (from, 0)))
968 || GET_CODE (from) == REG
969 || GET_CODE (from) == SUBREG))
970 from = force_reg (from_mode, from);
971 convert_move (to, gen_lowpart (word_mode, from), 0);
972 return;
973 }
974
975 /* Handle pointer conversion. */ /* SPEE 900220. */
976 if (to_mode == PQImode)
977 {
978 if (from_mode != QImode)
979 from = convert_to_mode (QImode, from, unsignedp);
980
981 #ifdef HAVE_truncqipqi2
982 if (HAVE_truncqipqi2)
983 {
984 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
985 return;
986 }
987 #endif /* HAVE_truncqipqi2 */
988 abort ();
989 }
990
991 if (from_mode == PQImode)
992 {
993 if (to_mode != QImode)
994 {
995 from = convert_to_mode (QImode, from, unsignedp);
996 from_mode = QImode;
997 }
998 else
999 {
1000 #ifdef HAVE_extendpqiqi2
1001 if (HAVE_extendpqiqi2)
1002 {
1003 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif /* HAVE_extendpqiqi2 */
1007 abort ();
1008 }
1009 }
1010
1011 if (to_mode == PSImode)
1012 {
1013 if (from_mode != SImode)
1014 from = convert_to_mode (SImode, from, unsignedp);
1015
1016 #ifdef HAVE_truncsipsi2
1017 if (HAVE_truncsipsi2)
1018 {
1019 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1020 return;
1021 }
1022 #endif /* HAVE_truncsipsi2 */
1023 abort ();
1024 }
1025
1026 if (from_mode == PSImode)
1027 {
1028 if (to_mode != SImode)
1029 {
1030 from = convert_to_mode (SImode, from, unsignedp);
1031 from_mode = SImode;
1032 }
1033 else
1034 {
1035 #ifdef HAVE_extendpsisi2
1036 if (! unsignedp && HAVE_extendpsisi2)
1037 {
1038 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1039 return;
1040 }
1041 #endif /* HAVE_extendpsisi2 */
1042 #ifdef HAVE_zero_extendpsisi2
1043 if (unsignedp && HAVE_zero_extendpsisi2)
1044 {
1045 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1046 return;
1047 }
1048 #endif /* HAVE_zero_extendpsisi2 */
1049 abort ();
1050 }
1051 }
1052
1053 if (to_mode == PDImode)
1054 {
1055 if (from_mode != DImode)
1056 from = convert_to_mode (DImode, from, unsignedp);
1057
1058 #ifdef HAVE_truncdipdi2
1059 if (HAVE_truncdipdi2)
1060 {
1061 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1062 return;
1063 }
1064 #endif /* HAVE_truncdipdi2 */
1065 abort ();
1066 }
1067
1068 if (from_mode == PDImode)
1069 {
1070 if (to_mode != DImode)
1071 {
1072 from = convert_to_mode (DImode, from, unsignedp);
1073 from_mode = DImode;
1074 }
1075 else
1076 {
1077 #ifdef HAVE_extendpdidi2
1078 if (HAVE_extendpdidi2)
1079 {
1080 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1081 return;
1082 }
1083 #endif /* HAVE_extendpdidi2 */
1084 abort ();
1085 }
1086 }
1087
1088 /* Now follow all the conversions between integers
1089 no more than a word long. */
1090
1091 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1092 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1093 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1094 GET_MODE_BITSIZE (from_mode)))
1095 {
1096 if (!((GET_CODE (from) == MEM
1097 && ! MEM_VOLATILE_P (from)
1098 && direct_load[(int) to_mode]
1099 && ! mode_dependent_address_p (XEXP (from, 0)))
1100 || GET_CODE (from) == REG
1101 || GET_CODE (from) == SUBREG))
1102 from = force_reg (from_mode, from);
1103 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1104 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1105 from = copy_to_reg (from);
1106 emit_move_insn (to, gen_lowpart (to_mode, from));
1107 return;
1108 }
1109
1110 /* Handle extension. */
1111 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1112 {
1113 /* Convert directly if that works. */
1114 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1115 != CODE_FOR_nothing)
1116 {
1117 if (flag_force_mem)
1118 from = force_not_mem (from);
1119
1120 emit_unop_insn (code, to, from, equiv_code);
1121 return;
1122 }
1123 else
1124 {
1125 enum machine_mode intermediate;
1126 rtx tmp;
1127 tree shift_amount;
1128
1129 /* Search for a mode to convert via. */
1130 for (intermediate = from_mode; intermediate != VOIDmode;
1131 intermediate = GET_MODE_WIDER_MODE (intermediate))
1132 if (((can_extend_p (to_mode, intermediate, unsignedp)
1133 != CODE_FOR_nothing)
1134 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1135 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1136 GET_MODE_BITSIZE (intermediate))))
1137 && (can_extend_p (intermediate, from_mode, unsignedp)
1138 != CODE_FOR_nothing))
1139 {
1140 convert_move (to, convert_to_mode (intermediate, from,
1141 unsignedp), unsignedp);
1142 return;
1143 }
1144
1145 /* No suitable intermediate mode.
1146 Generate what we need with shifts. */
1147 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1148 - GET_MODE_BITSIZE (from_mode), 0);
1149 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1150 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1151 to, unsignedp);
1152 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1153 to, unsignedp);
1154 if (tmp != to)
1155 emit_move_insn (to, tmp);
1156 return;
1157 }
1158 }
1159
1160 /* Support special truncate insns for certain modes. */
1161
1162 if (from_mode == DImode && to_mode == SImode)
1163 {
1164 #ifdef HAVE_truncdisi2
1165 if (HAVE_truncdisi2)
1166 {
1167 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == DImode && to_mode == HImode)
1176 {
1177 #ifdef HAVE_truncdihi2
1178 if (HAVE_truncdihi2)
1179 {
1180 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == DImode && to_mode == QImode)
1189 {
1190 #ifdef HAVE_truncdiqi2
1191 if (HAVE_truncdiqi2)
1192 {
1193 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == SImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_truncsihi2
1204 if (HAVE_truncsihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == SImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_truncsiqi2
1217 if (HAVE_truncsiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 if (from_mode == HImode && to_mode == QImode)
1228 {
1229 #ifdef HAVE_trunchiqi2
1230 if (HAVE_trunchiqi2)
1231 {
1232 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1233 return;
1234 }
1235 #endif
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 return;
1238 }
1239
1240 if (from_mode == TImode && to_mode == DImode)
1241 {
1242 #ifdef HAVE_trunctidi2
1243 if (HAVE_trunctidi2)
1244 {
1245 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1246 return;
1247 }
1248 #endif
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 return;
1251 }
1252
1253 if (from_mode == TImode && to_mode == SImode)
1254 {
1255 #ifdef HAVE_trunctisi2
1256 if (HAVE_trunctisi2)
1257 {
1258 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1259 return;
1260 }
1261 #endif
1262 convert_move (to, force_reg (from_mode, from), unsignedp);
1263 return;
1264 }
1265
1266 if (from_mode == TImode && to_mode == HImode)
1267 {
1268 #ifdef HAVE_trunctihi2
1269 if (HAVE_trunctihi2)
1270 {
1271 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1272 return;
1273 }
1274 #endif
1275 convert_move (to, force_reg (from_mode, from), unsignedp);
1276 return;
1277 }
1278
1279 if (from_mode == TImode && to_mode == QImode)
1280 {
1281 #ifdef HAVE_trunctiqi2
1282 if (HAVE_trunctiqi2)
1283 {
1284 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1285 return;
1286 }
1287 #endif
1288 convert_move (to, force_reg (from_mode, from), unsignedp);
1289 return;
1290 }
1291
1292 /* Handle truncation of volatile memrefs, and so on;
1293 the things that couldn't be truncated directly,
1294 and for which there was no special instruction. */
1295 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1296 {
1297 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1298 emit_move_insn (to, temp);
1299 return;
1300 }
1301
1302 /* Mode combination is not recognized. */
1303 abort ();
1304 }
1305
1306 /* Return an rtx for a value that would result
1307 from converting X to mode MODE.
1308 Both X and MODE may be floating, or both integer.
1309 UNSIGNEDP is nonzero if X is an unsigned value.
1310 This can be done by referring to a part of X in place
1311 or by copying to a new temporary with conversion.
1312
1313 This function *must not* call protect_from_queue
1314 except when putting X into an insn (in which case convert_move does it). */
1315
1316 rtx
1317 convert_to_mode (mode, x, unsignedp)
1318 enum machine_mode mode;
1319 rtx x;
1320 int unsignedp;
1321 {
1322 return convert_modes (mode, VOIDmode, x, unsignedp);
1323 }
1324
1325 /* Return an rtx for a value that would result
1326 from converting X from mode OLDMODE to mode MODE.
1327 Both modes may be floating, or both integer.
1328 UNSIGNEDP is nonzero if X is an unsigned value.
1329
1330 This can be done by referring to a part of X in place
1331 or by copying to a new temporary with conversion.
1332
1333 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1334
1335 This function *must not* call protect_from_queue
1336 except when putting X into an insn (in which case convert_move does it). */
1337
1338 rtx
1339 convert_modes (mode, oldmode, x, unsignedp)
1340 enum machine_mode mode, oldmode;
1341 rtx x;
1342 int unsignedp;
1343 {
1344 rtx temp;
1345
1346 /* If FROM is a SUBREG that indicates that we have already done at least
1347 the required extension, strip it. */
1348
1349 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1350 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1351 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1352 x = gen_lowpart (mode, x);
1353
1354 if (GET_MODE (x) != VOIDmode)
1355 oldmode = GET_MODE (x);
1356
1357 if (mode == oldmode)
1358 return x;
1359
1360 /* There is one case that we must handle specially: If we are converting
1361 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1362 we are to interpret the constant as unsigned, gen_lowpart will do
1363 the wrong if the constant appears negative. What we want to do is
1364 make the high-order word of the constant zero, not all ones. */
1365
1366 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1367 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1368 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1369 {
1370 HOST_WIDE_INT val = INTVAL (x);
1371
1372 if (oldmode != VOIDmode
1373 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1374 {
1375 int width = GET_MODE_BITSIZE (oldmode);
1376
1377 /* We need to zero extend VAL. */
1378 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1379 }
1380
1381 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1382 }
1383
1384 /* We can do this with a gen_lowpart if both desired and current modes
1385 are integer, and this is either a constant integer, a register, or a
1386 non-volatile MEM. Except for the constant case where MODE is no
1387 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1388
1389 if ((GET_CODE (x) == CONST_INT
1390 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1391 || (GET_MODE_CLASS (mode) == MODE_INT
1392 && GET_MODE_CLASS (oldmode) == MODE_INT
1393 && (GET_CODE (x) == CONST_DOUBLE
1394 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1395 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1396 && direct_load[(int) mode])
1397 || (GET_CODE (x) == REG
1398 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1399 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1400 {
1401 /* ?? If we don't know OLDMODE, we have to assume here that
1402 X does not need sign- or zero-extension. This may not be
1403 the case, but it's the best we can do. */
1404 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1405 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1406 {
1407 HOST_WIDE_INT val = INTVAL (x);
1408 int width = GET_MODE_BITSIZE (oldmode);
1409
1410 /* We must sign or zero-extend in this case. Start by
1411 zero-extending, then sign extend if we need to. */
1412 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1413 if (! unsignedp
1414 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1415 val |= (HOST_WIDE_INT) (-1) << width;
1416
1417 return gen_int_mode (val, mode);
1418 }
1419
1420 return gen_lowpart (mode, x);
1421 }
1422
1423 temp = gen_reg_rtx (mode);
1424 convert_move (temp, x, unsignedp);
1425 return temp;
1426 }
1427 \f
1428 /* This macro is used to determine what the largest unit size that
1429 move_by_pieces can use is. */
1430
1431 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1432 move efficiently, as opposed to MOVE_MAX which is the maximum
1433 number of bytes we can move with a single instruction. */
1434
1435 #ifndef MOVE_MAX_PIECES
1436 #define MOVE_MAX_PIECES MOVE_MAX
1437 #endif
1438
1439 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1440 store efficiently. Due to internal GCC limitations, this is
1441 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1442 for an immediate constant. */
1443
1444 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1445
1446 /* Generate several move instructions to copy LEN bytes from block FROM to
1447 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1448 and TO through protect_from_queue before calling.
1449
1450 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1451 used to push FROM to the stack.
1452
1453 ALIGN is maximum alignment we can assume. */
1454
1455 void
1456 move_by_pieces (to, from, len, align)
1457 rtx to, from;
1458 unsigned HOST_WIDE_INT len;
1459 unsigned int align;
1460 {
1461 struct move_by_pieces data;
1462 rtx to_addr, from_addr = XEXP (from, 0);
1463 unsigned int max_size = MOVE_MAX_PIECES + 1;
1464 enum machine_mode mode = VOIDmode, tmode;
1465 enum insn_code icode;
1466
1467 data.offset = 0;
1468 data.from_addr = from_addr;
1469 if (to)
1470 {
1471 to_addr = XEXP (to, 0);
1472 data.to = to;
1473 data.autinc_to
1474 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1475 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1476 data.reverse
1477 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1478 }
1479 else
1480 {
1481 to_addr = NULL_RTX;
1482 data.to = NULL_RTX;
1483 data.autinc_to = 1;
1484 #ifdef STACK_GROWS_DOWNWARD
1485 data.reverse = 1;
1486 #else
1487 data.reverse = 0;
1488 #endif
1489 }
1490 data.to_addr = to_addr;
1491 data.from = from;
1492 data.autinc_from
1493 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1494 || GET_CODE (from_addr) == POST_INC
1495 || GET_CODE (from_addr) == POST_DEC);
1496
1497 data.explicit_inc_from = 0;
1498 data.explicit_inc_to = 0;
1499 if (data.reverse) data.offset = len;
1500 data.len = len;
1501
1502 /* If copying requires more than two move insns,
1503 copy addresses to registers (to make displacements shorter)
1504 and use post-increment if available. */
1505 if (!(data.autinc_from && data.autinc_to)
1506 && move_by_pieces_ninsns (len, align) > 2)
1507 {
1508 /* Find the mode of the largest move... */
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1512 mode = tmode;
1513
1514 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1515 {
1516 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1517 data.autinc_from = 1;
1518 data.explicit_inc_from = -1;
1519 }
1520 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1521 {
1522 data.from_addr = copy_addr_to_reg (from_addr);
1523 data.autinc_from = 1;
1524 data.explicit_inc_from = 1;
1525 }
1526 if (!data.autinc_from && CONSTANT_P (from_addr))
1527 data.from_addr = copy_addr_to_reg (from_addr);
1528 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1529 {
1530 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1531 data.autinc_to = 1;
1532 data.explicit_inc_to = -1;
1533 }
1534 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1535 {
1536 data.to_addr = copy_addr_to_reg (to_addr);
1537 data.autinc_to = 1;
1538 data.explicit_inc_to = 1;
1539 }
1540 if (!data.autinc_to && CONSTANT_P (to_addr))
1541 data.to_addr = copy_addr_to_reg (to_addr);
1542 }
1543
1544 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1545 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1546 align = MOVE_MAX * BITS_PER_UNIT;
1547
1548 /* First move what we can in the largest integer mode, then go to
1549 successively smaller modes. */
1550
1551 while (max_size > 1)
1552 {
1553 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1554 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1555 if (GET_MODE_SIZE (tmode) < max_size)
1556 mode = tmode;
1557
1558 if (mode == VOIDmode)
1559 break;
1560
1561 icode = mov_optab->handlers[(int) mode].insn_code;
1562 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1563 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1564
1565 max_size = GET_MODE_SIZE (mode);
1566 }
1567
1568 /* The code above should have handled everything. */
1569 if (data.len > 0)
1570 abort ();
1571 }
1572
1573 /* Return number of insns required to move L bytes by pieces.
1574 ALIGN (in bits) is maximum alignment we can assume. */
1575
1576 static unsigned HOST_WIDE_INT
1577 move_by_pieces_ninsns (l, align)
1578 unsigned HOST_WIDE_INT l;
1579 unsigned int align;
1580 {
1581 unsigned HOST_WIDE_INT n_insns = 0;
1582 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1583
1584 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1585 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1586 align = MOVE_MAX * BITS_PER_UNIT;
1587
1588 while (max_size > 1)
1589 {
1590 enum machine_mode mode = VOIDmode, tmode;
1591 enum insn_code icode;
1592
1593 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1594 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1595 if (GET_MODE_SIZE (tmode) < max_size)
1596 mode = tmode;
1597
1598 if (mode == VOIDmode)
1599 break;
1600
1601 icode = mov_optab->handlers[(int) mode].insn_code;
1602 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1603 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1604
1605 max_size = GET_MODE_SIZE (mode);
1606 }
1607
1608 if (l)
1609 abort ();
1610 return n_insns;
1611 }
1612
1613 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1614 with move instructions for mode MODE. GENFUN is the gen_... function
1615 to make a move insn for that mode. DATA has all the other info. */
1616
1617 static void
1618 move_by_pieces_1 (genfun, mode, data)
1619 rtx (*genfun) PARAMS ((rtx, ...));
1620 enum machine_mode mode;
1621 struct move_by_pieces *data;
1622 {
1623 unsigned int size = GET_MODE_SIZE (mode);
1624 rtx to1 = NULL_RTX, from1;
1625
1626 while (data->len >= size)
1627 {
1628 if (data->reverse)
1629 data->offset -= size;
1630
1631 if (data->to)
1632 {
1633 if (data->autinc_to)
1634 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1635 data->offset);
1636 else
1637 to1 = adjust_address (data->to, mode, data->offset);
1638 }
1639
1640 if (data->autinc_from)
1641 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1642 data->offset);
1643 else
1644 from1 = adjust_address (data->from, mode, data->offset);
1645
1646 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1647 emit_insn (gen_add2_insn (data->to_addr,
1648 GEN_INT (-(HOST_WIDE_INT)size)));
1649 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1650 emit_insn (gen_add2_insn (data->from_addr,
1651 GEN_INT (-(HOST_WIDE_INT)size)));
1652
1653 if (data->to)
1654 emit_insn ((*genfun) (to1, from1));
1655 else
1656 {
1657 #ifdef PUSH_ROUNDING
1658 emit_single_push_insn (mode, from1, NULL);
1659 #else
1660 abort ();
1661 #endif
1662 }
1663
1664 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1665 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1666 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1667 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1668
1669 if (! data->reverse)
1670 data->offset += size;
1671
1672 data->len -= size;
1673 }
1674 }
1675 \f
1676 /* Emit code to move a block Y to a block X. This may be done with
1677 string-move instructions, with multiple scalar move instructions,
1678 or with a library call.
1679
1680 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1681 SIZE is an rtx that says how long they are.
1682 ALIGN is the maximum alignment we can assume they have.
1683 METHOD describes what kind of copy this is, and what mechanisms may be used.
1684
1685 Return the address of the new block, if memcpy is called and returns it,
1686 0 otherwise. */
1687
1688 rtx
1689 emit_block_move (x, y, size, method)
1690 rtx x, y, size;
1691 enum block_op_methods method;
1692 {
1693 bool may_use_call;
1694 rtx retval = 0;
1695 unsigned int align;
1696
1697 switch (method)
1698 {
1699 case BLOCK_OP_NORMAL:
1700 may_use_call = true;
1701 break;
1702
1703 case BLOCK_OP_CALL_PARM:
1704 may_use_call = block_move_libcall_safe_for_call_parm ();
1705
1706 /* Make inhibit_defer_pop nonzero around the library call
1707 to force it to pop the arguments right away. */
1708 NO_DEFER_POP;
1709 break;
1710
1711 case BLOCK_OP_NO_LIBCALL:
1712 may_use_call = false;
1713 break;
1714
1715 default:
1716 abort ();
1717 }
1718
1719 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1720
1721 if (GET_MODE (x) != BLKmode)
1722 abort ();
1723 if (GET_MODE (y) != BLKmode)
1724 abort ();
1725
1726 x = protect_from_queue (x, 1);
1727 y = protect_from_queue (y, 0);
1728 size = protect_from_queue (size, 0);
1729
1730 if (GET_CODE (x) != MEM)
1731 abort ();
1732 if (GET_CODE (y) != MEM)
1733 abort ();
1734 if (size == 0)
1735 abort ();
1736
1737 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1738 can be incorrect is coming from __builtin_memcpy. */
1739 if (GET_CODE (size) == CONST_INT)
1740 {
1741 x = shallow_copy_rtx (x);
1742 y = shallow_copy_rtx (y);
1743 set_mem_size (x, size);
1744 set_mem_size (y, size);
1745 }
1746
1747 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1748 move_by_pieces (x, y, INTVAL (size), align);
1749 else if (emit_block_move_via_movstr (x, y, size, align))
1750 ;
1751 else if (may_use_call)
1752 retval = emit_block_move_via_libcall (x, y, size);
1753 else
1754 emit_block_move_via_loop (x, y, size, align);
1755
1756 if (method == BLOCK_OP_CALL_PARM)
1757 OK_DEFER_POP;
1758
1759 return retval;
1760 }
1761
1762 /* A subroutine of emit_block_move. Returns true if calling the
1763 block move libcall will not clobber any parameters which may have
1764 already been placed on the stack. */
1765
1766 static bool
1767 block_move_libcall_safe_for_call_parm ()
1768 {
1769 if (PUSH_ARGS)
1770 return true;
1771 else
1772 {
1773 /* Check to see whether memcpy takes all register arguments. */
1774 static enum {
1775 takes_regs_uninit, takes_regs_no, takes_regs_yes
1776 } takes_regs = takes_regs_uninit;
1777
1778 switch (takes_regs)
1779 {
1780 case takes_regs_uninit:
1781 {
1782 CUMULATIVE_ARGS args_so_far;
1783 tree fn, arg;
1784
1785 fn = emit_block_move_libcall_fn (false);
1786 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1787
1788 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1789 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1790 {
1791 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1792 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1793 if (!tmp || !REG_P (tmp))
1794 goto fail_takes_regs;
1795 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1796 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1797 NULL_TREE, 1))
1798 goto fail_takes_regs;
1799 #endif
1800 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1801 }
1802 }
1803 takes_regs = takes_regs_yes;
1804 /* FALLTHRU */
1805
1806 case takes_regs_yes:
1807 return true;
1808
1809 fail_takes_regs:
1810 takes_regs = takes_regs_no;
1811 /* FALLTHRU */
1812 case takes_regs_no:
1813 return false;
1814
1815 default:
1816 abort ();
1817 }
1818 }
1819 }
1820
1821 /* A subroutine of emit_block_move. Expand a movstr pattern;
1822 return true if successful. */
1823
1824 static bool
1825 emit_block_move_via_movstr (x, y, size, align)
1826 rtx x, y, size;
1827 unsigned int align;
1828 {
1829 /* Try the most limited insn first, because there's no point
1830 including more than one in the machine description unless
1831 the more limited one has some advantage. */
1832
1833 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1834 enum machine_mode mode;
1835
1836 /* Since this is a move insn, we don't care about volatility. */
1837 volatile_ok = 1;
1838
1839 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1840 mode = GET_MODE_WIDER_MODE (mode))
1841 {
1842 enum insn_code code = movstr_optab[(int) mode];
1843 insn_operand_predicate_fn pred;
1844
1845 if (code != CODE_FOR_nothing
1846 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1847 here because if SIZE is less than the mode mask, as it is
1848 returned by the macro, it will definitely be less than the
1849 actual mode mask. */
1850 && ((GET_CODE (size) == CONST_INT
1851 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1852 <= (GET_MODE_MASK (mode) >> 1)))
1853 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1854 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1855 || (*pred) (x, BLKmode))
1856 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1857 || (*pred) (y, BLKmode))
1858 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1859 || (*pred) (opalign, VOIDmode)))
1860 {
1861 rtx op2;
1862 rtx last = get_last_insn ();
1863 rtx pat;
1864
1865 op2 = convert_to_mode (mode, size, 1);
1866 pred = insn_data[(int) code].operand[2].predicate;
1867 if (pred != 0 && ! (*pred) (op2, mode))
1868 op2 = copy_to_mode_reg (mode, op2);
1869
1870 /* ??? When called via emit_block_move_for_call, it'd be
1871 nice if there were some way to inform the backend, so
1872 that it doesn't fail the expansion because it thinks
1873 emitting the libcall would be more efficient. */
1874
1875 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1876 if (pat)
1877 {
1878 emit_insn (pat);
1879 volatile_ok = 0;
1880 return true;
1881 }
1882 else
1883 delete_insns_since (last);
1884 }
1885 }
1886
1887 volatile_ok = 0;
1888 return false;
1889 }
1890
1891 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1892 Return the return value from memcpy, 0 otherwise. */
1893
1894 static rtx
1895 emit_block_move_via_libcall (dst, src, size)
1896 rtx dst, src, size;
1897 {
1898 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1899 enum machine_mode size_mode;
1900 rtx retval;
1901
1902 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1903
1904 It is unsafe to save the value generated by protect_from_queue
1905 and reuse it later. Consider what happens if emit_queue is
1906 called before the return value from protect_from_queue is used.
1907
1908 Expansion of the CALL_EXPR below will call emit_queue before
1909 we are finished emitting RTL for argument setup. So if we are
1910 not careful we could get the wrong value for an argument.
1911
1912 To avoid this problem we go ahead and emit code to copy X, Y &
1913 SIZE into new pseudos. We can then place those new pseudos
1914 into an RTL_EXPR and use them later, even after a call to
1915 emit_queue.
1916
1917 Note this is not strictly needed for library calls since they
1918 do not call emit_queue before loading their arguments. However,
1919 we may need to have library calls call emit_queue in the future
1920 since failing to do so could cause problems for targets which
1921 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1922
1923 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1924 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1925
1926 if (TARGET_MEM_FUNCTIONS)
1927 size_mode = TYPE_MODE (sizetype);
1928 else
1929 size_mode = TYPE_MODE (unsigned_type_node);
1930 size = convert_to_mode (size_mode, size, 1);
1931 size = copy_to_mode_reg (size_mode, size);
1932
1933 /* It is incorrect to use the libcall calling conventions to call
1934 memcpy in this context. This could be a user call to memcpy and
1935 the user may wish to examine the return value from memcpy. For
1936 targets where libcalls and normal calls have different conventions
1937 for returning pointers, we could end up generating incorrect code.
1938
1939 For convenience, we generate the call to bcopy this way as well. */
1940
1941 dst_tree = make_tree (ptr_type_node, dst);
1942 src_tree = make_tree (ptr_type_node, src);
1943 if (TARGET_MEM_FUNCTIONS)
1944 size_tree = make_tree (sizetype, size);
1945 else
1946 size_tree = make_tree (unsigned_type_node, size);
1947
1948 fn = emit_block_move_libcall_fn (true);
1949 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1950 if (TARGET_MEM_FUNCTIONS)
1951 {
1952 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1953 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1954 }
1955 else
1956 {
1957 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1958 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1959 }
1960
1961 /* Now we have to build up the CALL_EXPR itself. */
1962 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1963 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1964 call_expr, arg_list, NULL_TREE);
1965 TREE_SIDE_EFFECTS (call_expr) = 1;
1966
1967 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1968
1969 /* If we are initializing a readonly value, show the above call
1970 clobbered it. Otherwise, a load from it may erroneously be
1971 hoisted from a loop. */
1972 if (RTX_UNCHANGING_P (dst))
1973 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1974
1975 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1976 }
1977
1978 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1979 for the function we use for block copies. The first time FOR_CALL
1980 is true, we call assemble_external. */
1981
1982 static GTY(()) tree block_move_fn;
1983
1984 static tree
1985 emit_block_move_libcall_fn (for_call)
1986 int for_call;
1987 {
1988 static bool emitted_extern;
1989 tree fn = block_move_fn, args;
1990
1991 if (!fn)
1992 {
1993 if (TARGET_MEM_FUNCTIONS)
1994 {
1995 fn = get_identifier ("memcpy");
1996 args = build_function_type_list (ptr_type_node, ptr_type_node,
1997 const_ptr_type_node, sizetype,
1998 NULL_TREE);
1999 }
2000 else
2001 {
2002 fn = get_identifier ("bcopy");
2003 args = build_function_type_list (void_type_node, const_ptr_type_node,
2004 ptr_type_node, unsigned_type_node,
2005 NULL_TREE);
2006 }
2007
2008 fn = build_decl (FUNCTION_DECL, fn, args);
2009 DECL_EXTERNAL (fn) = 1;
2010 TREE_PUBLIC (fn) = 1;
2011 DECL_ARTIFICIAL (fn) = 1;
2012 TREE_NOTHROW (fn) = 1;
2013
2014 block_move_fn = fn;
2015 }
2016
2017 if (for_call && !emitted_extern)
2018 {
2019 emitted_extern = true;
2020 make_decl_rtl (fn, NULL);
2021 assemble_external (fn);
2022 }
2023
2024 return fn;
2025 }
2026
2027 /* A subroutine of emit_block_move. Copy the data via an explicit
2028 loop. This is used only when libcalls are forbidden. */
2029 /* ??? It'd be nice to copy in hunks larger than QImode. */
2030
2031 static void
2032 emit_block_move_via_loop (x, y, size, align)
2033 rtx x, y, size;
2034 unsigned int align ATTRIBUTE_UNUSED;
2035 {
2036 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2037 enum machine_mode iter_mode;
2038
2039 iter_mode = GET_MODE (size);
2040 if (iter_mode == VOIDmode)
2041 iter_mode = word_mode;
2042
2043 top_label = gen_label_rtx ();
2044 cmp_label = gen_label_rtx ();
2045 iter = gen_reg_rtx (iter_mode);
2046
2047 emit_move_insn (iter, const0_rtx);
2048
2049 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2050 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2051 do_pending_stack_adjust ();
2052
2053 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2054
2055 emit_jump (cmp_label);
2056 emit_label (top_label);
2057
2058 tmp = convert_modes (Pmode, iter_mode, iter, true);
2059 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2060 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2061 x = change_address (x, QImode, x_addr);
2062 y = change_address (y, QImode, y_addr);
2063
2064 emit_move_insn (x, y);
2065
2066 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2067 true, OPTAB_LIB_WIDEN);
2068 if (tmp != iter)
2069 emit_move_insn (iter, tmp);
2070
2071 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2072 emit_label (cmp_label);
2073
2074 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2075 true, top_label);
2076
2077 emit_note (NULL, NOTE_INSN_LOOP_END);
2078 }
2079 \f
2080 /* Copy all or part of a value X into registers starting at REGNO.
2081 The number of registers to be filled is NREGS. */
2082
2083 void
2084 move_block_to_reg (regno, x, nregs, mode)
2085 int regno;
2086 rtx x;
2087 int nregs;
2088 enum machine_mode mode;
2089 {
2090 int i;
2091 #ifdef HAVE_load_multiple
2092 rtx pat;
2093 rtx last;
2094 #endif
2095
2096 if (nregs == 0)
2097 return;
2098
2099 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2100 x = validize_mem (force_const_mem (mode, x));
2101
2102 /* See if the machine can do this with a load multiple insn. */
2103 #ifdef HAVE_load_multiple
2104 if (HAVE_load_multiple)
2105 {
2106 last = get_last_insn ();
2107 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2108 GEN_INT (nregs));
2109 if (pat)
2110 {
2111 emit_insn (pat);
2112 return;
2113 }
2114 else
2115 delete_insns_since (last);
2116 }
2117 #endif
2118
2119 for (i = 0; i < nregs; i++)
2120 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2121 operand_subword_force (x, i, mode));
2122 }
2123
2124 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2125 The number of registers to be filled is NREGS. SIZE indicates the number
2126 of bytes in the object X. */
2127
2128 void
2129 move_block_from_reg (regno, x, nregs, size)
2130 int regno;
2131 rtx x;
2132 int nregs;
2133 int size;
2134 {
2135 int i;
2136 #ifdef HAVE_store_multiple
2137 rtx pat;
2138 rtx last;
2139 #endif
2140 enum machine_mode mode;
2141
2142 if (nregs == 0)
2143 return;
2144
2145 /* If SIZE is that of a mode no bigger than a word, just use that
2146 mode's store operation. */
2147 if (size <= UNITS_PER_WORD
2148 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
2149 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2150 {
2151 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2152 return;
2153 }
2154
2155 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2156 to the left before storing to memory. Note that the previous test
2157 doesn't handle all cases (e.g. SIZE == 3). */
2158 if (size < UNITS_PER_WORD
2159 && BYTES_BIG_ENDIAN
2160 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
2161 {
2162 rtx tem = operand_subword (x, 0, 1, BLKmode);
2163 rtx shift;
2164
2165 if (tem == 0)
2166 abort ();
2167
2168 shift = expand_shift (LSHIFT_EXPR, word_mode,
2169 gen_rtx_REG (word_mode, regno),
2170 build_int_2 ((UNITS_PER_WORD - size)
2171 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2172 emit_move_insn (tem, shift);
2173 return;
2174 }
2175
2176 /* See if the machine can do this with a store multiple insn. */
2177 #ifdef HAVE_store_multiple
2178 if (HAVE_store_multiple)
2179 {
2180 last = get_last_insn ();
2181 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2182 GEN_INT (nregs));
2183 if (pat)
2184 {
2185 emit_insn (pat);
2186 return;
2187 }
2188 else
2189 delete_insns_since (last);
2190 }
2191 #endif
2192
2193 for (i = 0; i < nregs; i++)
2194 {
2195 rtx tem = operand_subword (x, i, 1, BLKmode);
2196
2197 if (tem == 0)
2198 abort ();
2199
2200 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2201 }
2202 }
2203
2204 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2205 registers represented by a PARALLEL. SSIZE represents the total size of
2206 block SRC in bytes, or -1 if not known. */
2207 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2208 the balance will be in what would be the low-order memory addresses, i.e.
2209 left justified for big endian, right justified for little endian. This
2210 happens to be true for the targets currently using this support. If this
2211 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2212 would be needed. */
2213
2214 void
2215 emit_group_load (dst, orig_src, ssize)
2216 rtx dst, orig_src;
2217 int ssize;
2218 {
2219 rtx *tmps, src;
2220 int start, i;
2221
2222 if (GET_CODE (dst) != PARALLEL)
2223 abort ();
2224
2225 /* Check for a NULL entry, used to indicate that the parameter goes
2226 both on the stack and in registers. */
2227 if (XEXP (XVECEXP (dst, 0, 0), 0))
2228 start = 0;
2229 else
2230 start = 1;
2231
2232 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2233
2234 /* Process the pieces. */
2235 for (i = start; i < XVECLEN (dst, 0); i++)
2236 {
2237 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2238 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2239 unsigned int bytelen = GET_MODE_SIZE (mode);
2240 int shift = 0;
2241
2242 /* Handle trailing fragments that run over the size of the struct. */
2243 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2244 {
2245 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2246 bytelen = ssize - bytepos;
2247 if (bytelen <= 0)
2248 abort ();
2249 }
2250
2251 /* If we won't be loading directly from memory, protect the real source
2252 from strange tricks we might play; but make sure that the source can
2253 be loaded directly into the destination. */
2254 src = orig_src;
2255 if (GET_CODE (orig_src) != MEM
2256 && (!CONSTANT_P (orig_src)
2257 || (GET_MODE (orig_src) != mode
2258 && GET_MODE (orig_src) != VOIDmode)))
2259 {
2260 if (GET_MODE (orig_src) == VOIDmode)
2261 src = gen_reg_rtx (mode);
2262 else
2263 src = gen_reg_rtx (GET_MODE (orig_src));
2264
2265 emit_move_insn (src, orig_src);
2266 }
2267
2268 /* Optimize the access just a bit. */
2269 if (GET_CODE (src) == MEM
2270 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2271 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2272 && bytelen == GET_MODE_SIZE (mode))
2273 {
2274 tmps[i] = gen_reg_rtx (mode);
2275 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2276 }
2277 else if (GET_CODE (src) == CONCAT)
2278 {
2279 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2280 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2281
2282 if ((bytepos == 0 && bytelen == slen0)
2283 || (bytepos != 0 && bytepos + bytelen <= slen))
2284 {
2285 /* The following assumes that the concatenated objects all
2286 have the same size. In this case, a simple calculation
2287 can be used to determine the object and the bit field
2288 to be extracted. */
2289 tmps[i] = XEXP (src, bytepos / slen0);
2290 if (! CONSTANT_P (tmps[i])
2291 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2292 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2293 (bytepos % slen0) * BITS_PER_UNIT,
2294 1, NULL_RTX, mode, mode, ssize);
2295 }
2296 else if (bytepos == 0)
2297 {
2298 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2299 emit_move_insn (mem, src);
2300 tmps[i] = adjust_address (mem, mode, 0);
2301 }
2302 else
2303 abort ();
2304 }
2305 else if (CONSTANT_P (src)
2306 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2307 tmps[i] = src;
2308 else
2309 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2310 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2311 mode, mode, ssize);
2312
2313 if (BYTES_BIG_ENDIAN && shift)
2314 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2315 tmps[i], 0, OPTAB_WIDEN);
2316 }
2317
2318 emit_queue ();
2319
2320 /* Copy the extracted pieces into the proper (probable) hard regs. */
2321 for (i = start; i < XVECLEN (dst, 0); i++)
2322 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2323 }
2324
2325 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2326 registers represented by a PARALLEL. SSIZE represents the total size of
2327 block DST, or -1 if not known. */
2328
2329 void
2330 emit_group_store (orig_dst, src, ssize)
2331 rtx orig_dst, src;
2332 int ssize;
2333 {
2334 rtx *tmps, dst;
2335 int start, i;
2336
2337 if (GET_CODE (src) != PARALLEL)
2338 abort ();
2339
2340 /* Check for a NULL entry, used to indicate that the parameter goes
2341 both on the stack and in registers. */
2342 if (XEXP (XVECEXP (src, 0, 0), 0))
2343 start = 0;
2344 else
2345 start = 1;
2346
2347 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2348
2349 /* Copy the (probable) hard regs into pseudos. */
2350 for (i = start; i < XVECLEN (src, 0); i++)
2351 {
2352 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2353 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2354 emit_move_insn (tmps[i], reg);
2355 }
2356 emit_queue ();
2357
2358 /* If we won't be storing directly into memory, protect the real destination
2359 from strange tricks we might play. */
2360 dst = orig_dst;
2361 if (GET_CODE (dst) == PARALLEL)
2362 {
2363 rtx temp;
2364
2365 /* We can get a PARALLEL dst if there is a conditional expression in
2366 a return statement. In that case, the dst and src are the same,
2367 so no action is necessary. */
2368 if (rtx_equal_p (dst, src))
2369 return;
2370
2371 /* It is unclear if we can ever reach here, but we may as well handle
2372 it. Allocate a temporary, and split this into a store/load to/from
2373 the temporary. */
2374
2375 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2376 emit_group_store (temp, src, ssize);
2377 emit_group_load (dst, temp, ssize);
2378 return;
2379 }
2380 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2381 {
2382 dst = gen_reg_rtx (GET_MODE (orig_dst));
2383 /* Make life a bit easier for combine. */
2384 emit_move_insn (dst, const0_rtx);
2385 }
2386
2387 /* Process the pieces. */
2388 for (i = start; i < XVECLEN (src, 0); i++)
2389 {
2390 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2391 enum machine_mode mode = GET_MODE (tmps[i]);
2392 unsigned int bytelen = GET_MODE_SIZE (mode);
2393 rtx dest = dst;
2394
2395 /* Handle trailing fragments that run over the size of the struct. */
2396 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2397 {
2398 if (BYTES_BIG_ENDIAN)
2399 {
2400 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2401 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2402 tmps[i], 0, OPTAB_WIDEN);
2403 }
2404 bytelen = ssize - bytepos;
2405 }
2406
2407 if (GET_CODE (dst) == CONCAT)
2408 {
2409 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2410 dest = XEXP (dst, 0);
2411 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2412 {
2413 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2414 dest = XEXP (dst, 1);
2415 }
2416 else
2417 abort ();
2418 }
2419
2420 /* Optimize the access just a bit. */
2421 if (GET_CODE (dest) == MEM
2422 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2423 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2424 && bytelen == GET_MODE_SIZE (mode))
2425 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2426 else
2427 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2428 mode, tmps[i], ssize);
2429 }
2430
2431 emit_queue ();
2432
2433 /* Copy from the pseudo into the (probable) hard reg. */
2434 if (GET_CODE (dst) == REG)
2435 emit_move_insn (orig_dst, dst);
2436 }
2437
2438 /* Generate code to copy a BLKmode object of TYPE out of a
2439 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2440 is null, a stack temporary is created. TGTBLK is returned.
2441
2442 The primary purpose of this routine is to handle functions
2443 that return BLKmode structures in registers. Some machines
2444 (the PA for example) want to return all small structures
2445 in registers regardless of the structure's alignment. */
2446
2447 rtx
2448 copy_blkmode_from_reg (tgtblk, srcreg, type)
2449 rtx tgtblk;
2450 rtx srcreg;
2451 tree type;
2452 {
2453 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2454 rtx src = NULL, dst = NULL;
2455 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2456 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2457
2458 if (tgtblk == 0)
2459 {
2460 tgtblk = assign_temp (build_qualified_type (type,
2461 (TYPE_QUALS (type)
2462 | TYPE_QUAL_CONST)),
2463 0, 1, 1);
2464 preserve_temp_slots (tgtblk);
2465 }
2466
2467 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2468 into a new pseudo which is a full word.
2469
2470 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2471 the wrong part of the register gets copied so we fake a type conversion
2472 in place. */
2473 if (GET_MODE (srcreg) != BLKmode
2474 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2475 {
2476 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2477 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2478 else
2479 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2480 }
2481
2482 /* Structures whose size is not a multiple of a word are aligned
2483 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2484 machine, this means we must skip the empty high order bytes when
2485 calculating the bit offset. */
2486 if (BYTES_BIG_ENDIAN
2487 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2488 && bytes % UNITS_PER_WORD)
2489 big_endian_correction
2490 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2491
2492 /* Copy the structure BITSIZE bites at a time.
2493
2494 We could probably emit more efficient code for machines which do not use
2495 strict alignment, but it doesn't seem worth the effort at the current
2496 time. */
2497 for (bitpos = 0, xbitpos = big_endian_correction;
2498 bitpos < bytes * BITS_PER_UNIT;
2499 bitpos += bitsize, xbitpos += bitsize)
2500 {
2501 /* We need a new source operand each time xbitpos is on a
2502 word boundary and when xbitpos == big_endian_correction
2503 (the first time through). */
2504 if (xbitpos % BITS_PER_WORD == 0
2505 || xbitpos == big_endian_correction)
2506 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2507 GET_MODE (srcreg));
2508
2509 /* We need a new destination operand each time bitpos is on
2510 a word boundary. */
2511 if (bitpos % BITS_PER_WORD == 0)
2512 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2513
2514 /* Use xbitpos for the source extraction (right justified) and
2515 xbitpos for the destination store (left justified). */
2516 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2517 extract_bit_field (src, bitsize,
2518 xbitpos % BITS_PER_WORD, 1,
2519 NULL_RTX, word_mode, word_mode,
2520 BITS_PER_WORD),
2521 BITS_PER_WORD);
2522 }
2523
2524 return tgtblk;
2525 }
2526
2527 /* Add a USE expression for REG to the (possibly empty) list pointed
2528 to by CALL_FUSAGE. REG must denote a hard register. */
2529
2530 void
2531 use_reg (call_fusage, reg)
2532 rtx *call_fusage, reg;
2533 {
2534 if (GET_CODE (reg) != REG
2535 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2536 abort ();
2537
2538 *call_fusage
2539 = gen_rtx_EXPR_LIST (VOIDmode,
2540 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2541 }
2542
2543 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2544 starting at REGNO. All of these registers must be hard registers. */
2545
2546 void
2547 use_regs (call_fusage, regno, nregs)
2548 rtx *call_fusage;
2549 int regno;
2550 int nregs;
2551 {
2552 int i;
2553
2554 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2555 abort ();
2556
2557 for (i = 0; i < nregs; i++)
2558 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2559 }
2560
2561 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2562 PARALLEL REGS. This is for calls that pass values in multiple
2563 non-contiguous locations. The Irix 6 ABI has examples of this. */
2564
2565 void
2566 use_group_regs (call_fusage, regs)
2567 rtx *call_fusage;
2568 rtx regs;
2569 {
2570 int i;
2571
2572 for (i = 0; i < XVECLEN (regs, 0); i++)
2573 {
2574 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2575
2576 /* A NULL entry means the parameter goes both on the stack and in
2577 registers. This can also be a MEM for targets that pass values
2578 partially on the stack and partially in registers. */
2579 if (reg != 0 && GET_CODE (reg) == REG)
2580 use_reg (call_fusage, reg);
2581 }
2582 }
2583 \f
2584
2585 /* Determine whether the LEN bytes generated by CONSTFUN can be
2586 stored to memory using several move instructions. CONSTFUNDATA is
2587 a pointer which will be passed as argument in every CONSTFUN call.
2588 ALIGN is maximum alignment we can assume. Return nonzero if a
2589 call to store_by_pieces should succeed. */
2590
2591 int
2592 can_store_by_pieces (len, constfun, constfundata, align)
2593 unsigned HOST_WIDE_INT len;
2594 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2595 PTR constfundata;
2596 unsigned int align;
2597 {
2598 unsigned HOST_WIDE_INT max_size, l;
2599 HOST_WIDE_INT offset = 0;
2600 enum machine_mode mode, tmode;
2601 enum insn_code icode;
2602 int reverse;
2603 rtx cst;
2604
2605 if (! MOVE_BY_PIECES_P (len, align))
2606 return 0;
2607
2608 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2609 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2610 align = MOVE_MAX * BITS_PER_UNIT;
2611
2612 /* We would first store what we can in the largest integer mode, then go to
2613 successively smaller modes. */
2614
2615 for (reverse = 0;
2616 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2617 reverse++)
2618 {
2619 l = len;
2620 mode = VOIDmode;
2621 max_size = STORE_MAX_PIECES + 1;
2622 while (max_size > 1)
2623 {
2624 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2625 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2626 if (GET_MODE_SIZE (tmode) < max_size)
2627 mode = tmode;
2628
2629 if (mode == VOIDmode)
2630 break;
2631
2632 icode = mov_optab->handlers[(int) mode].insn_code;
2633 if (icode != CODE_FOR_nothing
2634 && align >= GET_MODE_ALIGNMENT (mode))
2635 {
2636 unsigned int size = GET_MODE_SIZE (mode);
2637
2638 while (l >= size)
2639 {
2640 if (reverse)
2641 offset -= size;
2642
2643 cst = (*constfun) (constfundata, offset, mode);
2644 if (!LEGITIMATE_CONSTANT_P (cst))
2645 return 0;
2646
2647 if (!reverse)
2648 offset += size;
2649
2650 l -= size;
2651 }
2652 }
2653
2654 max_size = GET_MODE_SIZE (mode);
2655 }
2656
2657 /* The code above should have handled everything. */
2658 if (l != 0)
2659 abort ();
2660 }
2661
2662 return 1;
2663 }
2664
2665 /* Generate several move instructions to store LEN bytes generated by
2666 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2667 pointer which will be passed as argument in every CONSTFUN call.
2668 ALIGN is maximum alignment we can assume. */
2669
2670 void
2671 store_by_pieces (to, len, constfun, constfundata, align)
2672 rtx to;
2673 unsigned HOST_WIDE_INT len;
2674 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2675 PTR constfundata;
2676 unsigned int align;
2677 {
2678 struct store_by_pieces data;
2679
2680 if (! MOVE_BY_PIECES_P (len, align))
2681 abort ();
2682 to = protect_from_queue (to, 1);
2683 data.constfun = constfun;
2684 data.constfundata = constfundata;
2685 data.len = len;
2686 data.to = to;
2687 store_by_pieces_1 (&data, align);
2688 }
2689
2690 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2691 rtx with BLKmode). The caller must pass TO through protect_from_queue
2692 before calling. ALIGN is maximum alignment we can assume. */
2693
2694 static void
2695 clear_by_pieces (to, len, align)
2696 rtx to;
2697 unsigned HOST_WIDE_INT len;
2698 unsigned int align;
2699 {
2700 struct store_by_pieces data;
2701
2702 data.constfun = clear_by_pieces_1;
2703 data.constfundata = NULL;
2704 data.len = len;
2705 data.to = to;
2706 store_by_pieces_1 (&data, align);
2707 }
2708
2709 /* Callback routine for clear_by_pieces.
2710 Return const0_rtx unconditionally. */
2711
2712 static rtx
2713 clear_by_pieces_1 (data, offset, mode)
2714 PTR data ATTRIBUTE_UNUSED;
2715 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2716 enum machine_mode mode ATTRIBUTE_UNUSED;
2717 {
2718 return const0_rtx;
2719 }
2720
2721 /* Subroutine of clear_by_pieces and store_by_pieces.
2722 Generate several move instructions to store LEN bytes of block TO. (A MEM
2723 rtx with BLKmode). The caller must pass TO through protect_from_queue
2724 before calling. ALIGN is maximum alignment we can assume. */
2725
2726 static void
2727 store_by_pieces_1 (data, align)
2728 struct store_by_pieces *data;
2729 unsigned int align;
2730 {
2731 rtx to_addr = XEXP (data->to, 0);
2732 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2733 enum machine_mode mode = VOIDmode, tmode;
2734 enum insn_code icode;
2735
2736 data->offset = 0;
2737 data->to_addr = to_addr;
2738 data->autinc_to
2739 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2740 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2741
2742 data->explicit_inc_to = 0;
2743 data->reverse
2744 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2745 if (data->reverse)
2746 data->offset = data->len;
2747
2748 /* If storing requires more than two move insns,
2749 copy addresses to registers (to make displacements shorter)
2750 and use post-increment if available. */
2751 if (!data->autinc_to
2752 && move_by_pieces_ninsns (data->len, align) > 2)
2753 {
2754 /* Determine the main mode we'll be using. */
2755 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2756 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2757 if (GET_MODE_SIZE (tmode) < max_size)
2758 mode = tmode;
2759
2760 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2761 {
2762 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2763 data->autinc_to = 1;
2764 data->explicit_inc_to = -1;
2765 }
2766
2767 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2768 && ! data->autinc_to)
2769 {
2770 data->to_addr = copy_addr_to_reg (to_addr);
2771 data->autinc_to = 1;
2772 data->explicit_inc_to = 1;
2773 }
2774
2775 if ( !data->autinc_to && CONSTANT_P (to_addr))
2776 data->to_addr = copy_addr_to_reg (to_addr);
2777 }
2778
2779 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2780 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2781 align = MOVE_MAX * BITS_PER_UNIT;
2782
2783 /* First store what we can in the largest integer mode, then go to
2784 successively smaller modes. */
2785
2786 while (max_size > 1)
2787 {
2788 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2789 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2790 if (GET_MODE_SIZE (tmode) < max_size)
2791 mode = tmode;
2792
2793 if (mode == VOIDmode)
2794 break;
2795
2796 icode = mov_optab->handlers[(int) mode].insn_code;
2797 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2798 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2799
2800 max_size = GET_MODE_SIZE (mode);
2801 }
2802
2803 /* The code above should have handled everything. */
2804 if (data->len != 0)
2805 abort ();
2806 }
2807
2808 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2809 with move instructions for mode MODE. GENFUN is the gen_... function
2810 to make a move insn for that mode. DATA has all the other info. */
2811
2812 static void
2813 store_by_pieces_2 (genfun, mode, data)
2814 rtx (*genfun) PARAMS ((rtx, ...));
2815 enum machine_mode mode;
2816 struct store_by_pieces *data;
2817 {
2818 unsigned int size = GET_MODE_SIZE (mode);
2819 rtx to1, cst;
2820
2821 while (data->len >= size)
2822 {
2823 if (data->reverse)
2824 data->offset -= size;
2825
2826 if (data->autinc_to)
2827 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2828 data->offset);
2829 else
2830 to1 = adjust_address (data->to, mode, data->offset);
2831
2832 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2833 emit_insn (gen_add2_insn (data->to_addr,
2834 GEN_INT (-(HOST_WIDE_INT) size)));
2835
2836 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2837 emit_insn ((*genfun) (to1, cst));
2838
2839 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2840 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2841
2842 if (! data->reverse)
2843 data->offset += size;
2844
2845 data->len -= size;
2846 }
2847 }
2848 \f
2849 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2850 its length in bytes. */
2851
2852 rtx
2853 clear_storage (object, size)
2854 rtx object;
2855 rtx size;
2856 {
2857 rtx retval = 0;
2858 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2859 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2860
2861 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2862 just move a zero. Otherwise, do this a piece at a time. */
2863 if (GET_MODE (object) != BLKmode
2864 && GET_CODE (size) == CONST_INT
2865 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2866 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2867 else
2868 {
2869 object = protect_from_queue (object, 1);
2870 size = protect_from_queue (size, 0);
2871
2872 if (GET_CODE (size) == CONST_INT
2873 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2874 clear_by_pieces (object, INTVAL (size), align);
2875 else if (clear_storage_via_clrstr (object, size, align))
2876 ;
2877 else
2878 retval = clear_storage_via_libcall (object, size);
2879 }
2880
2881 return retval;
2882 }
2883
2884 /* A subroutine of clear_storage. Expand a clrstr pattern;
2885 return true if successful. */
2886
2887 static bool
2888 clear_storage_via_clrstr (object, size, align)
2889 rtx object, size;
2890 unsigned int align;
2891 {
2892 /* Try the most limited insn first, because there's no point
2893 including more than one in the machine description unless
2894 the more limited one has some advantage. */
2895
2896 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2897 enum machine_mode mode;
2898
2899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2900 mode = GET_MODE_WIDER_MODE (mode))
2901 {
2902 enum insn_code code = clrstr_optab[(int) mode];
2903 insn_operand_predicate_fn pred;
2904
2905 if (code != CODE_FOR_nothing
2906 /* We don't need MODE to be narrower than
2907 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2908 the mode mask, as it is returned by the macro, it will
2909 definitely be less than the actual mode mask. */
2910 && ((GET_CODE (size) == CONST_INT
2911 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2912 <= (GET_MODE_MASK (mode) >> 1)))
2913 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2914 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2915 || (*pred) (object, BLKmode))
2916 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2917 || (*pred) (opalign, VOIDmode)))
2918 {
2919 rtx op1;
2920 rtx last = get_last_insn ();
2921 rtx pat;
2922
2923 op1 = convert_to_mode (mode, size, 1);
2924 pred = insn_data[(int) code].operand[1].predicate;
2925 if (pred != 0 && ! (*pred) (op1, mode))
2926 op1 = copy_to_mode_reg (mode, op1);
2927
2928 pat = GEN_FCN ((int) code) (object, op1, opalign);
2929 if (pat)
2930 {
2931 emit_insn (pat);
2932 return true;
2933 }
2934 else
2935 delete_insns_since (last);
2936 }
2937 }
2938
2939 return false;
2940 }
2941
2942 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2943 Return the return value of memset, 0 otherwise. */
2944
2945 static rtx
2946 clear_storage_via_libcall (object, size)
2947 rtx object, size;
2948 {
2949 tree call_expr, arg_list, fn, object_tree, size_tree;
2950 enum machine_mode size_mode;
2951 rtx retval;
2952
2953 /* OBJECT or SIZE may have been passed through protect_from_queue.
2954
2955 It is unsafe to save the value generated by protect_from_queue
2956 and reuse it later. Consider what happens if emit_queue is
2957 called before the return value from protect_from_queue is used.
2958
2959 Expansion of the CALL_EXPR below will call emit_queue before
2960 we are finished emitting RTL for argument setup. So if we are
2961 not careful we could get the wrong value for an argument.
2962
2963 To avoid this problem we go ahead and emit code to copy OBJECT
2964 and SIZE into new pseudos. We can then place those new pseudos
2965 into an RTL_EXPR and use them later, even after a call to
2966 emit_queue.
2967
2968 Note this is not strictly needed for library calls since they
2969 do not call emit_queue before loading their arguments. However,
2970 we may need to have library calls call emit_queue in the future
2971 since failing to do so could cause problems for targets which
2972 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2973
2974 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2975
2976 if (TARGET_MEM_FUNCTIONS)
2977 size_mode = TYPE_MODE (sizetype);
2978 else
2979 size_mode = TYPE_MODE (unsigned_type_node);
2980 size = convert_to_mode (size_mode, size, 1);
2981 size = copy_to_mode_reg (size_mode, size);
2982
2983 /* It is incorrect to use the libcall calling conventions to call
2984 memset in this context. This could be a user call to memset and
2985 the user may wish to examine the return value from memset. For
2986 targets where libcalls and normal calls have different conventions
2987 for returning pointers, we could end up generating incorrect code.
2988
2989 For convenience, we generate the call to bzero this way as well. */
2990
2991 object_tree = make_tree (ptr_type_node, object);
2992 if (TARGET_MEM_FUNCTIONS)
2993 size_tree = make_tree (sizetype, size);
2994 else
2995 size_tree = make_tree (unsigned_type_node, size);
2996
2997 fn = clear_storage_libcall_fn (true);
2998 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2999 if (TARGET_MEM_FUNCTIONS)
3000 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3001 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3002
3003 /* Now we have to build up the CALL_EXPR itself. */
3004 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3005 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3006 call_expr, arg_list, NULL_TREE);
3007 TREE_SIDE_EFFECTS (call_expr) = 1;
3008
3009 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3010
3011 /* If we are initializing a readonly value, show the above call
3012 clobbered it. Otherwise, a load from it may erroneously be
3013 hoisted from a loop. */
3014 if (RTX_UNCHANGING_P (object))
3015 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3016
3017 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3018 }
3019
3020 /* A subroutine of clear_storage_via_libcall. Create the tree node
3021 for the function we use for block clears. The first time FOR_CALL
3022 is true, we call assemble_external. */
3023
3024 static GTY(()) tree block_clear_fn;
3025
3026 static tree
3027 clear_storage_libcall_fn (for_call)
3028 int for_call;
3029 {
3030 static bool emitted_extern;
3031 tree fn = block_clear_fn, args;
3032
3033 if (!fn)
3034 {
3035 if (TARGET_MEM_FUNCTIONS)
3036 {
3037 fn = get_identifier ("memset");
3038 args = build_function_type_list (ptr_type_node, ptr_type_node,
3039 integer_type_node, sizetype,
3040 NULL_TREE);
3041 }
3042 else
3043 {
3044 fn = get_identifier ("bzero");
3045 args = build_function_type_list (void_type_node, ptr_type_node,
3046 unsigned_type_node, NULL_TREE);
3047 }
3048
3049 fn = build_decl (FUNCTION_DECL, fn, args);
3050 DECL_EXTERNAL (fn) = 1;
3051 TREE_PUBLIC (fn) = 1;
3052 DECL_ARTIFICIAL (fn) = 1;
3053 TREE_NOTHROW (fn) = 1;
3054
3055 block_clear_fn = fn;
3056 }
3057
3058 if (for_call && !emitted_extern)
3059 {
3060 emitted_extern = true;
3061 make_decl_rtl (fn, NULL);
3062 assemble_external (fn);
3063 }
3064
3065 return fn;
3066 }
3067 \f
3068 /* Generate code to copy Y into X.
3069 Both Y and X must have the same mode, except that
3070 Y can be a constant with VOIDmode.
3071 This mode cannot be BLKmode; use emit_block_move for that.
3072
3073 Return the last instruction emitted. */
3074
3075 rtx
3076 emit_move_insn (x, y)
3077 rtx x, y;
3078 {
3079 enum machine_mode mode = GET_MODE (x);
3080 rtx y_cst = NULL_RTX;
3081 rtx last_insn;
3082
3083 x = protect_from_queue (x, 1);
3084 y = protect_from_queue (y, 0);
3085
3086 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3087 abort ();
3088
3089 /* Never force constant_p_rtx to memory. */
3090 if (GET_CODE (y) == CONSTANT_P_RTX)
3091 ;
3092 else if (CONSTANT_P (y))
3093 {
3094 if (optimize
3095 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3096 && (last_insn = compress_float_constant (x, y)))
3097 return last_insn;
3098
3099 if (!LEGITIMATE_CONSTANT_P (y))
3100 {
3101 y_cst = y;
3102 y = force_const_mem (mode, y);
3103 }
3104 }
3105
3106 /* If X or Y are memory references, verify that their addresses are valid
3107 for the machine. */
3108 if (GET_CODE (x) == MEM
3109 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3110 && ! push_operand (x, GET_MODE (x)))
3111 || (flag_force_addr
3112 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3113 x = validize_mem (x);
3114
3115 if (GET_CODE (y) == MEM
3116 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3117 || (flag_force_addr
3118 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3119 y = validize_mem (y);
3120
3121 if (mode == BLKmode)
3122 abort ();
3123
3124 last_insn = emit_move_insn_1 (x, y);
3125
3126 if (y_cst && GET_CODE (x) == REG)
3127 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3128
3129 return last_insn;
3130 }
3131
3132 /* Low level part of emit_move_insn.
3133 Called just like emit_move_insn, but assumes X and Y
3134 are basically valid. */
3135
3136 rtx
3137 emit_move_insn_1 (x, y)
3138 rtx x, y;
3139 {
3140 enum machine_mode mode = GET_MODE (x);
3141 enum machine_mode submode;
3142 enum mode_class class = GET_MODE_CLASS (mode);
3143
3144 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3145 abort ();
3146
3147 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3148 return
3149 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3150
3151 /* Expand complex moves by moving real part and imag part, if possible. */
3152 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3153 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
3154 * BITS_PER_UNIT),
3155 (class == MODE_COMPLEX_INT
3156 ? MODE_INT : MODE_FLOAT),
3157 0))
3158 && (mov_optab->handlers[(int) submode].insn_code
3159 != CODE_FOR_nothing))
3160 {
3161 /* Don't split destination if it is a stack push. */
3162 int stack = push_operand (x, GET_MODE (x));
3163
3164 #ifdef PUSH_ROUNDING
3165 /* In case we output to the stack, but the size is smaller machine can
3166 push exactly, we need to use move instructions. */
3167 if (stack
3168 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3169 != GET_MODE_SIZE (submode)))
3170 {
3171 rtx temp;
3172 HOST_WIDE_INT offset1, offset2;
3173
3174 /* Do not use anti_adjust_stack, since we don't want to update
3175 stack_pointer_delta. */
3176 temp = expand_binop (Pmode,
3177 #ifdef STACK_GROWS_DOWNWARD
3178 sub_optab,
3179 #else
3180 add_optab,
3181 #endif
3182 stack_pointer_rtx,
3183 GEN_INT
3184 (PUSH_ROUNDING
3185 (GET_MODE_SIZE (GET_MODE (x)))),
3186 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3187
3188 if (temp != stack_pointer_rtx)
3189 emit_move_insn (stack_pointer_rtx, temp);
3190
3191 #ifdef STACK_GROWS_DOWNWARD
3192 offset1 = 0;
3193 offset2 = GET_MODE_SIZE (submode);
3194 #else
3195 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3196 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3197 + GET_MODE_SIZE (submode));
3198 #endif
3199
3200 emit_move_insn (change_address (x, submode,
3201 gen_rtx_PLUS (Pmode,
3202 stack_pointer_rtx,
3203 GEN_INT (offset1))),
3204 gen_realpart (submode, y));
3205 emit_move_insn (change_address (x, submode,
3206 gen_rtx_PLUS (Pmode,
3207 stack_pointer_rtx,
3208 GEN_INT (offset2))),
3209 gen_imagpart (submode, y));
3210 }
3211 else
3212 #endif
3213 /* If this is a stack, push the highpart first, so it
3214 will be in the argument order.
3215
3216 In that case, change_address is used only to convert
3217 the mode, not to change the address. */
3218 if (stack)
3219 {
3220 /* Note that the real part always precedes the imag part in memory
3221 regardless of machine's endianness. */
3222 #ifdef STACK_GROWS_DOWNWARD
3223 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3224 (gen_rtx_MEM (submode, XEXP (x, 0)),
3225 gen_imagpart (submode, y)));
3226 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3227 (gen_rtx_MEM (submode, XEXP (x, 0)),
3228 gen_realpart (submode, y)));
3229 #else
3230 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3231 (gen_rtx_MEM (submode, XEXP (x, 0)),
3232 gen_realpart (submode, y)));
3233 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3234 (gen_rtx_MEM (submode, XEXP (x, 0)),
3235 gen_imagpart (submode, y)));
3236 #endif
3237 }
3238 else
3239 {
3240 rtx realpart_x, realpart_y;
3241 rtx imagpart_x, imagpart_y;
3242
3243 /* If this is a complex value with each part being smaller than a
3244 word, the usual calling sequence will likely pack the pieces into
3245 a single register. Unfortunately, SUBREG of hard registers only
3246 deals in terms of words, so we have a problem converting input
3247 arguments to the CONCAT of two registers that is used elsewhere
3248 for complex values. If this is before reload, we can copy it into
3249 memory and reload. FIXME, we should see about using extract and
3250 insert on integer registers, but complex short and complex char
3251 variables should be rarely used. */
3252 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3253 && (reload_in_progress | reload_completed) == 0)
3254 {
3255 int packed_dest_p
3256 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3257 int packed_src_p
3258 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3259
3260 if (packed_dest_p || packed_src_p)
3261 {
3262 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3263 ? MODE_FLOAT : MODE_INT);
3264
3265 enum machine_mode reg_mode
3266 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3267
3268 if (reg_mode != BLKmode)
3269 {
3270 rtx mem = assign_stack_temp (reg_mode,
3271 GET_MODE_SIZE (mode), 0);
3272 rtx cmem = adjust_address (mem, mode, 0);
3273
3274 cfun->cannot_inline
3275 = N_("function using short complex types cannot be inline");
3276
3277 if (packed_dest_p)
3278 {
3279 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3280
3281 emit_move_insn_1 (cmem, y);
3282 return emit_move_insn_1 (sreg, mem);
3283 }
3284 else
3285 {
3286 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3287
3288 emit_move_insn_1 (mem, sreg);
3289 return emit_move_insn_1 (x, cmem);
3290 }
3291 }
3292 }
3293 }
3294
3295 realpart_x = gen_realpart (submode, x);
3296 realpart_y = gen_realpart (submode, y);
3297 imagpart_x = gen_imagpart (submode, x);
3298 imagpart_y = gen_imagpart (submode, y);
3299
3300 /* Show the output dies here. This is necessary for SUBREGs
3301 of pseudos since we cannot track their lifetimes correctly;
3302 hard regs shouldn't appear here except as return values.
3303 We never want to emit such a clobber after reload. */
3304 if (x != y
3305 && ! (reload_in_progress || reload_completed)
3306 && (GET_CODE (realpart_x) == SUBREG
3307 || GET_CODE (imagpart_x) == SUBREG))
3308 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3309
3310 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3311 (realpart_x, realpart_y));
3312 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3313 (imagpart_x, imagpart_y));
3314 }
3315
3316 return get_last_insn ();
3317 }
3318
3319 /* This will handle any multi-word or full-word mode that lacks a move_insn
3320 pattern. However, you will get better code if you define such patterns,
3321 even if they must turn into multiple assembler instructions. */
3322 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3323 {
3324 rtx last_insn = 0;
3325 rtx seq, inner;
3326 int need_clobber;
3327 int i;
3328
3329 #ifdef PUSH_ROUNDING
3330
3331 /* If X is a push on the stack, do the push now and replace
3332 X with a reference to the stack pointer. */
3333 if (push_operand (x, GET_MODE (x)))
3334 {
3335 rtx temp;
3336 enum rtx_code code;
3337
3338 /* Do not use anti_adjust_stack, since we don't want to update
3339 stack_pointer_delta. */
3340 temp = expand_binop (Pmode,
3341 #ifdef STACK_GROWS_DOWNWARD
3342 sub_optab,
3343 #else
3344 add_optab,
3345 #endif
3346 stack_pointer_rtx,
3347 GEN_INT
3348 (PUSH_ROUNDING
3349 (GET_MODE_SIZE (GET_MODE (x)))),
3350 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3351
3352 if (temp != stack_pointer_rtx)
3353 emit_move_insn (stack_pointer_rtx, temp);
3354
3355 code = GET_CODE (XEXP (x, 0));
3356
3357 /* Just hope that small offsets off SP are OK. */
3358 if (code == POST_INC)
3359 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3360 GEN_INT (-((HOST_WIDE_INT)
3361 GET_MODE_SIZE (GET_MODE (x)))));
3362 else if (code == POST_DEC)
3363 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3364 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3365 else
3366 temp = stack_pointer_rtx;
3367
3368 x = change_address (x, VOIDmode, temp);
3369 }
3370 #endif
3371
3372 /* If we are in reload, see if either operand is a MEM whose address
3373 is scheduled for replacement. */
3374 if (reload_in_progress && GET_CODE (x) == MEM
3375 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3376 x = replace_equiv_address_nv (x, inner);
3377 if (reload_in_progress && GET_CODE (y) == MEM
3378 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3379 y = replace_equiv_address_nv (y, inner);
3380
3381 start_sequence ();
3382
3383 need_clobber = 0;
3384 for (i = 0;
3385 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3386 i++)
3387 {
3388 rtx xpart = operand_subword (x, i, 1, mode);
3389 rtx ypart = operand_subword (y, i, 1, mode);
3390
3391 /* If we can't get a part of Y, put Y into memory if it is a
3392 constant. Otherwise, force it into a register. If we still
3393 can't get a part of Y, abort. */
3394 if (ypart == 0 && CONSTANT_P (y))
3395 {
3396 y = force_const_mem (mode, y);
3397 ypart = operand_subword (y, i, 1, mode);
3398 }
3399 else if (ypart == 0)
3400 ypart = operand_subword_force (y, i, mode);
3401
3402 if (xpart == 0 || ypart == 0)
3403 abort ();
3404
3405 need_clobber |= (GET_CODE (xpart) == SUBREG);
3406
3407 last_insn = emit_move_insn (xpart, ypart);
3408 }
3409
3410 seq = get_insns ();
3411 end_sequence ();
3412
3413 /* Show the output dies here. This is necessary for SUBREGs
3414 of pseudos since we cannot track their lifetimes correctly;
3415 hard regs shouldn't appear here except as return values.
3416 We never want to emit such a clobber after reload. */
3417 if (x != y
3418 && ! (reload_in_progress || reload_completed)
3419 && need_clobber != 0)
3420 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3421
3422 emit_insn (seq);
3423
3424 return last_insn;
3425 }
3426 else
3427 abort ();
3428 }
3429
3430 /* If Y is representable exactly in a narrower mode, and the target can
3431 perform the extension directly from constant or memory, then emit the
3432 move as an extension. */
3433
3434 static rtx
3435 compress_float_constant (x, y)
3436 rtx x, y;
3437 {
3438 enum machine_mode dstmode = GET_MODE (x);
3439 enum machine_mode orig_srcmode = GET_MODE (y);
3440 enum machine_mode srcmode;
3441 REAL_VALUE_TYPE r;
3442
3443 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3444
3445 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3446 srcmode != orig_srcmode;
3447 srcmode = GET_MODE_WIDER_MODE (srcmode))
3448 {
3449 enum insn_code ic;
3450 rtx trunc_y, last_insn;
3451
3452 /* Skip if the target can't extend this way. */
3453 ic = can_extend_p (dstmode, srcmode, 0);
3454 if (ic == CODE_FOR_nothing)
3455 continue;
3456
3457 /* Skip if the narrowed value isn't exact. */
3458 if (! exact_real_truncate (srcmode, &r))
3459 continue;
3460
3461 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3462
3463 if (LEGITIMATE_CONSTANT_P (trunc_y))
3464 {
3465 /* Skip if the target needs extra instructions to perform
3466 the extension. */
3467 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3468 continue;
3469 }
3470 else if (float_extend_from_mem[dstmode][srcmode])
3471 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3472 else
3473 continue;
3474
3475 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3476 last_insn = get_last_insn ();
3477
3478 if (GET_CODE (x) == REG)
3479 REG_NOTES (last_insn)
3480 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3481
3482 return last_insn;
3483 }
3484
3485 return NULL_RTX;
3486 }
3487 \f
3488 /* Pushing data onto the stack. */
3489
3490 /* Push a block of length SIZE (perhaps variable)
3491 and return an rtx to address the beginning of the block.
3492 Note that it is not possible for the value returned to be a QUEUED.
3493 The value may be virtual_outgoing_args_rtx.
3494
3495 EXTRA is the number of bytes of padding to push in addition to SIZE.
3496 BELOW nonzero means this padding comes at low addresses;
3497 otherwise, the padding comes at high addresses. */
3498
3499 rtx
3500 push_block (size, extra, below)
3501 rtx size;
3502 int extra, below;
3503 {
3504 rtx temp;
3505
3506 size = convert_modes (Pmode, ptr_mode, size, 1);
3507 if (CONSTANT_P (size))
3508 anti_adjust_stack (plus_constant (size, extra));
3509 else if (GET_CODE (size) == REG && extra == 0)
3510 anti_adjust_stack (size);
3511 else
3512 {
3513 temp = copy_to_mode_reg (Pmode, size);
3514 if (extra != 0)
3515 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3516 temp, 0, OPTAB_LIB_WIDEN);
3517 anti_adjust_stack (temp);
3518 }
3519
3520 #ifndef STACK_GROWS_DOWNWARD
3521 if (0)
3522 #else
3523 if (1)
3524 #endif
3525 {
3526 temp = virtual_outgoing_args_rtx;
3527 if (extra != 0 && below)
3528 temp = plus_constant (temp, extra);
3529 }
3530 else
3531 {
3532 if (GET_CODE (size) == CONST_INT)
3533 temp = plus_constant (virtual_outgoing_args_rtx,
3534 -INTVAL (size) - (below ? 0 : extra));
3535 else if (extra != 0 && !below)
3536 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3537 negate_rtx (Pmode, plus_constant (size, extra)));
3538 else
3539 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3540 negate_rtx (Pmode, size));
3541 }
3542
3543 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3544 }
3545
3546 #ifdef PUSH_ROUNDING
3547
3548 /* Emit single push insn. */
3549
3550 static void
3551 emit_single_push_insn (mode, x, type)
3552 rtx x;
3553 enum machine_mode mode;
3554 tree type;
3555 {
3556 rtx dest_addr;
3557 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3558 rtx dest;
3559 enum insn_code icode;
3560 insn_operand_predicate_fn pred;
3561
3562 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3563 /* If there is push pattern, use it. Otherwise try old way of throwing
3564 MEM representing push operation to move expander. */
3565 icode = push_optab->handlers[(int) mode].insn_code;
3566 if (icode != CODE_FOR_nothing)
3567 {
3568 if (((pred = insn_data[(int) icode].operand[0].predicate)
3569 && !((*pred) (x, mode))))
3570 x = force_reg (mode, x);
3571 emit_insn (GEN_FCN (icode) (x));
3572 return;
3573 }
3574 if (GET_MODE_SIZE (mode) == rounded_size)
3575 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3576 else
3577 {
3578 #ifdef STACK_GROWS_DOWNWARD
3579 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3580 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3581 #else
3582 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3583 GEN_INT (rounded_size));
3584 #endif
3585 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3586 }
3587
3588 dest = gen_rtx_MEM (mode, dest_addr);
3589
3590 if (type != 0)
3591 {
3592 set_mem_attributes (dest, type, 1);
3593
3594 if (flag_optimize_sibling_calls)
3595 /* Function incoming arguments may overlap with sibling call
3596 outgoing arguments and we cannot allow reordering of reads
3597 from function arguments with stores to outgoing arguments
3598 of sibling calls. */
3599 set_mem_alias_set (dest, 0);
3600 }
3601 emit_move_insn (dest, x);
3602 }
3603 #endif
3604
3605 /* Generate code to push X onto the stack, assuming it has mode MODE and
3606 type TYPE.
3607 MODE is redundant except when X is a CONST_INT (since they don't
3608 carry mode info).
3609 SIZE is an rtx for the size of data to be copied (in bytes),
3610 needed only if X is BLKmode.
3611
3612 ALIGN (in bits) is maximum alignment we can assume.
3613
3614 If PARTIAL and REG are both nonzero, then copy that many of the first
3615 words of X into registers starting with REG, and push the rest of X.
3616 The amount of space pushed is decreased by PARTIAL words,
3617 rounded *down* to a multiple of PARM_BOUNDARY.
3618 REG must be a hard register in this case.
3619 If REG is zero but PARTIAL is not, take any all others actions for an
3620 argument partially in registers, but do not actually load any
3621 registers.
3622
3623 EXTRA is the amount in bytes of extra space to leave next to this arg.
3624 This is ignored if an argument block has already been allocated.
3625
3626 On a machine that lacks real push insns, ARGS_ADDR is the address of
3627 the bottom of the argument block for this call. We use indexing off there
3628 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3629 argument block has not been preallocated.
3630
3631 ARGS_SO_FAR is the size of args previously pushed for this call.
3632
3633 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3634 for arguments passed in registers. If nonzero, it will be the number
3635 of bytes required. */
3636
3637 void
3638 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3639 args_addr, args_so_far, reg_parm_stack_space,
3640 alignment_pad)
3641 rtx x;
3642 enum machine_mode mode;
3643 tree type;
3644 rtx size;
3645 unsigned int align;
3646 int partial;
3647 rtx reg;
3648 int extra;
3649 rtx args_addr;
3650 rtx args_so_far;
3651 int reg_parm_stack_space;
3652 rtx alignment_pad;
3653 {
3654 rtx xinner;
3655 enum direction stack_direction
3656 #ifdef STACK_GROWS_DOWNWARD
3657 = downward;
3658 #else
3659 = upward;
3660 #endif
3661
3662 /* Decide where to pad the argument: `downward' for below,
3663 `upward' for above, or `none' for don't pad it.
3664 Default is below for small data on big-endian machines; else above. */
3665 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3666
3667 /* Invert direction if stack is post-decrement.
3668 FIXME: why? */
3669 if (STACK_PUSH_CODE == POST_DEC)
3670 if (where_pad != none)
3671 where_pad = (where_pad == downward ? upward : downward);
3672
3673 xinner = x = protect_from_queue (x, 0);
3674
3675 if (mode == BLKmode)
3676 {
3677 /* Copy a block into the stack, entirely or partially. */
3678
3679 rtx temp;
3680 int used = partial * UNITS_PER_WORD;
3681 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3682 int skip;
3683
3684 if (size == 0)
3685 abort ();
3686
3687 used -= offset;
3688
3689 /* USED is now the # of bytes we need not copy to the stack
3690 because registers will take care of them. */
3691
3692 if (partial != 0)
3693 xinner = adjust_address (xinner, BLKmode, used);
3694
3695 /* If the partial register-part of the arg counts in its stack size,
3696 skip the part of stack space corresponding to the registers.
3697 Otherwise, start copying to the beginning of the stack space,
3698 by setting SKIP to 0. */
3699 skip = (reg_parm_stack_space == 0) ? 0 : used;
3700
3701 #ifdef PUSH_ROUNDING
3702 /* Do it with several push insns if that doesn't take lots of insns
3703 and if there is no difficulty with push insns that skip bytes
3704 on the stack for alignment purposes. */
3705 if (args_addr == 0
3706 && PUSH_ARGS
3707 && GET_CODE (size) == CONST_INT
3708 && skip == 0
3709 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3710 /* Here we avoid the case of a structure whose weak alignment
3711 forces many pushes of a small amount of data,
3712 and such small pushes do rounding that causes trouble. */
3713 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3714 || align >= BIGGEST_ALIGNMENT
3715 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3716 == (align / BITS_PER_UNIT)))
3717 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3718 {
3719 /* Push padding now if padding above and stack grows down,
3720 or if padding below and stack grows up.
3721 But if space already allocated, this has already been done. */
3722 if (extra && args_addr == 0
3723 && where_pad != none && where_pad != stack_direction)
3724 anti_adjust_stack (GEN_INT (extra));
3725
3726 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3727 }
3728 else
3729 #endif /* PUSH_ROUNDING */
3730 {
3731 rtx target;
3732
3733 /* Otherwise make space on the stack and copy the data
3734 to the address of that space. */
3735
3736 /* Deduct words put into registers from the size we must copy. */
3737 if (partial != 0)
3738 {
3739 if (GET_CODE (size) == CONST_INT)
3740 size = GEN_INT (INTVAL (size) - used);
3741 else
3742 size = expand_binop (GET_MODE (size), sub_optab, size,
3743 GEN_INT (used), NULL_RTX, 0,
3744 OPTAB_LIB_WIDEN);
3745 }
3746
3747 /* Get the address of the stack space.
3748 In this case, we do not deal with EXTRA separately.
3749 A single stack adjust will do. */
3750 if (! args_addr)
3751 {
3752 temp = push_block (size, extra, where_pad == downward);
3753 extra = 0;
3754 }
3755 else if (GET_CODE (args_so_far) == CONST_INT)
3756 temp = memory_address (BLKmode,
3757 plus_constant (args_addr,
3758 skip + INTVAL (args_so_far)));
3759 else
3760 temp = memory_address (BLKmode,
3761 plus_constant (gen_rtx_PLUS (Pmode,
3762 args_addr,
3763 args_so_far),
3764 skip));
3765
3766 if (!ACCUMULATE_OUTGOING_ARGS)
3767 {
3768 /* If the source is referenced relative to the stack pointer,
3769 copy it to another register to stabilize it. We do not need
3770 to do this if we know that we won't be changing sp. */
3771
3772 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3773 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3774 temp = copy_to_reg (temp);
3775 }
3776
3777 target = gen_rtx_MEM (BLKmode, temp);
3778
3779 if (type != 0)
3780 {
3781 set_mem_attributes (target, type, 1);
3782 /* Function incoming arguments may overlap with sibling call
3783 outgoing arguments and we cannot allow reordering of reads
3784 from function arguments with stores to outgoing arguments
3785 of sibling calls. */
3786 set_mem_alias_set (target, 0);
3787 }
3788
3789 /* ALIGN may well be better aligned than TYPE, e.g. due to
3790 PARM_BOUNDARY. Assume the caller isn't lying. */
3791 set_mem_align (target, align);
3792
3793 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3794 }
3795 }
3796 else if (partial > 0)
3797 {
3798 /* Scalar partly in registers. */
3799
3800 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3801 int i;
3802 int not_stack;
3803 /* # words of start of argument
3804 that we must make space for but need not store. */
3805 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3806 int args_offset = INTVAL (args_so_far);
3807 int skip;
3808
3809 /* Push padding now if padding above and stack grows down,
3810 or if padding below and stack grows up.
3811 But if space already allocated, this has already been done. */
3812 if (extra && args_addr == 0
3813 && where_pad != none && where_pad != stack_direction)
3814 anti_adjust_stack (GEN_INT (extra));
3815
3816 /* If we make space by pushing it, we might as well push
3817 the real data. Otherwise, we can leave OFFSET nonzero
3818 and leave the space uninitialized. */
3819 if (args_addr == 0)
3820 offset = 0;
3821
3822 /* Now NOT_STACK gets the number of words that we don't need to
3823 allocate on the stack. */
3824 not_stack = partial - offset;
3825
3826 /* If the partial register-part of the arg counts in its stack size,
3827 skip the part of stack space corresponding to the registers.
3828 Otherwise, start copying to the beginning of the stack space,
3829 by setting SKIP to 0. */
3830 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3831
3832 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3833 x = validize_mem (force_const_mem (mode, x));
3834
3835 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3836 SUBREGs of such registers are not allowed. */
3837 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3838 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3839 x = copy_to_reg (x);
3840
3841 /* Loop over all the words allocated on the stack for this arg. */
3842 /* We can do it by words, because any scalar bigger than a word
3843 has a size a multiple of a word. */
3844 #ifndef PUSH_ARGS_REVERSED
3845 for (i = not_stack; i < size; i++)
3846 #else
3847 for (i = size - 1; i >= not_stack; i--)
3848 #endif
3849 if (i >= not_stack + offset)
3850 emit_push_insn (operand_subword_force (x, i, mode),
3851 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3852 0, args_addr,
3853 GEN_INT (args_offset + ((i - not_stack + skip)
3854 * UNITS_PER_WORD)),
3855 reg_parm_stack_space, alignment_pad);
3856 }
3857 else
3858 {
3859 rtx addr;
3860 rtx target = NULL_RTX;
3861 rtx dest;
3862
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3869
3870 #ifdef PUSH_ROUNDING
3871 if (args_addr == 0 && PUSH_ARGS)
3872 emit_single_push_insn (mode, x, type);
3873 else
3874 #endif
3875 {
3876 if (GET_CODE (args_so_far) == CONST_INT)
3877 addr
3878 = memory_address (mode,
3879 plus_constant (args_addr,
3880 INTVAL (args_so_far)));
3881 else
3882 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3883 args_so_far));
3884 target = addr;
3885 dest = gen_rtx_MEM (mode, addr);
3886 if (type != 0)
3887 {
3888 set_mem_attributes (dest, type, 1);
3889 /* Function incoming arguments may overlap with sibling call
3890 outgoing arguments and we cannot allow reordering of reads
3891 from function arguments with stores to outgoing arguments
3892 of sibling calls. */
3893 set_mem_alias_set (dest, 0);
3894 }
3895
3896 emit_move_insn (dest, x);
3897 }
3898 }
3899
3900 /* If part should go in registers, copy that part
3901 into the appropriate registers. Do this now, at the end,
3902 since mem-to-mem copies above may do function calls. */
3903 if (partial > 0 && reg != 0)
3904 {
3905 /* Handle calls that pass values in multiple non-contiguous locations.
3906 The Irix 6 ABI has examples of this. */
3907 if (GET_CODE (reg) == PARALLEL)
3908 emit_group_load (reg, x, -1); /* ??? size? */
3909 else
3910 move_block_to_reg (REGNO (reg), x, partial, mode);
3911 }
3912
3913 if (extra && args_addr == 0 && where_pad == stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3915
3916 if (alignment_pad && args_addr == 0)
3917 anti_adjust_stack (alignment_pad);
3918 }
3919 \f
3920 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3921 operations. */
3922
3923 static rtx
3924 get_subtarget (x)
3925 rtx x;
3926 {
3927 return ((x == 0
3928 /* Only registers can be subtargets. */
3929 || GET_CODE (x) != REG
3930 /* If the register is readonly, it can't be set more than once. */
3931 || RTX_UNCHANGING_P (x)
3932 /* Don't use hard regs to avoid extending their life. */
3933 || REGNO (x) < FIRST_PSEUDO_REGISTER
3934 /* Avoid subtargets inside loops,
3935 since they hide some invariant expressions. */
3936 || preserve_subexpressions_p ())
3937 ? 0 : x);
3938 }
3939
3940 /* Expand an assignment that stores the value of FROM into TO.
3941 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3942 (This may contain a QUEUED rtx;
3943 if the value is constant, this rtx is a constant.)
3944 Otherwise, the returned value is NULL_RTX.
3945
3946 SUGGEST_REG is no longer actually used.
3947 It used to mean, copy the value through a register
3948 and return that register, if that is possible.
3949 We now use WANT_VALUE to decide whether to do this. */
3950
3951 rtx
3952 expand_assignment (to, from, want_value, suggest_reg)
3953 tree to, from;
3954 int want_value;
3955 int suggest_reg ATTRIBUTE_UNUSED;
3956 {
3957 rtx to_rtx = 0;
3958 rtx result;
3959
3960 /* Don't crash if the lhs of the assignment was erroneous. */
3961
3962 if (TREE_CODE (to) == ERROR_MARK)
3963 {
3964 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3965 return want_value ? result : NULL_RTX;
3966 }
3967
3968 /* Assignment of a structure component needs special treatment
3969 if the structure component's rtx is not simply a MEM.
3970 Assignment of an array element at a constant index, and assignment of
3971 an array element in an unaligned packed structure field, has the same
3972 problem. */
3973
3974 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3975 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3976 {
3977 enum machine_mode mode1;
3978 HOST_WIDE_INT bitsize, bitpos;
3979 rtx orig_to_rtx;
3980 tree offset;
3981 int unsignedp;
3982 int volatilep = 0;
3983 tree tem;
3984
3985 push_temp_slots ();
3986 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3987 &unsignedp, &volatilep);
3988
3989 /* If we are going to use store_bit_field and extract_bit_field,
3990 make sure to_rtx will be safe for multiple use. */
3991
3992 if (mode1 == VOIDmode && want_value)
3993 tem = stabilize_reference (tem);
3994
3995 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3996
3997 if (offset != 0)
3998 {
3999 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4000
4001 if (GET_CODE (to_rtx) != MEM)
4002 abort ();
4003
4004 #ifdef POINTERS_EXTEND_UNSIGNED
4005 if (GET_MODE (offset_rtx) != Pmode)
4006 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4007 #else
4008 if (GET_MODE (offset_rtx) != ptr_mode)
4009 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4010 #endif
4011
4012 /* A constant address in TO_RTX can have VOIDmode, we must not try
4013 to call force_reg for that case. Avoid that case. */
4014 if (GET_CODE (to_rtx) == MEM
4015 && GET_MODE (to_rtx) == BLKmode
4016 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4017 && bitsize > 0
4018 && (bitpos % bitsize) == 0
4019 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4020 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4021 {
4022 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4023 bitpos = 0;
4024 }
4025
4026 to_rtx = offset_address (to_rtx, offset_rtx,
4027 highest_pow2_factor_for_type (TREE_TYPE (to),
4028 offset));
4029 }
4030
4031 if (GET_CODE (to_rtx) == MEM)
4032 {
4033 /* If the field is at offset zero, we could have been given the
4034 DECL_RTX of the parent struct. Don't munge it. */
4035 to_rtx = shallow_copy_rtx (to_rtx);
4036
4037 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4038 }
4039
4040 /* Deal with volatile and readonly fields. The former is only done
4041 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4042 if (volatilep && GET_CODE (to_rtx) == MEM)
4043 {
4044 if (to_rtx == orig_to_rtx)
4045 to_rtx = copy_rtx (to_rtx);
4046 MEM_VOLATILE_P (to_rtx) = 1;
4047 }
4048
4049 if (TREE_CODE (to) == COMPONENT_REF
4050 && TREE_READONLY (TREE_OPERAND (to, 1)))
4051 {
4052 if (to_rtx == orig_to_rtx)
4053 to_rtx = copy_rtx (to_rtx);
4054 RTX_UNCHANGING_P (to_rtx) = 1;
4055 }
4056
4057 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4058 {
4059 if (to_rtx == orig_to_rtx)
4060 to_rtx = copy_rtx (to_rtx);
4061 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4062 }
4063
4064 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4065 (want_value
4066 /* Spurious cast for HPUX compiler. */
4067 ? ((enum machine_mode)
4068 TYPE_MODE (TREE_TYPE (to)))
4069 : VOIDmode),
4070 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4071
4072 preserve_temp_slots (result);
4073 free_temp_slots ();
4074 pop_temp_slots ();
4075
4076 /* If the value is meaningful, convert RESULT to the proper mode.
4077 Otherwise, return nothing. */
4078 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4079 TYPE_MODE (TREE_TYPE (from)),
4080 result,
4081 TREE_UNSIGNED (TREE_TYPE (to)))
4082 : NULL_RTX);
4083 }
4084
4085 /* If the rhs is a function call and its value is not an aggregate,
4086 call the function before we start to compute the lhs.
4087 This is needed for correct code for cases such as
4088 val = setjmp (buf) on machines where reference to val
4089 requires loading up part of an address in a separate insn.
4090
4091 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4092 since it might be a promoted variable where the zero- or sign- extension
4093 needs to be done. Handling this in the normal way is safe because no
4094 computation is done before the call. */
4095 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4096 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4097 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4098 && GET_CODE (DECL_RTL (to)) == REG))
4099 {
4100 rtx value;
4101
4102 push_temp_slots ();
4103 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4104 if (to_rtx == 0)
4105 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4106
4107 /* Handle calls that return values in multiple non-contiguous locations.
4108 The Irix 6 ABI has examples of this. */
4109 if (GET_CODE (to_rtx) == PARALLEL)
4110 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4111 else if (GET_MODE (to_rtx) == BLKmode)
4112 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4113 else
4114 {
4115 #ifdef POINTERS_EXTEND_UNSIGNED
4116 if (POINTER_TYPE_P (TREE_TYPE (to))
4117 && GET_MODE (to_rtx) != GET_MODE (value))
4118 value = convert_memory_address (GET_MODE (to_rtx), value);
4119 #endif
4120 emit_move_insn (to_rtx, value);
4121 }
4122 preserve_temp_slots (to_rtx);
4123 free_temp_slots ();
4124 pop_temp_slots ();
4125 return want_value ? to_rtx : NULL_RTX;
4126 }
4127
4128 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4129 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4130
4131 if (to_rtx == 0)
4132 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4133
4134 /* Don't move directly into a return register. */
4135 if (TREE_CODE (to) == RESULT_DECL
4136 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4137 {
4138 rtx temp;
4139
4140 push_temp_slots ();
4141 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4142
4143 if (GET_CODE (to_rtx) == PARALLEL)
4144 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4145 else
4146 emit_move_insn (to_rtx, temp);
4147
4148 preserve_temp_slots (to_rtx);
4149 free_temp_slots ();
4150 pop_temp_slots ();
4151 return want_value ? to_rtx : NULL_RTX;
4152 }
4153
4154 /* In case we are returning the contents of an object which overlaps
4155 the place the value is being stored, use a safe function when copying
4156 a value through a pointer into a structure value return block. */
4157 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4158 && current_function_returns_struct
4159 && !current_function_returns_pcc_struct)
4160 {
4161 rtx from_rtx, size;
4162
4163 push_temp_slots ();
4164 size = expr_size (from);
4165 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4166
4167 if (TARGET_MEM_FUNCTIONS)
4168 emit_library_call (memmove_libfunc, LCT_NORMAL,
4169 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4170 XEXP (from_rtx, 0), Pmode,
4171 convert_to_mode (TYPE_MODE (sizetype),
4172 size, TREE_UNSIGNED (sizetype)),
4173 TYPE_MODE (sizetype));
4174 else
4175 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4176 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4177 XEXP (to_rtx, 0), Pmode,
4178 convert_to_mode (TYPE_MODE (integer_type_node),
4179 size,
4180 TREE_UNSIGNED (integer_type_node)),
4181 TYPE_MODE (integer_type_node));
4182
4183 preserve_temp_slots (to_rtx);
4184 free_temp_slots ();
4185 pop_temp_slots ();
4186 return want_value ? to_rtx : NULL_RTX;
4187 }
4188
4189 /* Compute FROM and store the value in the rtx we got. */
4190
4191 push_temp_slots ();
4192 result = store_expr (from, to_rtx, want_value);
4193 preserve_temp_slots (result);
4194 free_temp_slots ();
4195 pop_temp_slots ();
4196 return want_value ? result : NULL_RTX;
4197 }
4198
4199 /* Generate code for computing expression EXP,
4200 and storing the value into TARGET.
4201 TARGET may contain a QUEUED rtx.
4202
4203 If WANT_VALUE is nonzero, return a copy of the value
4204 not in TARGET, so that we can be sure to use the proper
4205 value in a containing expression even if TARGET has something
4206 else stored in it. If possible, we copy the value through a pseudo
4207 and return that pseudo. Or, if the value is constant, we try to
4208 return the constant. In some cases, we return a pseudo
4209 copied *from* TARGET.
4210
4211 If the mode is BLKmode then we may return TARGET itself.
4212 It turns out that in BLKmode it doesn't cause a problem.
4213 because C has no operators that could combine two different
4214 assignments into the same BLKmode object with different values
4215 with no sequence point. Will other languages need this to
4216 be more thorough?
4217
4218 If WANT_VALUE is 0, we return NULL, to make sure
4219 to catch quickly any cases where the caller uses the value
4220 and fails to set WANT_VALUE. */
4221
4222 rtx
4223 store_expr (exp, target, want_value)
4224 tree exp;
4225 rtx target;
4226 int want_value;
4227 {
4228 rtx temp;
4229 int dont_return_target = 0;
4230 int dont_store_target = 0;
4231
4232 if (TREE_CODE (exp) == COMPOUND_EXPR)
4233 {
4234 /* Perform first part of compound expression, then assign from second
4235 part. */
4236 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4237 emit_queue ();
4238 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4239 }
4240 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4241 {
4242 /* For conditional expression, get safe form of the target. Then
4243 test the condition, doing the appropriate assignment on either
4244 side. This avoids the creation of unnecessary temporaries.
4245 For non-BLKmode, it is more efficient not to do this. */
4246
4247 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4248
4249 emit_queue ();
4250 target = protect_from_queue (target, 1);
4251
4252 do_pending_stack_adjust ();
4253 NO_DEFER_POP;
4254 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4255 start_cleanup_deferral ();
4256 store_expr (TREE_OPERAND (exp, 1), target, 0);
4257 end_cleanup_deferral ();
4258 emit_queue ();
4259 emit_jump_insn (gen_jump (lab2));
4260 emit_barrier ();
4261 emit_label (lab1);
4262 start_cleanup_deferral ();
4263 store_expr (TREE_OPERAND (exp, 2), target, 0);
4264 end_cleanup_deferral ();
4265 emit_queue ();
4266 emit_label (lab2);
4267 OK_DEFER_POP;
4268
4269 return want_value ? target : NULL_RTX;
4270 }
4271 else if (queued_subexp_p (target))
4272 /* If target contains a postincrement, let's not risk
4273 using it as the place to generate the rhs. */
4274 {
4275 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4276 {
4277 /* Expand EXP into a new pseudo. */
4278 temp = gen_reg_rtx (GET_MODE (target));
4279 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4280 }
4281 else
4282 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4283
4284 /* If target is volatile, ANSI requires accessing the value
4285 *from* the target, if it is accessed. So make that happen.
4286 In no case return the target itself. */
4287 if (! MEM_VOLATILE_P (target) && want_value)
4288 dont_return_target = 1;
4289 }
4290 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4291 && GET_MODE (target) != BLKmode)
4292 /* If target is in memory and caller wants value in a register instead,
4293 arrange that. Pass TARGET as target for expand_expr so that,
4294 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4295 We know expand_expr will not use the target in that case.
4296 Don't do this if TARGET is volatile because we are supposed
4297 to write it and then read it. */
4298 {
4299 temp = expand_expr (exp, target, GET_MODE (target), 0);
4300 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4301 {
4302 /* If TEMP is already in the desired TARGET, only copy it from
4303 memory and don't store it there again. */
4304 if (temp == target
4305 || (rtx_equal_p (temp, target)
4306 && ! side_effects_p (temp) && ! side_effects_p (target)))
4307 dont_store_target = 1;
4308 temp = copy_to_reg (temp);
4309 }
4310 dont_return_target = 1;
4311 }
4312 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4313 /* If this is an scalar in a register that is stored in a wider mode
4314 than the declared mode, compute the result into its declared mode
4315 and then convert to the wider mode. Our value is the computed
4316 expression. */
4317 {
4318 rtx inner_target = 0;
4319
4320 /* If we don't want a value, we can do the conversion inside EXP,
4321 which will often result in some optimizations. Do the conversion
4322 in two steps: first change the signedness, if needed, then
4323 the extend. But don't do this if the type of EXP is a subtype
4324 of something else since then the conversion might involve
4325 more than just converting modes. */
4326 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4327 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4328 {
4329 if (TREE_UNSIGNED (TREE_TYPE (exp))
4330 != SUBREG_PROMOTED_UNSIGNED_P (target))
4331 exp = convert
4332 ((*lang_hooks.types.signed_or_unsigned_type)
4333 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4334
4335 exp = convert ((*lang_hooks.types.type_for_mode)
4336 (GET_MODE (SUBREG_REG (target)),
4337 SUBREG_PROMOTED_UNSIGNED_P (target)),
4338 exp);
4339
4340 inner_target = SUBREG_REG (target);
4341 }
4342
4343 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4344
4345 /* If TEMP is a volatile MEM and we want a result value, make
4346 the access now so it gets done only once. Likewise if
4347 it contains TARGET. */
4348 if (GET_CODE (temp) == MEM && want_value
4349 && (MEM_VOLATILE_P (temp)
4350 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4351 temp = copy_to_reg (temp);
4352
4353 /* If TEMP is a VOIDmode constant, use convert_modes to make
4354 sure that we properly convert it. */
4355 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4356 {
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4358 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4359 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4360 GET_MODE (target), temp,
4361 SUBREG_PROMOTED_UNSIGNED_P (target));
4362 }
4363
4364 convert_move (SUBREG_REG (target), temp,
4365 SUBREG_PROMOTED_UNSIGNED_P (target));
4366
4367 /* If we promoted a constant, change the mode back down to match
4368 target. Otherwise, the caller might get confused by a result whose
4369 mode is larger than expected. */
4370
4371 if (want_value && GET_MODE (temp) != GET_MODE (target))
4372 {
4373 if (GET_MODE (temp) != VOIDmode)
4374 {
4375 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4376 SUBREG_PROMOTED_VAR_P (temp) = 1;
4377 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4378 SUBREG_PROMOTED_UNSIGNED_P (target));
4379 }
4380 else
4381 temp = convert_modes (GET_MODE (target),
4382 GET_MODE (SUBREG_REG (target)),
4383 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4384 }
4385
4386 return want_value ? temp : NULL_RTX;
4387 }
4388 else
4389 {
4390 temp = expand_expr (exp, target, GET_MODE (target), 0);
4391 /* Return TARGET if it's a specified hardware register.
4392 If TARGET is a volatile mem ref, either return TARGET
4393 or return a reg copied *from* TARGET; ANSI requires this.
4394
4395 Otherwise, if TEMP is not TARGET, return TEMP
4396 if it is constant (for efficiency),
4397 or if we really want the correct value. */
4398 if (!(target && GET_CODE (target) == REG
4399 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4400 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4401 && ! rtx_equal_p (temp, target)
4402 && (CONSTANT_P (temp) || want_value))
4403 dont_return_target = 1;
4404 }
4405
4406 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4407 the same as that of TARGET, adjust the constant. This is needed, for
4408 example, in case it is a CONST_DOUBLE and we want only a word-sized
4409 value. */
4410 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4411 && TREE_CODE (exp) != ERROR_MARK
4412 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4413 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4414 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4415
4416 /* If value was not generated in the target, store it there.
4417 Convert the value to TARGET's type first if necessary.
4418 If TEMP and TARGET compare equal according to rtx_equal_p, but
4419 one or both of them are volatile memory refs, we have to distinguish
4420 two cases:
4421 - expand_expr has used TARGET. In this case, we must not generate
4422 another copy. This can be detected by TARGET being equal according
4423 to == .
4424 - expand_expr has not used TARGET - that means that the source just
4425 happens to have the same RTX form. Since temp will have been created
4426 by expand_expr, it will compare unequal according to == .
4427 We must generate a copy in this case, to reach the correct number
4428 of volatile memory references. */
4429
4430 if ((! rtx_equal_p (temp, target)
4431 || (temp != target && (side_effects_p (temp)
4432 || side_effects_p (target))))
4433 && TREE_CODE (exp) != ERROR_MARK
4434 && ! dont_store_target
4435 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4436 but TARGET is not valid memory reference, TEMP will differ
4437 from TARGET although it is really the same location. */
4438 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4439 || target != DECL_RTL_IF_SET (exp))
4440 /* If there's nothing to copy, don't bother. Don't call expr_size
4441 unless necessary, because some front-ends (C++) expr_size-hook
4442 aborts on objects that are not supposed to be bit-copied or
4443 bit-initialized. */
4444 && expr_size (exp) != const0_rtx)
4445 {
4446 target = protect_from_queue (target, 1);
4447 if (GET_MODE (temp) != GET_MODE (target)
4448 && GET_MODE (temp) != VOIDmode)
4449 {
4450 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4451 if (dont_return_target)
4452 {
4453 /* In this case, we will return TEMP,
4454 so make sure it has the proper mode.
4455 But don't forget to store the value into TARGET. */
4456 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4457 emit_move_insn (target, temp);
4458 }
4459 else
4460 convert_move (target, temp, unsignedp);
4461 }
4462
4463 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4464 {
4465 /* Handle copying a string constant into an array. The string
4466 constant may be shorter than the array. So copy just the string's
4467 actual length, and clear the rest. First get the size of the data
4468 type of the string, which is actually the size of the target. */
4469 rtx size = expr_size (exp);
4470
4471 if (GET_CODE (size) == CONST_INT
4472 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4473 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4474 else
4475 {
4476 /* Compute the size of the data to copy from the string. */
4477 tree copy_size
4478 = size_binop (MIN_EXPR,
4479 make_tree (sizetype, size),
4480 size_int (TREE_STRING_LENGTH (exp)));
4481 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4482 VOIDmode, 0);
4483 rtx label = 0;
4484
4485 /* Copy that much. */
4486 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4487 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4488
4489 /* Figure out how much is left in TARGET that we have to clear.
4490 Do all calculations in ptr_mode. */
4491 if (GET_CODE (copy_size_rtx) == CONST_INT)
4492 {
4493 size = plus_constant (size, -INTVAL (copy_size_rtx));
4494 target = adjust_address (target, BLKmode,
4495 INTVAL (copy_size_rtx));
4496 }
4497 else
4498 {
4499 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4500 copy_size_rtx, NULL_RTX, 0,
4501 OPTAB_LIB_WIDEN);
4502
4503 #ifdef POINTERS_EXTEND_UNSIGNED
4504 if (GET_MODE (copy_size_rtx) != Pmode)
4505 copy_size_rtx = convert_memory_address (Pmode,
4506 copy_size_rtx);
4507 #endif
4508
4509 target = offset_address (target, copy_size_rtx,
4510 highest_pow2_factor (copy_size));
4511 label = gen_label_rtx ();
4512 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4513 GET_MODE (size), 0, label);
4514 }
4515
4516 if (size != const0_rtx)
4517 clear_storage (target, size);
4518
4519 if (label)
4520 emit_label (label);
4521 }
4522 }
4523 /* Handle calls that return values in multiple non-contiguous locations.
4524 The Irix 6 ABI has examples of this. */
4525 else if (GET_CODE (target) == PARALLEL)
4526 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4527 else if (GET_MODE (temp) == BLKmode)
4528 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4529 else
4530 emit_move_insn (target, temp);
4531 }
4532
4533 /* If we don't want a value, return NULL_RTX. */
4534 if (! want_value)
4535 return NULL_RTX;
4536
4537 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4538 ??? The latter test doesn't seem to make sense. */
4539 else if (dont_return_target && GET_CODE (temp) != MEM)
4540 return temp;
4541
4542 /* Return TARGET itself if it is a hard register. */
4543 else if (want_value && GET_MODE (target) != BLKmode
4544 && ! (GET_CODE (target) == REG
4545 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4546 return copy_to_reg (target);
4547
4548 else
4549 return target;
4550 }
4551 \f
4552 /* Return 1 if EXP just contains zeros. */
4553
4554 static int
4555 is_zeros_p (exp)
4556 tree exp;
4557 {
4558 tree elt;
4559
4560 switch (TREE_CODE (exp))
4561 {
4562 case CONVERT_EXPR:
4563 case NOP_EXPR:
4564 case NON_LVALUE_EXPR:
4565 case VIEW_CONVERT_EXPR:
4566 return is_zeros_p (TREE_OPERAND (exp, 0));
4567
4568 case INTEGER_CST:
4569 return integer_zerop (exp);
4570
4571 case COMPLEX_CST:
4572 return
4573 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4574
4575 case REAL_CST:
4576 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4577
4578 case VECTOR_CST:
4579 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4580 elt = TREE_CHAIN (elt))
4581 if (!is_zeros_p (TREE_VALUE (elt)))
4582 return 0;
4583
4584 return 1;
4585
4586 case CONSTRUCTOR:
4587 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4588 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4589 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4590 if (! is_zeros_p (TREE_VALUE (elt)))
4591 return 0;
4592
4593 return 1;
4594
4595 default:
4596 return 0;
4597 }
4598 }
4599
4600 /* Return 1 if EXP contains mostly (3/4) zeros. */
4601
4602 static int
4603 mostly_zeros_p (exp)
4604 tree exp;
4605 {
4606 if (TREE_CODE (exp) == CONSTRUCTOR)
4607 {
4608 int elts = 0, zeros = 0;
4609 tree elt = CONSTRUCTOR_ELTS (exp);
4610 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4611 {
4612 /* If there are no ranges of true bits, it is all zero. */
4613 return elt == NULL_TREE;
4614 }
4615 for (; elt; elt = TREE_CHAIN (elt))
4616 {
4617 /* We do not handle the case where the index is a RANGE_EXPR,
4618 so the statistic will be somewhat inaccurate.
4619 We do make a more accurate count in store_constructor itself,
4620 so since this function is only used for nested array elements,
4621 this should be close enough. */
4622 if (mostly_zeros_p (TREE_VALUE (elt)))
4623 zeros++;
4624 elts++;
4625 }
4626
4627 return 4 * zeros >= 3 * elts;
4628 }
4629
4630 return is_zeros_p (exp);
4631 }
4632 \f
4633 /* Helper function for store_constructor.
4634 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4635 TYPE is the type of the CONSTRUCTOR, not the element type.
4636 CLEARED is as for store_constructor.
4637 ALIAS_SET is the alias set to use for any stores.
4638
4639 This provides a recursive shortcut back to store_constructor when it isn't
4640 necessary to go through store_field. This is so that we can pass through
4641 the cleared field to let store_constructor know that we may not have to
4642 clear a substructure if the outer structure has already been cleared. */
4643
4644 static void
4645 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4646 alias_set)
4647 rtx target;
4648 unsigned HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
4650 enum machine_mode mode;
4651 tree exp, type;
4652 int cleared;
4653 int alias_set;
4654 {
4655 if (TREE_CODE (exp) == CONSTRUCTOR
4656 && bitpos % BITS_PER_UNIT == 0
4657 /* If we have a nonzero bitpos for a register target, then we just
4658 let store_field do the bitfield handling. This is unlikely to
4659 generate unnecessary clear instructions anyways. */
4660 && (bitpos == 0 || GET_CODE (target) == MEM))
4661 {
4662 if (GET_CODE (target) == MEM)
4663 target
4664 = adjust_address (target,
4665 GET_MODE (target) == BLKmode
4666 || 0 != (bitpos
4667 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4668 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4669
4670
4671 /* Update the alias set, if required. */
4672 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4673 && MEM_ALIAS_SET (target) != 0)
4674 {
4675 target = copy_rtx (target);
4676 set_mem_alias_set (target, alias_set);
4677 }
4678
4679 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4680 }
4681 else
4682 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4683 alias_set);
4684 }
4685
4686 /* Store the value of constructor EXP into the rtx TARGET.
4687 TARGET is either a REG or a MEM; we know it cannot conflict, since
4688 safe_from_p has been called.
4689 CLEARED is true if TARGET is known to have been zero'd.
4690 SIZE is the number of bytes of TARGET we are allowed to modify: this
4691 may not be the same as the size of EXP if we are assigning to a field
4692 which has been packed to exclude padding bits. */
4693
4694 static void
4695 store_constructor (exp, target, cleared, size)
4696 tree exp;
4697 rtx target;
4698 int cleared;
4699 HOST_WIDE_INT size;
4700 {
4701 tree type = TREE_TYPE (exp);
4702 #ifdef WORD_REGISTER_OPERATIONS
4703 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4704 #endif
4705
4706 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4707 || TREE_CODE (type) == QUAL_UNION_TYPE)
4708 {
4709 tree elt;
4710
4711 /* We either clear the aggregate or indicate the value is dead. */
4712 if ((TREE_CODE (type) == UNION_TYPE
4713 || TREE_CODE (type) == QUAL_UNION_TYPE)
4714 && ! cleared
4715 && ! CONSTRUCTOR_ELTS (exp))
4716 /* If the constructor is empty, clear the union. */
4717 {
4718 clear_storage (target, expr_size (exp));
4719 cleared = 1;
4720 }
4721
4722 /* If we are building a static constructor into a register,
4723 set the initial value as zero so we can fold the value into
4724 a constant. But if more than one register is involved,
4725 this probably loses. */
4726 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4727 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4728 {
4729 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4730 cleared = 1;
4731 }
4732
4733 /* If the constructor has fewer fields than the structure
4734 or if we are initializing the structure to mostly zeros,
4735 clear the whole structure first. Don't do this if TARGET is a
4736 register whose mode size isn't equal to SIZE since clear_storage
4737 can't handle this case. */
4738 else if (! cleared && size > 0
4739 && ((list_length (CONSTRUCTOR_ELTS (exp))
4740 != fields_length (type))
4741 || mostly_zeros_p (exp))
4742 && (GET_CODE (target) != REG
4743 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4744 == size)))
4745 {
4746 clear_storage (target, GEN_INT (size));
4747 cleared = 1;
4748 }
4749
4750 if (! cleared)
4751 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4752
4753 /* Store each element of the constructor into
4754 the corresponding field of TARGET. */
4755
4756 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4757 {
4758 tree field = TREE_PURPOSE (elt);
4759 tree value = TREE_VALUE (elt);
4760 enum machine_mode mode;
4761 HOST_WIDE_INT bitsize;
4762 HOST_WIDE_INT bitpos = 0;
4763 int unsignedp;
4764 tree offset;
4765 rtx to_rtx = target;
4766
4767 /* Just ignore missing fields.
4768 We cleared the whole structure, above,
4769 if any fields are missing. */
4770 if (field == 0)
4771 continue;
4772
4773 if (cleared && is_zeros_p (value))
4774 continue;
4775
4776 if (host_integerp (DECL_SIZE (field), 1))
4777 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4778 else
4779 bitsize = -1;
4780
4781 unsignedp = TREE_UNSIGNED (field);
4782 mode = DECL_MODE (field);
4783 if (DECL_BIT_FIELD (field))
4784 mode = VOIDmode;
4785
4786 offset = DECL_FIELD_OFFSET (field);
4787 if (host_integerp (offset, 0)
4788 && host_integerp (bit_position (field), 0))
4789 {
4790 bitpos = int_bit_position (field);
4791 offset = 0;
4792 }
4793 else
4794 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4795
4796 if (offset)
4797 {
4798 rtx offset_rtx;
4799
4800 if (contains_placeholder_p (offset))
4801 offset = build (WITH_RECORD_EXPR, sizetype,
4802 offset, make_tree (TREE_TYPE (exp), target));
4803
4804 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4805 if (GET_CODE (to_rtx) != MEM)
4806 abort ();
4807
4808 #ifdef POINTERS_EXTEND_UNSIGNED
4809 if (GET_MODE (offset_rtx) != Pmode)
4810 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4811 #else
4812 if (GET_MODE (offset_rtx) != ptr_mode)
4813 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4814 #endif
4815
4816 to_rtx = offset_address (to_rtx, offset_rtx,
4817 highest_pow2_factor (offset));
4818 }
4819
4820 if (TREE_READONLY (field))
4821 {
4822 if (GET_CODE (to_rtx) == MEM)
4823 to_rtx = copy_rtx (to_rtx);
4824
4825 RTX_UNCHANGING_P (to_rtx) = 1;
4826 }
4827
4828 #ifdef WORD_REGISTER_OPERATIONS
4829 /* If this initializes a field that is smaller than a word, at the
4830 start of a word, try to widen it to a full word.
4831 This special case allows us to output C++ member function
4832 initializations in a form that the optimizers can understand. */
4833 if (GET_CODE (target) == REG
4834 && bitsize < BITS_PER_WORD
4835 && bitpos % BITS_PER_WORD == 0
4836 && GET_MODE_CLASS (mode) == MODE_INT
4837 && TREE_CODE (value) == INTEGER_CST
4838 && exp_size >= 0
4839 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4840 {
4841 tree type = TREE_TYPE (value);
4842
4843 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4844 {
4845 type = (*lang_hooks.types.type_for_size)
4846 (BITS_PER_WORD, TREE_UNSIGNED (type));
4847 value = convert (type, value);
4848 }
4849
4850 if (BYTES_BIG_ENDIAN)
4851 value
4852 = fold (build (LSHIFT_EXPR, type, value,
4853 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4854 bitsize = BITS_PER_WORD;
4855 mode = word_mode;
4856 }
4857 #endif
4858
4859 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4860 && DECL_NONADDRESSABLE_P (field))
4861 {
4862 to_rtx = copy_rtx (to_rtx);
4863 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4864 }
4865
4866 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4867 value, type, cleared,
4868 get_alias_set (TREE_TYPE (field)));
4869 }
4870 }
4871 else if (TREE_CODE (type) == ARRAY_TYPE
4872 || TREE_CODE (type) == VECTOR_TYPE)
4873 {
4874 tree elt;
4875 int i;
4876 int need_to_clear;
4877 tree domain = TYPE_DOMAIN (type);
4878 tree elttype = TREE_TYPE (type);
4879 int const_bounds_p;
4880 HOST_WIDE_INT minelt = 0;
4881 HOST_WIDE_INT maxelt = 0;
4882
4883 /* Vectors are like arrays, but the domain is stored via an array
4884 type indirectly. */
4885 if (TREE_CODE (type) == VECTOR_TYPE)
4886 {
4887 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4888 the same field as TYPE_DOMAIN, we are not guaranteed that
4889 it always will. */
4890 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4891 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4892 }
4893
4894 const_bounds_p = (TYPE_MIN_VALUE (domain)
4895 && TYPE_MAX_VALUE (domain)
4896 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4897 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4898
4899 /* If we have constant bounds for the range of the type, get them. */
4900 if (const_bounds_p)
4901 {
4902 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4903 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4904 }
4905
4906 /* If the constructor has fewer elements than the array,
4907 clear the whole array first. Similarly if this is
4908 static constructor of a non-BLKmode object. */
4909 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4910 need_to_clear = 1;
4911 else
4912 {
4913 HOST_WIDE_INT count = 0, zero_count = 0;
4914 need_to_clear = ! const_bounds_p;
4915
4916 /* This loop is a more accurate version of the loop in
4917 mostly_zeros_p (it handles RANGE_EXPR in an index).
4918 It is also needed to check for missing elements. */
4919 for (elt = CONSTRUCTOR_ELTS (exp);
4920 elt != NULL_TREE && ! need_to_clear;
4921 elt = TREE_CHAIN (elt))
4922 {
4923 tree index = TREE_PURPOSE (elt);
4924 HOST_WIDE_INT this_node_count;
4925
4926 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4927 {
4928 tree lo_index = TREE_OPERAND (index, 0);
4929 tree hi_index = TREE_OPERAND (index, 1);
4930
4931 if (! host_integerp (lo_index, 1)
4932 || ! host_integerp (hi_index, 1))
4933 {
4934 need_to_clear = 1;
4935 break;
4936 }
4937
4938 this_node_count = (tree_low_cst (hi_index, 1)
4939 - tree_low_cst (lo_index, 1) + 1);
4940 }
4941 else
4942 this_node_count = 1;
4943
4944 count += this_node_count;
4945 if (mostly_zeros_p (TREE_VALUE (elt)))
4946 zero_count += this_node_count;
4947 }
4948
4949 /* Clear the entire array first if there are any missing elements,
4950 or if the incidence of zero elements is >= 75%. */
4951 if (! need_to_clear
4952 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4953 need_to_clear = 1;
4954 }
4955
4956 if (need_to_clear && size > 0)
4957 {
4958 if (! cleared)
4959 {
4960 if (REG_P (target))
4961 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4962 else
4963 clear_storage (target, GEN_INT (size));
4964 }
4965 cleared = 1;
4966 }
4967 else if (REG_P (target))
4968 /* Inform later passes that the old value is dead. */
4969 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4970
4971 /* Store each element of the constructor into
4972 the corresponding element of TARGET, determined
4973 by counting the elements. */
4974 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4975 elt;
4976 elt = TREE_CHAIN (elt), i++)
4977 {
4978 enum machine_mode mode;
4979 HOST_WIDE_INT bitsize;
4980 HOST_WIDE_INT bitpos;
4981 int unsignedp;
4982 tree value = TREE_VALUE (elt);
4983 tree index = TREE_PURPOSE (elt);
4984 rtx xtarget = target;
4985
4986 if (cleared && is_zeros_p (value))
4987 continue;
4988
4989 unsignedp = TREE_UNSIGNED (elttype);
4990 mode = TYPE_MODE (elttype);
4991 if (mode == BLKmode)
4992 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4993 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4994 : -1);
4995 else
4996 bitsize = GET_MODE_BITSIZE (mode);
4997
4998 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4999 {
5000 tree lo_index = TREE_OPERAND (index, 0);
5001 tree hi_index = TREE_OPERAND (index, 1);
5002 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5003 struct nesting *loop;
5004 HOST_WIDE_INT lo, hi, count;
5005 tree position;
5006
5007 /* If the range is constant and "small", unroll the loop. */
5008 if (const_bounds_p
5009 && host_integerp (lo_index, 0)
5010 && host_integerp (hi_index, 0)
5011 && (lo = tree_low_cst (lo_index, 0),
5012 hi = tree_low_cst (hi_index, 0),
5013 count = hi - lo + 1,
5014 (GET_CODE (target) != MEM
5015 || count <= 2
5016 || (host_integerp (TYPE_SIZE (elttype), 1)
5017 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5018 <= 40 * 8)))))
5019 {
5020 lo -= minelt; hi -= minelt;
5021 for (; lo <= hi; lo++)
5022 {
5023 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5024
5025 if (GET_CODE (target) == MEM
5026 && !MEM_KEEP_ALIAS_SET_P (target)
5027 && TREE_CODE (type) == ARRAY_TYPE
5028 && TYPE_NONALIASED_COMPONENT (type))
5029 {
5030 target = copy_rtx (target);
5031 MEM_KEEP_ALIAS_SET_P (target) = 1;
5032 }
5033
5034 store_constructor_field
5035 (target, bitsize, bitpos, mode, value, type, cleared,
5036 get_alias_set (elttype));
5037 }
5038 }
5039 else
5040 {
5041 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5042 loop_top = gen_label_rtx ();
5043 loop_end = gen_label_rtx ();
5044
5045 unsignedp = TREE_UNSIGNED (domain);
5046
5047 index = build_decl (VAR_DECL, NULL_TREE, domain);
5048
5049 index_r
5050 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5051 &unsignedp, 0));
5052 SET_DECL_RTL (index, index_r);
5053 if (TREE_CODE (value) == SAVE_EXPR
5054 && SAVE_EXPR_RTL (value) == 0)
5055 {
5056 /* Make sure value gets expanded once before the
5057 loop. */
5058 expand_expr (value, const0_rtx, VOIDmode, 0);
5059 emit_queue ();
5060 }
5061 store_expr (lo_index, index_r, 0);
5062 loop = expand_start_loop (0);
5063
5064 /* Assign value to element index. */
5065 position
5066 = convert (ssizetype,
5067 fold (build (MINUS_EXPR, TREE_TYPE (index),
5068 index, TYPE_MIN_VALUE (domain))));
5069 position = size_binop (MULT_EXPR, position,
5070 convert (ssizetype,
5071 TYPE_SIZE_UNIT (elttype)));
5072
5073 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5074 xtarget = offset_address (target, pos_rtx,
5075 highest_pow2_factor (position));
5076 xtarget = adjust_address (xtarget, mode, 0);
5077 if (TREE_CODE (value) == CONSTRUCTOR)
5078 store_constructor (value, xtarget, cleared,
5079 bitsize / BITS_PER_UNIT);
5080 else
5081 store_expr (value, xtarget, 0);
5082
5083 expand_exit_loop_if_false (loop,
5084 build (LT_EXPR, integer_type_node,
5085 index, hi_index));
5086
5087 expand_increment (build (PREINCREMENT_EXPR,
5088 TREE_TYPE (index),
5089 index, integer_one_node), 0, 0);
5090 expand_end_loop ();
5091 emit_label (loop_end);
5092 }
5093 }
5094 else if ((index != 0 && ! host_integerp (index, 0))
5095 || ! host_integerp (TYPE_SIZE (elttype), 1))
5096 {
5097 tree position;
5098
5099 if (index == 0)
5100 index = ssize_int (1);
5101
5102 if (minelt)
5103 index = convert (ssizetype,
5104 fold (build (MINUS_EXPR, index,
5105 TYPE_MIN_VALUE (domain))));
5106
5107 position = size_binop (MULT_EXPR, index,
5108 convert (ssizetype,
5109 TYPE_SIZE_UNIT (elttype)));
5110 xtarget = offset_address (target,
5111 expand_expr (position, 0, VOIDmode, 0),
5112 highest_pow2_factor (position));
5113 xtarget = adjust_address (xtarget, mode, 0);
5114 store_expr (value, xtarget, 0);
5115 }
5116 else
5117 {
5118 if (index != 0)
5119 bitpos = ((tree_low_cst (index, 0) - minelt)
5120 * tree_low_cst (TYPE_SIZE (elttype), 1));
5121 else
5122 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5123
5124 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5125 && TREE_CODE (type) == ARRAY_TYPE
5126 && TYPE_NONALIASED_COMPONENT (type))
5127 {
5128 target = copy_rtx (target);
5129 MEM_KEEP_ALIAS_SET_P (target) = 1;
5130 }
5131
5132 store_constructor_field (target, bitsize, bitpos, mode, value,
5133 type, cleared, get_alias_set (elttype));
5134
5135 }
5136 }
5137 }
5138
5139 /* Set constructor assignments. */
5140 else if (TREE_CODE (type) == SET_TYPE)
5141 {
5142 tree elt = CONSTRUCTOR_ELTS (exp);
5143 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5144 tree domain = TYPE_DOMAIN (type);
5145 tree domain_min, domain_max, bitlength;
5146
5147 /* The default implementation strategy is to extract the constant
5148 parts of the constructor, use that to initialize the target,
5149 and then "or" in whatever non-constant ranges we need in addition.
5150
5151 If a large set is all zero or all ones, it is
5152 probably better to set it using memset (if available) or bzero.
5153 Also, if a large set has just a single range, it may also be
5154 better to first clear all the first clear the set (using
5155 bzero/memset), and set the bits we want. */
5156
5157 /* Check for all zeros. */
5158 if (elt == NULL_TREE && size > 0)
5159 {
5160 if (!cleared)
5161 clear_storage (target, GEN_INT (size));
5162 return;
5163 }
5164
5165 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5166 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5167 bitlength = size_binop (PLUS_EXPR,
5168 size_diffop (domain_max, domain_min),
5169 ssize_int (1));
5170
5171 nbits = tree_low_cst (bitlength, 1);
5172
5173 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5174 are "complicated" (more than one range), initialize (the
5175 constant parts) by copying from a constant. */
5176 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5177 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5178 {
5179 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5180 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5181 char *bit_buffer = (char *) alloca (nbits);
5182 HOST_WIDE_INT word = 0;
5183 unsigned int bit_pos = 0;
5184 unsigned int ibit = 0;
5185 unsigned int offset = 0; /* In bytes from beginning of set. */
5186
5187 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5188 for (;;)
5189 {
5190 if (bit_buffer[ibit])
5191 {
5192 if (BYTES_BIG_ENDIAN)
5193 word |= (1 << (set_word_size - 1 - bit_pos));
5194 else
5195 word |= 1 << bit_pos;
5196 }
5197
5198 bit_pos++; ibit++;
5199 if (bit_pos >= set_word_size || ibit == nbits)
5200 {
5201 if (word != 0 || ! cleared)
5202 {
5203 rtx datum = GEN_INT (word);
5204 rtx to_rtx;
5205
5206 /* The assumption here is that it is safe to use
5207 XEXP if the set is multi-word, but not if
5208 it's single-word. */
5209 if (GET_CODE (target) == MEM)
5210 to_rtx = adjust_address (target, mode, offset);
5211 else if (offset == 0)
5212 to_rtx = target;
5213 else
5214 abort ();
5215 emit_move_insn (to_rtx, datum);
5216 }
5217
5218 if (ibit == nbits)
5219 break;
5220 word = 0;
5221 bit_pos = 0;
5222 offset += set_word_size / BITS_PER_UNIT;
5223 }
5224 }
5225 }
5226 else if (!cleared)
5227 /* Don't bother clearing storage if the set is all ones. */
5228 if (TREE_CHAIN (elt) != NULL_TREE
5229 || (TREE_PURPOSE (elt) == NULL_TREE
5230 ? nbits != 1
5231 : ( ! host_integerp (TREE_VALUE (elt), 0)
5232 || ! host_integerp (TREE_PURPOSE (elt), 0)
5233 || (tree_low_cst (TREE_VALUE (elt), 0)
5234 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5235 != (HOST_WIDE_INT) nbits))))
5236 clear_storage (target, expr_size (exp));
5237
5238 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5239 {
5240 /* Start of range of element or NULL. */
5241 tree startbit = TREE_PURPOSE (elt);
5242 /* End of range of element, or element value. */
5243 tree endbit = TREE_VALUE (elt);
5244 HOST_WIDE_INT startb, endb;
5245 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5246
5247 bitlength_rtx = expand_expr (bitlength,
5248 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5249
5250 /* Handle non-range tuple element like [ expr ]. */
5251 if (startbit == NULL_TREE)
5252 {
5253 startbit = save_expr (endbit);
5254 endbit = startbit;
5255 }
5256
5257 startbit = convert (sizetype, startbit);
5258 endbit = convert (sizetype, endbit);
5259 if (! integer_zerop (domain_min))
5260 {
5261 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5262 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5263 }
5264 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5265 EXPAND_CONST_ADDRESS);
5266 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5267 EXPAND_CONST_ADDRESS);
5268
5269 if (REG_P (target))
5270 {
5271 targetx
5272 = assign_temp
5273 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5274 (GET_MODE (target), 0),
5275 TYPE_QUAL_CONST)),
5276 0, 1, 1);
5277 emit_move_insn (targetx, target);
5278 }
5279
5280 else if (GET_CODE (target) == MEM)
5281 targetx = target;
5282 else
5283 abort ();
5284
5285 /* Optimization: If startbit and endbit are constants divisible
5286 by BITS_PER_UNIT, call memset instead. */
5287 if (TARGET_MEM_FUNCTIONS
5288 && TREE_CODE (startbit) == INTEGER_CST
5289 && TREE_CODE (endbit) == INTEGER_CST
5290 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5291 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5292 {
5293 emit_library_call (memset_libfunc, LCT_NORMAL,
5294 VOIDmode, 3,
5295 plus_constant (XEXP (targetx, 0),
5296 startb / BITS_PER_UNIT),
5297 Pmode,
5298 constm1_rtx, TYPE_MODE (integer_type_node),
5299 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5300 TYPE_MODE (sizetype));
5301 }
5302 else
5303 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5304 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5305 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5306 startbit_rtx, TYPE_MODE (sizetype),
5307 endbit_rtx, TYPE_MODE (sizetype));
5308
5309 if (REG_P (target))
5310 emit_move_insn (target, targetx);
5311 }
5312 }
5313
5314 else
5315 abort ();
5316 }
5317
5318 /* Store the value of EXP (an expression tree)
5319 into a subfield of TARGET which has mode MODE and occupies
5320 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5321 If MODE is VOIDmode, it means that we are storing into a bit-field.
5322
5323 If VALUE_MODE is VOIDmode, return nothing in particular.
5324 UNSIGNEDP is not used in this case.
5325
5326 Otherwise, return an rtx for the value stored. This rtx
5327 has mode VALUE_MODE if that is convenient to do.
5328 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5329
5330 TYPE is the type of the underlying object,
5331
5332 ALIAS_SET is the alias set for the destination. This value will
5333 (in general) be different from that for TARGET, since TARGET is a
5334 reference to the containing structure. */
5335
5336 static rtx
5337 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5338 alias_set)
5339 rtx target;
5340 HOST_WIDE_INT bitsize;
5341 HOST_WIDE_INT bitpos;
5342 enum machine_mode mode;
5343 tree exp;
5344 enum machine_mode value_mode;
5345 int unsignedp;
5346 tree type;
5347 int alias_set;
5348 {
5349 HOST_WIDE_INT width_mask = 0;
5350
5351 if (TREE_CODE (exp) == ERROR_MARK)
5352 return const0_rtx;
5353
5354 /* If we have nothing to store, do nothing unless the expression has
5355 side-effects. */
5356 if (bitsize == 0)
5357 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5358 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5359 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5360
5361 /* If we are storing into an unaligned field of an aligned union that is
5362 in a register, we may have the mode of TARGET being an integer mode but
5363 MODE == BLKmode. In that case, get an aligned object whose size and
5364 alignment are the same as TARGET and store TARGET into it (we can avoid
5365 the store if the field being stored is the entire width of TARGET). Then
5366 call ourselves recursively to store the field into a BLKmode version of
5367 that object. Finally, load from the object into TARGET. This is not
5368 very efficient in general, but should only be slightly more expensive
5369 than the otherwise-required unaligned accesses. Perhaps this can be
5370 cleaned up later. */
5371
5372 if (mode == BLKmode
5373 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5374 {
5375 rtx object
5376 = assign_temp
5377 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5378 0, 1, 1);
5379 rtx blk_object = adjust_address (object, BLKmode, 0);
5380
5381 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5382 emit_move_insn (object, target);
5383
5384 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5385 alias_set);
5386
5387 emit_move_insn (target, object);
5388
5389 /* We want to return the BLKmode version of the data. */
5390 return blk_object;
5391 }
5392
5393 if (GET_CODE (target) == CONCAT)
5394 {
5395 /* We're storing into a struct containing a single __complex. */
5396
5397 if (bitpos != 0)
5398 abort ();
5399 return store_expr (exp, target, 0);
5400 }
5401
5402 /* If the structure is in a register or if the component
5403 is a bit field, we cannot use addressing to access it.
5404 Use bit-field techniques or SUBREG to store in it. */
5405
5406 if (mode == VOIDmode
5407 || (mode != BLKmode && ! direct_store[(int) mode]
5408 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5409 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5410 || GET_CODE (target) == REG
5411 || GET_CODE (target) == SUBREG
5412 /* If the field isn't aligned enough to store as an ordinary memref,
5413 store it as a bit field. */
5414 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5415 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5416 || bitpos % GET_MODE_ALIGNMENT (mode)))
5417 /* If the RHS and field are a constant size and the size of the
5418 RHS isn't the same size as the bitfield, we must use bitfield
5419 operations. */
5420 || (bitsize >= 0
5421 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5422 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5423 {
5424 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5425
5426 /* If BITSIZE is narrower than the size of the type of EXP
5427 we will be narrowing TEMP. Normally, what's wanted are the
5428 low-order bits. However, if EXP's type is a record and this is
5429 big-endian machine, we want the upper BITSIZE bits. */
5430 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5431 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5432 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5433 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5434 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5435 - bitsize),
5436 temp, 1);
5437
5438 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5439 MODE. */
5440 if (mode != VOIDmode && mode != BLKmode
5441 && mode != TYPE_MODE (TREE_TYPE (exp)))
5442 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5443
5444 /* If the modes of TARGET and TEMP are both BLKmode, both
5445 must be in memory and BITPOS must be aligned on a byte
5446 boundary. If so, we simply do a block copy. */
5447 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5448 {
5449 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5450 || bitpos % BITS_PER_UNIT != 0)
5451 abort ();
5452
5453 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5454 emit_block_move (target, temp,
5455 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5456 / BITS_PER_UNIT),
5457 BLOCK_OP_NORMAL);
5458
5459 return value_mode == VOIDmode ? const0_rtx : target;
5460 }
5461
5462 /* Store the value in the bitfield. */
5463 store_bit_field (target, bitsize, bitpos, mode, temp,
5464 int_size_in_bytes (type));
5465
5466 if (value_mode != VOIDmode)
5467 {
5468 /* The caller wants an rtx for the value.
5469 If possible, avoid refetching from the bitfield itself. */
5470 if (width_mask != 0
5471 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5472 {
5473 tree count;
5474 enum machine_mode tmode;
5475
5476 tmode = GET_MODE (temp);
5477 if (tmode == VOIDmode)
5478 tmode = value_mode;
5479
5480 if (unsignedp)
5481 return expand_and (tmode, temp,
5482 gen_int_mode (width_mask, tmode),
5483 NULL_RTX);
5484
5485 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5486 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5487 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5488 }
5489
5490 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5491 NULL_RTX, value_mode, VOIDmode,
5492 int_size_in_bytes (type));
5493 }
5494 return const0_rtx;
5495 }
5496 else
5497 {
5498 rtx addr = XEXP (target, 0);
5499 rtx to_rtx = target;
5500
5501 /* If a value is wanted, it must be the lhs;
5502 so make the address stable for multiple use. */
5503
5504 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5505 && ! CONSTANT_ADDRESS_P (addr)
5506 /* A frame-pointer reference is already stable. */
5507 && ! (GET_CODE (addr) == PLUS
5508 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5509 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5510 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5511 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5512
5513 /* Now build a reference to just the desired component. */
5514
5515 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5516
5517 if (to_rtx == target)
5518 to_rtx = copy_rtx (to_rtx);
5519
5520 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5521 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5522 set_mem_alias_set (to_rtx, alias_set);
5523
5524 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5525 }
5526 }
5527 \f
5528 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5529 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5530 codes and find the ultimate containing object, which we return.
5531
5532 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5533 bit position, and *PUNSIGNEDP to the signedness of the field.
5534 If the position of the field is variable, we store a tree
5535 giving the variable offset (in units) in *POFFSET.
5536 This offset is in addition to the bit position.
5537 If the position is not variable, we store 0 in *POFFSET.
5538
5539 If any of the extraction expressions is volatile,
5540 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5541
5542 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5543 is a mode that can be used to access the field. In that case, *PBITSIZE
5544 is redundant.
5545
5546 If the field describes a variable-sized object, *PMODE is set to
5547 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5548 this case, but the address of the object can be found. */
5549
5550 tree
5551 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5552 punsignedp, pvolatilep)
5553 tree exp;
5554 HOST_WIDE_INT *pbitsize;
5555 HOST_WIDE_INT *pbitpos;
5556 tree *poffset;
5557 enum machine_mode *pmode;
5558 int *punsignedp;
5559 int *pvolatilep;
5560 {
5561 tree size_tree = 0;
5562 enum machine_mode mode = VOIDmode;
5563 tree offset = size_zero_node;
5564 tree bit_offset = bitsize_zero_node;
5565 tree placeholder_ptr = 0;
5566 tree tem;
5567
5568 /* First get the mode, signedness, and size. We do this from just the
5569 outermost expression. */
5570 if (TREE_CODE (exp) == COMPONENT_REF)
5571 {
5572 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5573 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5574 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5575
5576 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5577 }
5578 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5579 {
5580 size_tree = TREE_OPERAND (exp, 1);
5581 *punsignedp = TREE_UNSIGNED (exp);
5582 }
5583 else
5584 {
5585 mode = TYPE_MODE (TREE_TYPE (exp));
5586 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5587
5588 if (mode == BLKmode)
5589 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5590 else
5591 *pbitsize = GET_MODE_BITSIZE (mode);
5592 }
5593
5594 if (size_tree != 0)
5595 {
5596 if (! host_integerp (size_tree, 1))
5597 mode = BLKmode, *pbitsize = -1;
5598 else
5599 *pbitsize = tree_low_cst (size_tree, 1);
5600 }
5601
5602 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5603 and find the ultimate containing object. */
5604 while (1)
5605 {
5606 if (TREE_CODE (exp) == BIT_FIELD_REF)
5607 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5608 else if (TREE_CODE (exp) == COMPONENT_REF)
5609 {
5610 tree field = TREE_OPERAND (exp, 1);
5611 tree this_offset = DECL_FIELD_OFFSET (field);
5612
5613 /* If this field hasn't been filled in yet, don't go
5614 past it. This should only happen when folding expressions
5615 made during type construction. */
5616 if (this_offset == 0)
5617 break;
5618 else if (! TREE_CONSTANT (this_offset)
5619 && contains_placeholder_p (this_offset))
5620 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5621
5622 offset = size_binop (PLUS_EXPR, offset, this_offset);
5623 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5624 DECL_FIELD_BIT_OFFSET (field));
5625
5626 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5627 }
5628
5629 else if (TREE_CODE (exp) == ARRAY_REF
5630 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5631 {
5632 tree index = TREE_OPERAND (exp, 1);
5633 tree array = TREE_OPERAND (exp, 0);
5634 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5635 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5636 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5637
5638 /* We assume all arrays have sizes that are a multiple of a byte.
5639 First subtract the lower bound, if any, in the type of the
5640 index, then convert to sizetype and multiply by the size of the
5641 array element. */
5642 if (low_bound != 0 && ! integer_zerop (low_bound))
5643 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5644 index, low_bound));
5645
5646 /* If the index has a self-referential type, pass it to a
5647 WITH_RECORD_EXPR; if the component size is, pass our
5648 component to one. */
5649 if (! TREE_CONSTANT (index)
5650 && contains_placeholder_p (index))
5651 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5652 if (! TREE_CONSTANT (unit_size)
5653 && contains_placeholder_p (unit_size))
5654 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5655
5656 offset = size_binop (PLUS_EXPR, offset,
5657 size_binop (MULT_EXPR,
5658 convert (sizetype, index),
5659 unit_size));
5660 }
5661
5662 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5663 {
5664 tree new = find_placeholder (exp, &placeholder_ptr);
5665
5666 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5667 We might have been called from tree optimization where we
5668 haven't set up an object yet. */
5669 if (new == 0)
5670 break;
5671 else
5672 exp = new;
5673
5674 continue;
5675 }
5676 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5677 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5678 && ! ((TREE_CODE (exp) == NOP_EXPR
5679 || TREE_CODE (exp) == CONVERT_EXPR)
5680 && (TYPE_MODE (TREE_TYPE (exp))
5681 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5682 break;
5683
5684 /* If any reference in the chain is volatile, the effect is volatile. */
5685 if (TREE_THIS_VOLATILE (exp))
5686 *pvolatilep = 1;
5687
5688 exp = TREE_OPERAND (exp, 0);
5689 }
5690
5691 /* If OFFSET is constant, see if we can return the whole thing as a
5692 constant bit position. Otherwise, split it up. */
5693 if (host_integerp (offset, 0)
5694 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5695 bitsize_unit_node))
5696 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5697 && host_integerp (tem, 0))
5698 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5699 else
5700 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5701
5702 *pmode = mode;
5703 return exp;
5704 }
5705
5706 /* Return 1 if T is an expression that get_inner_reference handles. */
5707
5708 int
5709 handled_component_p (t)
5710 tree t;
5711 {
5712 switch (TREE_CODE (t))
5713 {
5714 case BIT_FIELD_REF:
5715 case COMPONENT_REF:
5716 case ARRAY_REF:
5717 case ARRAY_RANGE_REF:
5718 case NON_LVALUE_EXPR:
5719 case VIEW_CONVERT_EXPR:
5720 return 1;
5721
5722 case NOP_EXPR:
5723 case CONVERT_EXPR:
5724 return (TYPE_MODE (TREE_TYPE (t))
5725 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5726
5727 default:
5728 return 0;
5729 }
5730 }
5731 \f
5732 /* Given an rtx VALUE that may contain additions and multiplications, return
5733 an equivalent value that just refers to a register, memory, or constant.
5734 This is done by generating instructions to perform the arithmetic and
5735 returning a pseudo-register containing the value.
5736
5737 The returned value may be a REG, SUBREG, MEM or constant. */
5738
5739 rtx
5740 force_operand (value, target)
5741 rtx value, target;
5742 {
5743 rtx op1, op2;
5744 /* Use subtarget as the target for operand 0 of a binary operation. */
5745 rtx subtarget = get_subtarget (target);
5746 enum rtx_code code = GET_CODE (value);
5747
5748 /* Check for a PIC address load. */
5749 if ((code == PLUS || code == MINUS)
5750 && XEXP (value, 0) == pic_offset_table_rtx
5751 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5752 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5753 || GET_CODE (XEXP (value, 1)) == CONST))
5754 {
5755 if (!subtarget)
5756 subtarget = gen_reg_rtx (GET_MODE (value));
5757 emit_move_insn (subtarget, value);
5758 return subtarget;
5759 }
5760
5761 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5762 {
5763 if (!target)
5764 target = gen_reg_rtx (GET_MODE (value));
5765 convert_move (target, force_operand (XEXP (value, 0), NULL),
5766 code == ZERO_EXTEND);
5767 return target;
5768 }
5769
5770 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5771 {
5772 op2 = XEXP (value, 1);
5773 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5774 subtarget = 0;
5775 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5776 {
5777 code = PLUS;
5778 op2 = negate_rtx (GET_MODE (value), op2);
5779 }
5780
5781 /* Check for an addition with OP2 a constant integer and our first
5782 operand a PLUS of a virtual register and something else. In that
5783 case, we want to emit the sum of the virtual register and the
5784 constant first and then add the other value. This allows virtual
5785 register instantiation to simply modify the constant rather than
5786 creating another one around this addition. */
5787 if (code == PLUS && GET_CODE (op2) == CONST_INT
5788 && GET_CODE (XEXP (value, 0)) == PLUS
5789 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5790 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5791 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5792 {
5793 rtx temp = expand_simple_binop (GET_MODE (value), code,
5794 XEXP (XEXP (value, 0), 0), op2,
5795 subtarget, 0, OPTAB_LIB_WIDEN);
5796 return expand_simple_binop (GET_MODE (value), code, temp,
5797 force_operand (XEXP (XEXP (value,
5798 0), 1), 0),
5799 target, 0, OPTAB_LIB_WIDEN);
5800 }
5801
5802 op1 = force_operand (XEXP (value, 0), subtarget);
5803 op2 = force_operand (op2, NULL_RTX);
5804 switch (code)
5805 {
5806 case MULT:
5807 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5808 case DIV:
5809 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5810 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5811 target, 1, OPTAB_LIB_WIDEN);
5812 else
5813 return expand_divmod (0,
5814 FLOAT_MODE_P (GET_MODE (value))
5815 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5816 GET_MODE (value), op1, op2, target, 0);
5817 break;
5818 case MOD:
5819 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5820 target, 0);
5821 break;
5822 case UDIV:
5823 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5824 target, 1);
5825 break;
5826 case UMOD:
5827 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5828 target, 1);
5829 break;
5830 case ASHIFTRT:
5831 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5832 target, 0, OPTAB_LIB_WIDEN);
5833 break;
5834 default:
5835 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5836 target, 1, OPTAB_LIB_WIDEN);
5837 }
5838 }
5839 if (GET_RTX_CLASS (code) == '1')
5840 {
5841 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5842 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5843 }
5844
5845 #ifdef INSN_SCHEDULING
5846 /* On machines that have insn scheduling, we want all memory reference to be
5847 explicit, so we need to deal with such paradoxical SUBREGs. */
5848 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5849 && (GET_MODE_SIZE (GET_MODE (value))
5850 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5851 value
5852 = simplify_gen_subreg (GET_MODE (value),
5853 force_reg (GET_MODE (SUBREG_REG (value)),
5854 force_operand (SUBREG_REG (value),
5855 NULL_RTX)),
5856 GET_MODE (SUBREG_REG (value)),
5857 SUBREG_BYTE (value));
5858 #endif
5859
5860 return value;
5861 }
5862 \f
5863 /* Subroutine of expand_expr: return nonzero iff there is no way that
5864 EXP can reference X, which is being modified. TOP_P is nonzero if this
5865 call is going to be used to determine whether we need a temporary
5866 for EXP, as opposed to a recursive call to this function.
5867
5868 It is always safe for this routine to return zero since it merely
5869 searches for optimization opportunities. */
5870
5871 int
5872 safe_from_p (x, exp, top_p)
5873 rtx x;
5874 tree exp;
5875 int top_p;
5876 {
5877 rtx exp_rtl = 0;
5878 int i, nops;
5879 static tree save_expr_list;
5880
5881 if (x == 0
5882 /* If EXP has varying size, we MUST use a target since we currently
5883 have no way of allocating temporaries of variable size
5884 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5885 So we assume here that something at a higher level has prevented a
5886 clash. This is somewhat bogus, but the best we can do. Only
5887 do this when X is BLKmode and when we are at the top level. */
5888 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5890 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5891 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5892 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5893 != INTEGER_CST)
5894 && GET_MODE (x) == BLKmode)
5895 /* If X is in the outgoing argument area, it is always safe. */
5896 || (GET_CODE (x) == MEM
5897 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5898 || (GET_CODE (XEXP (x, 0)) == PLUS
5899 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5900 return 1;
5901
5902 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5903 find the underlying pseudo. */
5904 if (GET_CODE (x) == SUBREG)
5905 {
5906 x = SUBREG_REG (x);
5907 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5908 return 0;
5909 }
5910
5911 /* A SAVE_EXPR might appear many times in the expression passed to the
5912 top-level safe_from_p call, and if it has a complex subexpression,
5913 examining it multiple times could result in a combinatorial explosion.
5914 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5915 with optimization took about 28 minutes to compile -- even though it was
5916 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5917 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5918 we have processed. Note that the only test of top_p was above. */
5919
5920 if (top_p)
5921 {
5922 int rtn;
5923 tree t;
5924
5925 save_expr_list = 0;
5926
5927 rtn = safe_from_p (x, exp, 0);
5928
5929 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5930 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5931
5932 return rtn;
5933 }
5934
5935 /* Now look at our tree code and possibly recurse. */
5936 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5937 {
5938 case 'd':
5939 exp_rtl = DECL_RTL_IF_SET (exp);
5940 break;
5941
5942 case 'c':
5943 return 1;
5944
5945 case 'x':
5946 if (TREE_CODE (exp) == TREE_LIST)
5947 return ((TREE_VALUE (exp) == 0
5948 || safe_from_p (x, TREE_VALUE (exp), 0))
5949 && (TREE_CHAIN (exp) == 0
5950 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5951 else if (TREE_CODE (exp) == ERROR_MARK)
5952 return 1; /* An already-visited SAVE_EXPR? */
5953 else
5954 return 0;
5955
5956 case '1':
5957 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5958
5959 case '2':
5960 case '<':
5961 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5962 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5963
5964 case 'e':
5965 case 'r':
5966 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5967 the expression. If it is set, we conflict iff we are that rtx or
5968 both are in memory. Otherwise, we check all operands of the
5969 expression recursively. */
5970
5971 switch (TREE_CODE (exp))
5972 {
5973 case ADDR_EXPR:
5974 /* If the operand is static or we are static, we can't conflict.
5975 Likewise if we don't conflict with the operand at all. */
5976 if (staticp (TREE_OPERAND (exp, 0))
5977 || TREE_STATIC (exp)
5978 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5979 return 1;
5980
5981 /* Otherwise, the only way this can conflict is if we are taking
5982 the address of a DECL a that address if part of X, which is
5983 very rare. */
5984 exp = TREE_OPERAND (exp, 0);
5985 if (DECL_P (exp))
5986 {
5987 if (!DECL_RTL_SET_P (exp)
5988 || GET_CODE (DECL_RTL (exp)) != MEM)
5989 return 0;
5990 else
5991 exp_rtl = XEXP (DECL_RTL (exp), 0);
5992 }
5993 break;
5994
5995 case INDIRECT_REF:
5996 if (GET_CODE (x) == MEM
5997 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5998 get_alias_set (exp)))
5999 return 0;
6000 break;
6001
6002 case CALL_EXPR:
6003 /* Assume that the call will clobber all hard registers and
6004 all of memory. */
6005 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6006 || GET_CODE (x) == MEM)
6007 return 0;
6008 break;
6009
6010 case RTL_EXPR:
6011 /* If a sequence exists, we would have to scan every instruction
6012 in the sequence to see if it was safe. This is probably not
6013 worthwhile. */
6014 if (RTL_EXPR_SEQUENCE (exp))
6015 return 0;
6016
6017 exp_rtl = RTL_EXPR_RTL (exp);
6018 break;
6019
6020 case WITH_CLEANUP_EXPR:
6021 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6022 break;
6023
6024 case CLEANUP_POINT_EXPR:
6025 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6026
6027 case SAVE_EXPR:
6028 exp_rtl = SAVE_EXPR_RTL (exp);
6029 if (exp_rtl)
6030 break;
6031
6032 /* If we've already scanned this, don't do it again. Otherwise,
6033 show we've scanned it and record for clearing the flag if we're
6034 going on. */
6035 if (TREE_PRIVATE (exp))
6036 return 1;
6037
6038 TREE_PRIVATE (exp) = 1;
6039 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6040 {
6041 TREE_PRIVATE (exp) = 0;
6042 return 0;
6043 }
6044
6045 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6046 return 1;
6047
6048 case BIND_EXPR:
6049 /* The only operand we look at is operand 1. The rest aren't
6050 part of the expression. */
6051 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6052
6053 case METHOD_CALL_EXPR:
6054 /* This takes an rtx argument, but shouldn't appear here. */
6055 abort ();
6056
6057 default:
6058 break;
6059 }
6060
6061 /* If we have an rtx, we do not need to scan our operands. */
6062 if (exp_rtl)
6063 break;
6064
6065 nops = first_rtl_op (TREE_CODE (exp));
6066 for (i = 0; i < nops; i++)
6067 if (TREE_OPERAND (exp, i) != 0
6068 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6069 return 0;
6070
6071 /* If this is a language-specific tree code, it may require
6072 special handling. */
6073 if ((unsigned int) TREE_CODE (exp)
6074 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6075 && !(*lang_hooks.safe_from_p) (x, exp))
6076 return 0;
6077 }
6078
6079 /* If we have an rtl, find any enclosed object. Then see if we conflict
6080 with it. */
6081 if (exp_rtl)
6082 {
6083 if (GET_CODE (exp_rtl) == SUBREG)
6084 {
6085 exp_rtl = SUBREG_REG (exp_rtl);
6086 if (GET_CODE (exp_rtl) == REG
6087 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6088 return 0;
6089 }
6090
6091 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6092 are memory and they conflict. */
6093 return ! (rtx_equal_p (x, exp_rtl)
6094 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6095 && true_dependence (exp_rtl, VOIDmode, x,
6096 rtx_addr_varies_p)));
6097 }
6098
6099 /* If we reach here, it is safe. */
6100 return 1;
6101 }
6102
6103 /* Subroutine of expand_expr: return rtx if EXP is a
6104 variable or parameter; else return 0. */
6105
6106 static rtx
6107 var_rtx (exp)
6108 tree exp;
6109 {
6110 STRIP_NOPS (exp);
6111 switch (TREE_CODE (exp))
6112 {
6113 case PARM_DECL:
6114 case VAR_DECL:
6115 return DECL_RTL (exp);
6116 default:
6117 return 0;
6118 }
6119 }
6120
6121 #ifdef MAX_INTEGER_COMPUTATION_MODE
6122
6123 void
6124 check_max_integer_computation_mode (exp)
6125 tree exp;
6126 {
6127 enum tree_code code;
6128 enum machine_mode mode;
6129
6130 /* Strip any NOPs that don't change the mode. */
6131 STRIP_NOPS (exp);
6132 code = TREE_CODE (exp);
6133
6134 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6135 if (code == NOP_EXPR
6136 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6137 return;
6138
6139 /* First check the type of the overall operation. We need only look at
6140 unary, binary and relational operations. */
6141 if (TREE_CODE_CLASS (code) == '1'
6142 || TREE_CODE_CLASS (code) == '2'
6143 || TREE_CODE_CLASS (code) == '<')
6144 {
6145 mode = TYPE_MODE (TREE_TYPE (exp));
6146 if (GET_MODE_CLASS (mode) == MODE_INT
6147 && mode > MAX_INTEGER_COMPUTATION_MODE)
6148 internal_error ("unsupported wide integer operation");
6149 }
6150
6151 /* Check operand of a unary op. */
6152 if (TREE_CODE_CLASS (code) == '1')
6153 {
6154 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6155 if (GET_MODE_CLASS (mode) == MODE_INT
6156 && mode > MAX_INTEGER_COMPUTATION_MODE)
6157 internal_error ("unsupported wide integer operation");
6158 }
6159
6160 /* Check operands of a binary/comparison op. */
6161 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6162 {
6163 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6164 if (GET_MODE_CLASS (mode) == MODE_INT
6165 && mode > MAX_INTEGER_COMPUTATION_MODE)
6166 internal_error ("unsupported wide integer operation");
6167
6168 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6169 if (GET_MODE_CLASS (mode) == MODE_INT
6170 && mode > MAX_INTEGER_COMPUTATION_MODE)
6171 internal_error ("unsupported wide integer operation");
6172 }
6173 }
6174 #endif
6175 \f
6176 /* Return the highest power of two that EXP is known to be a multiple of.
6177 This is used in updating alignment of MEMs in array references. */
6178
6179 static HOST_WIDE_INT
6180 highest_pow2_factor (exp)
6181 tree exp;
6182 {
6183 HOST_WIDE_INT c0, c1;
6184
6185 switch (TREE_CODE (exp))
6186 {
6187 case INTEGER_CST:
6188 /* We can find the lowest bit that's a one. If the low
6189 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6190 We need to handle this case since we can find it in a COND_EXPR,
6191 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
6192 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6193 later ICE. */
6194 if (TREE_CONSTANT_OVERFLOW (exp))
6195 return BIGGEST_ALIGNMENT;
6196 else
6197 {
6198 /* Note: tree_low_cst is intentionally not used here,
6199 we don't care about the upper bits. */
6200 c0 = TREE_INT_CST_LOW (exp);
6201 c0 &= -c0;
6202 return c0 ? c0 : BIGGEST_ALIGNMENT;
6203 }
6204 break;
6205
6206 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6207 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6208 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6209 return MIN (c0, c1);
6210
6211 case MULT_EXPR:
6212 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6213 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6214 return c0 * c1;
6215
6216 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6217 case CEIL_DIV_EXPR:
6218 if (integer_pow2p (TREE_OPERAND (exp, 1))
6219 && host_integerp (TREE_OPERAND (exp, 1), 1))
6220 {
6221 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6222 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6223 return MAX (1, c0 / c1);
6224 }
6225 break;
6226
6227 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6228 case SAVE_EXPR: case WITH_RECORD_EXPR:
6229 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6230
6231 case COMPOUND_EXPR:
6232 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6233
6234 case COND_EXPR:
6235 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6236 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6237 return MIN (c0, c1);
6238
6239 default:
6240 break;
6241 }
6242
6243 return 1;
6244 }
6245
6246 /* Similar, except that it is known that the expression must be a multiple
6247 of the alignment of TYPE. */
6248
6249 static HOST_WIDE_INT
6250 highest_pow2_factor_for_type (type, exp)
6251 tree type;
6252 tree exp;
6253 {
6254 HOST_WIDE_INT type_align, factor;
6255
6256 factor = highest_pow2_factor (exp);
6257 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6258 return MAX (factor, type_align);
6259 }
6260 \f
6261 /* Return an object on the placeholder list that matches EXP, a
6262 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6263 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6264 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6265 is a location which initially points to a starting location in the
6266 placeholder list (zero means start of the list) and where a pointer into
6267 the placeholder list at which the object is found is placed. */
6268
6269 tree
6270 find_placeholder (exp, plist)
6271 tree exp;
6272 tree *plist;
6273 {
6274 tree type = TREE_TYPE (exp);
6275 tree placeholder_expr;
6276
6277 for (placeholder_expr
6278 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6279 placeholder_expr != 0;
6280 placeholder_expr = TREE_CHAIN (placeholder_expr))
6281 {
6282 tree need_type = TYPE_MAIN_VARIANT (type);
6283 tree elt;
6284
6285 /* Find the outermost reference that is of the type we want. If none,
6286 see if any object has a type that is a pointer to the type we
6287 want. */
6288 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6289 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6290 || TREE_CODE (elt) == COND_EXPR)
6291 ? TREE_OPERAND (elt, 1)
6292 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6293 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6294 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6295 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6296 ? TREE_OPERAND (elt, 0) : 0))
6297 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6298 {
6299 if (plist)
6300 *plist = placeholder_expr;
6301 return elt;
6302 }
6303
6304 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6305 elt
6306 = ((TREE_CODE (elt) == COMPOUND_EXPR
6307 || TREE_CODE (elt) == COND_EXPR)
6308 ? TREE_OPERAND (elt, 1)
6309 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6310 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6311 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6312 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6313 ? TREE_OPERAND (elt, 0) : 0))
6314 if (POINTER_TYPE_P (TREE_TYPE (elt))
6315 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6316 == need_type))
6317 {
6318 if (plist)
6319 *plist = placeholder_expr;
6320 return build1 (INDIRECT_REF, need_type, elt);
6321 }
6322 }
6323
6324 return 0;
6325 }
6326 \f
6327 /* expand_expr: generate code for computing expression EXP.
6328 An rtx for the computed value is returned. The value is never null.
6329 In the case of a void EXP, const0_rtx is returned.
6330
6331 The value may be stored in TARGET if TARGET is nonzero.
6332 TARGET is just a suggestion; callers must assume that
6333 the rtx returned may not be the same as TARGET.
6334
6335 If TARGET is CONST0_RTX, it means that the value will be ignored.
6336
6337 If TMODE is not VOIDmode, it suggests generating the
6338 result in mode TMODE. But this is done only when convenient.
6339 Otherwise, TMODE is ignored and the value generated in its natural mode.
6340 TMODE is just a suggestion; callers must assume that
6341 the rtx returned may not have mode TMODE.
6342
6343 Note that TARGET may have neither TMODE nor MODE. In that case, it
6344 probably will not be used.
6345
6346 If MODIFIER is EXPAND_SUM then when EXP is an addition
6347 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6348 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6349 products as above, or REG or MEM, or constant.
6350 Ordinarily in such cases we would output mul or add instructions
6351 and then return a pseudo reg containing the sum.
6352
6353 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6354 it also marks a label as absolutely required (it can't be dead).
6355 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6356 This is used for outputting expressions used in initializers.
6357
6358 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6359 with a constant address even if that address is not normally legitimate.
6360 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6361
6362 rtx
6363 expand_expr (exp, target, tmode, modifier)
6364 tree exp;
6365 rtx target;
6366 enum machine_mode tmode;
6367 enum expand_modifier modifier;
6368 {
6369 rtx op0, op1, temp;
6370 tree type = TREE_TYPE (exp);
6371 int unsignedp = TREE_UNSIGNED (type);
6372 enum machine_mode mode;
6373 enum tree_code code = TREE_CODE (exp);
6374 optab this_optab;
6375 rtx subtarget, original_target;
6376 int ignore;
6377 tree context;
6378
6379 /* Handle ERROR_MARK before anybody tries to access its type. */
6380 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6381 {
6382 op0 = CONST0_RTX (tmode);
6383 if (op0 != 0)
6384 return op0;
6385 return const0_rtx;
6386 }
6387
6388 mode = TYPE_MODE (type);
6389 /* Use subtarget as the target for operand 0 of a binary operation. */
6390 subtarget = get_subtarget (target);
6391 original_target = target;
6392 ignore = (target == const0_rtx
6393 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6394 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6395 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6396 && TREE_CODE (type) == VOID_TYPE));
6397
6398 /* If we are going to ignore this result, we need only do something
6399 if there is a side-effect somewhere in the expression. If there
6400 is, short-circuit the most common cases here. Note that we must
6401 not call expand_expr with anything but const0_rtx in case this
6402 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6403
6404 if (ignore)
6405 {
6406 if (! TREE_SIDE_EFFECTS (exp))
6407 return const0_rtx;
6408
6409 /* Ensure we reference a volatile object even if value is ignored, but
6410 don't do this if all we are doing is taking its address. */
6411 if (TREE_THIS_VOLATILE (exp)
6412 && TREE_CODE (exp) != FUNCTION_DECL
6413 && mode != VOIDmode && mode != BLKmode
6414 && modifier != EXPAND_CONST_ADDRESS)
6415 {
6416 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6417 if (GET_CODE (temp) == MEM)
6418 temp = copy_to_reg (temp);
6419 return const0_rtx;
6420 }
6421
6422 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6423 || code == INDIRECT_REF || code == BUFFER_REF)
6424 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6425 modifier);
6426
6427 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6428 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6429 {
6430 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6431 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6432 return const0_rtx;
6433 }
6434 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6435 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6436 /* If the second operand has no side effects, just evaluate
6437 the first. */
6438 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6439 modifier);
6440 else if (code == BIT_FIELD_REF)
6441 {
6442 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6443 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6444 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6445 return const0_rtx;
6446 }
6447
6448 target = 0;
6449 }
6450
6451 #ifdef MAX_INTEGER_COMPUTATION_MODE
6452 /* Only check stuff here if the mode we want is different from the mode
6453 of the expression; if it's the same, check_max_integer_computiation_mode
6454 will handle it. Do we really need to check this stuff at all? */
6455
6456 if (target
6457 && GET_MODE (target) != mode
6458 && TREE_CODE (exp) != INTEGER_CST
6459 && TREE_CODE (exp) != PARM_DECL
6460 && TREE_CODE (exp) != ARRAY_REF
6461 && TREE_CODE (exp) != ARRAY_RANGE_REF
6462 && TREE_CODE (exp) != COMPONENT_REF
6463 && TREE_CODE (exp) != BIT_FIELD_REF
6464 && TREE_CODE (exp) != INDIRECT_REF
6465 && TREE_CODE (exp) != CALL_EXPR
6466 && TREE_CODE (exp) != VAR_DECL
6467 && TREE_CODE (exp) != RTL_EXPR)
6468 {
6469 enum machine_mode mode = GET_MODE (target);
6470
6471 if (GET_MODE_CLASS (mode) == MODE_INT
6472 && mode > MAX_INTEGER_COMPUTATION_MODE)
6473 internal_error ("unsupported wide integer operation");
6474 }
6475
6476 if (tmode != mode
6477 && TREE_CODE (exp) != INTEGER_CST
6478 && TREE_CODE (exp) != PARM_DECL
6479 && TREE_CODE (exp) != ARRAY_REF
6480 && TREE_CODE (exp) != ARRAY_RANGE_REF
6481 && TREE_CODE (exp) != COMPONENT_REF
6482 && TREE_CODE (exp) != BIT_FIELD_REF
6483 && TREE_CODE (exp) != INDIRECT_REF
6484 && TREE_CODE (exp) != VAR_DECL
6485 && TREE_CODE (exp) != CALL_EXPR
6486 && TREE_CODE (exp) != RTL_EXPR
6487 && GET_MODE_CLASS (tmode) == MODE_INT
6488 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6489 internal_error ("unsupported wide integer operation");
6490
6491 check_max_integer_computation_mode (exp);
6492 #endif
6493
6494 /* If will do cse, generate all results into pseudo registers
6495 since 1) that allows cse to find more things
6496 and 2) otherwise cse could produce an insn the machine
6497 cannot support. And exception is a CONSTRUCTOR into a multi-word
6498 MEM: that's much more likely to be most efficient into the MEM. */
6499
6500 if (! cse_not_expected && mode != BLKmode && target
6501 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6502 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6503 target = subtarget;
6504
6505 switch (code)
6506 {
6507 case LABEL_DECL:
6508 {
6509 tree function = decl_function_context (exp);
6510 /* Handle using a label in a containing function. */
6511 if (function != current_function_decl
6512 && function != inline_function_decl && function != 0)
6513 {
6514 struct function *p = find_function_data (function);
6515 p->expr->x_forced_labels
6516 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6517 p->expr->x_forced_labels);
6518 }
6519 else
6520 {
6521 if (modifier == EXPAND_INITIALIZER)
6522 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6523 label_rtx (exp),
6524 forced_labels);
6525 }
6526
6527 temp = gen_rtx_MEM (FUNCTION_MODE,
6528 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6529 if (function != current_function_decl
6530 && function != inline_function_decl && function != 0)
6531 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6532 return temp;
6533 }
6534
6535 case PARM_DECL:
6536 if (!DECL_RTL_SET_P (exp))
6537 {
6538 error_with_decl (exp, "prior parameter's size depends on `%s'");
6539 return CONST0_RTX (mode);
6540 }
6541
6542 /* ... fall through ... */
6543
6544 case VAR_DECL:
6545 /* If a static var's type was incomplete when the decl was written,
6546 but the type is complete now, lay out the decl now. */
6547 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6548 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6549 {
6550 rtx value = DECL_RTL_IF_SET (exp);
6551
6552 layout_decl (exp, 0);
6553
6554 /* If the RTL was already set, update its mode and memory
6555 attributes. */
6556 if (value != 0)
6557 {
6558 PUT_MODE (value, DECL_MODE (exp));
6559 SET_DECL_RTL (exp, 0);
6560 set_mem_attributes (value, exp, 1);
6561 SET_DECL_RTL (exp, value);
6562 }
6563 }
6564
6565 /* ... fall through ... */
6566
6567 case FUNCTION_DECL:
6568 case RESULT_DECL:
6569 if (DECL_RTL (exp) == 0)
6570 abort ();
6571
6572 /* Ensure variable marked as used even if it doesn't go through
6573 a parser. If it hasn't be used yet, write out an external
6574 definition. */
6575 if (! TREE_USED (exp))
6576 {
6577 assemble_external (exp);
6578 TREE_USED (exp) = 1;
6579 }
6580
6581 /* Show we haven't gotten RTL for this yet. */
6582 temp = 0;
6583
6584 /* Handle variables inherited from containing functions. */
6585 context = decl_function_context (exp);
6586
6587 /* We treat inline_function_decl as an alias for the current function
6588 because that is the inline function whose vars, types, etc.
6589 are being merged into the current function.
6590 See expand_inline_function. */
6591
6592 if (context != 0 && context != current_function_decl
6593 && context != inline_function_decl
6594 /* If var is static, we don't need a static chain to access it. */
6595 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6596 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6597 {
6598 rtx addr;
6599
6600 /* Mark as non-local and addressable. */
6601 DECL_NONLOCAL (exp) = 1;
6602 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6603 abort ();
6604 (*lang_hooks.mark_addressable) (exp);
6605 if (GET_CODE (DECL_RTL (exp)) != MEM)
6606 abort ();
6607 addr = XEXP (DECL_RTL (exp), 0);
6608 if (GET_CODE (addr) == MEM)
6609 addr
6610 = replace_equiv_address (addr,
6611 fix_lexical_addr (XEXP (addr, 0), exp));
6612 else
6613 addr = fix_lexical_addr (addr, exp);
6614
6615 temp = replace_equiv_address (DECL_RTL (exp), addr);
6616 }
6617
6618 /* This is the case of an array whose size is to be determined
6619 from its initializer, while the initializer is still being parsed.
6620 See expand_decl. */
6621
6622 else if (GET_CODE (DECL_RTL (exp)) == MEM
6623 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6624 temp = validize_mem (DECL_RTL (exp));
6625
6626 /* If DECL_RTL is memory, we are in the normal case and either
6627 the address is not valid or it is not a register and -fforce-addr
6628 is specified, get the address into a register. */
6629
6630 else if (GET_CODE (DECL_RTL (exp)) == MEM
6631 && modifier != EXPAND_CONST_ADDRESS
6632 && modifier != EXPAND_SUM
6633 && modifier != EXPAND_INITIALIZER
6634 && (! memory_address_p (DECL_MODE (exp),
6635 XEXP (DECL_RTL (exp), 0))
6636 || (flag_force_addr
6637 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6638 temp = replace_equiv_address (DECL_RTL (exp),
6639 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6640
6641 /* If we got something, return it. But first, set the alignment
6642 if the address is a register. */
6643 if (temp != 0)
6644 {
6645 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6646 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6647
6648 return temp;
6649 }
6650
6651 /* If the mode of DECL_RTL does not match that of the decl, it
6652 must be a promoted value. We return a SUBREG of the wanted mode,
6653 but mark it so that we know that it was already extended. */
6654
6655 if (GET_CODE (DECL_RTL (exp)) == REG
6656 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6657 {
6658 /* Get the signedness used for this variable. Ensure we get the
6659 same mode we got when the variable was declared. */
6660 if (GET_MODE (DECL_RTL (exp))
6661 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6662 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6663 abort ();
6664
6665 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6666 SUBREG_PROMOTED_VAR_P (temp) = 1;
6667 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6668 return temp;
6669 }
6670
6671 return DECL_RTL (exp);
6672
6673 case INTEGER_CST:
6674 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6675 TREE_INT_CST_HIGH (exp), mode);
6676
6677 /* ??? If overflow is set, fold will have done an incomplete job,
6678 which can result in (plus xx (const_int 0)), which can get
6679 simplified by validate_replace_rtx during virtual register
6680 instantiation, which can result in unrecognizable insns.
6681 Avoid this by forcing all overflows into registers. */
6682 if (TREE_CONSTANT_OVERFLOW (exp)
6683 && modifier != EXPAND_INITIALIZER)
6684 temp = force_reg (mode, temp);
6685
6686 return temp;
6687
6688 case CONST_DECL:
6689 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6690
6691 case REAL_CST:
6692 /* If optimized, generate immediate CONST_DOUBLE
6693 which will be turned into memory by reload if necessary.
6694
6695 We used to force a register so that loop.c could see it. But
6696 this does not allow gen_* patterns to perform optimizations with
6697 the constants. It also produces two insns in cases like "x = 1.0;".
6698 On most machines, floating-point constants are not permitted in
6699 many insns, so we'd end up copying it to a register in any case.
6700
6701 Now, we do the copying in expand_binop, if appropriate. */
6702 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6703 TYPE_MODE (TREE_TYPE (exp)));
6704
6705 case COMPLEX_CST:
6706 case STRING_CST:
6707 if (! TREE_CST_RTL (exp))
6708 output_constant_def (exp, 1);
6709
6710 /* TREE_CST_RTL probably contains a constant address.
6711 On RISC machines where a constant address isn't valid,
6712 make some insns to get that address into a register. */
6713 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6714 && modifier != EXPAND_CONST_ADDRESS
6715 && modifier != EXPAND_INITIALIZER
6716 && modifier != EXPAND_SUM
6717 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6718 || (flag_force_addr
6719 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6720 return replace_equiv_address (TREE_CST_RTL (exp),
6721 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6722 return TREE_CST_RTL (exp);
6723
6724 case EXPR_WITH_FILE_LOCATION:
6725 {
6726 rtx to_return;
6727 const char *saved_input_filename = input_filename;
6728 int saved_lineno = lineno;
6729 input_filename = EXPR_WFL_FILENAME (exp);
6730 lineno = EXPR_WFL_LINENO (exp);
6731 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6732 emit_line_note (input_filename, lineno);
6733 /* Possibly avoid switching back and forth here. */
6734 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6735 input_filename = saved_input_filename;
6736 lineno = saved_lineno;
6737 return to_return;
6738 }
6739
6740 case SAVE_EXPR:
6741 context = decl_function_context (exp);
6742
6743 /* If this SAVE_EXPR was at global context, assume we are an
6744 initialization function and move it into our context. */
6745 if (context == 0)
6746 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6747
6748 /* We treat inline_function_decl as an alias for the current function
6749 because that is the inline function whose vars, types, etc.
6750 are being merged into the current function.
6751 See expand_inline_function. */
6752 if (context == current_function_decl || context == inline_function_decl)
6753 context = 0;
6754
6755 /* If this is non-local, handle it. */
6756 if (context)
6757 {
6758 /* The following call just exists to abort if the context is
6759 not of a containing function. */
6760 find_function_data (context);
6761
6762 temp = SAVE_EXPR_RTL (exp);
6763 if (temp && GET_CODE (temp) == REG)
6764 {
6765 put_var_into_stack (exp);
6766 temp = SAVE_EXPR_RTL (exp);
6767 }
6768 if (temp == 0 || GET_CODE (temp) != MEM)
6769 abort ();
6770 return
6771 replace_equiv_address (temp,
6772 fix_lexical_addr (XEXP (temp, 0), exp));
6773 }
6774 if (SAVE_EXPR_RTL (exp) == 0)
6775 {
6776 if (mode == VOIDmode)
6777 temp = const0_rtx;
6778 else
6779 temp = assign_temp (build_qualified_type (type,
6780 (TYPE_QUALS (type)
6781 | TYPE_QUAL_CONST)),
6782 3, 0, 0);
6783
6784 SAVE_EXPR_RTL (exp) = temp;
6785 if (!optimize && GET_CODE (temp) == REG)
6786 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6787 save_expr_regs);
6788
6789 /* If the mode of TEMP does not match that of the expression, it
6790 must be a promoted value. We pass store_expr a SUBREG of the
6791 wanted mode but mark it so that we know that it was already
6792 extended. Note that `unsignedp' was modified above in
6793 this case. */
6794
6795 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6796 {
6797 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6798 SUBREG_PROMOTED_VAR_P (temp) = 1;
6799 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6800 }
6801
6802 if (temp == const0_rtx)
6803 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6804 else
6805 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6806
6807 TREE_USED (exp) = 1;
6808 }
6809
6810 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6811 must be a promoted value. We return a SUBREG of the wanted mode,
6812 but mark it so that we know that it was already extended. */
6813
6814 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6815 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6816 {
6817 /* Compute the signedness and make the proper SUBREG. */
6818 promote_mode (type, mode, &unsignedp, 0);
6819 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6820 SUBREG_PROMOTED_VAR_P (temp) = 1;
6821 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6822 return temp;
6823 }
6824
6825 return SAVE_EXPR_RTL (exp);
6826
6827 case UNSAVE_EXPR:
6828 {
6829 rtx temp;
6830 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6831 TREE_OPERAND (exp, 0)
6832 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6833 return temp;
6834 }
6835
6836 case PLACEHOLDER_EXPR:
6837 {
6838 tree old_list = placeholder_list;
6839 tree placeholder_expr = 0;
6840
6841 exp = find_placeholder (exp, &placeholder_expr);
6842 if (exp == 0)
6843 abort ();
6844
6845 placeholder_list = TREE_CHAIN (placeholder_expr);
6846 temp = expand_expr (exp, original_target, tmode, modifier);
6847 placeholder_list = old_list;
6848 return temp;
6849 }
6850
6851 case WITH_RECORD_EXPR:
6852 /* Put the object on the placeholder list, expand our first operand,
6853 and pop the list. */
6854 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6855 placeholder_list);
6856 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6857 modifier);
6858 placeholder_list = TREE_CHAIN (placeholder_list);
6859 return target;
6860
6861 case GOTO_EXPR:
6862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6863 expand_goto (TREE_OPERAND (exp, 0));
6864 else
6865 expand_computed_goto (TREE_OPERAND (exp, 0));
6866 return const0_rtx;
6867
6868 case EXIT_EXPR:
6869 expand_exit_loop_if_false (NULL,
6870 invert_truthvalue (TREE_OPERAND (exp, 0)));
6871 return const0_rtx;
6872
6873 case LABELED_BLOCK_EXPR:
6874 if (LABELED_BLOCK_BODY (exp))
6875 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6876 /* Should perhaps use expand_label, but this is simpler and safer. */
6877 do_pending_stack_adjust ();
6878 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6879 return const0_rtx;
6880
6881 case EXIT_BLOCK_EXPR:
6882 if (EXIT_BLOCK_RETURN (exp))
6883 sorry ("returned value in block_exit_expr");
6884 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6885 return const0_rtx;
6886
6887 case LOOP_EXPR:
6888 push_temp_slots ();
6889 expand_start_loop (1);
6890 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6891 expand_end_loop ();
6892 pop_temp_slots ();
6893
6894 return const0_rtx;
6895
6896 case BIND_EXPR:
6897 {
6898 tree vars = TREE_OPERAND (exp, 0);
6899 int vars_need_expansion = 0;
6900
6901 /* Need to open a binding contour here because
6902 if there are any cleanups they must be contained here. */
6903 expand_start_bindings (2);
6904
6905 /* Mark the corresponding BLOCK for output in its proper place. */
6906 if (TREE_OPERAND (exp, 2) != 0
6907 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6908 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6909
6910 /* If VARS have not yet been expanded, expand them now. */
6911 while (vars)
6912 {
6913 if (!DECL_RTL_SET_P (vars))
6914 {
6915 vars_need_expansion = 1;
6916 expand_decl (vars);
6917 }
6918 expand_decl_init (vars);
6919 vars = TREE_CHAIN (vars);
6920 }
6921
6922 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6923
6924 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6925
6926 return temp;
6927 }
6928
6929 case RTL_EXPR:
6930 if (RTL_EXPR_SEQUENCE (exp))
6931 {
6932 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6933 abort ();
6934 emit_insn (RTL_EXPR_SEQUENCE (exp));
6935 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6936 }
6937 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6938 free_temps_for_rtl_expr (exp);
6939 return RTL_EXPR_RTL (exp);
6940
6941 case CONSTRUCTOR:
6942 /* If we don't need the result, just ensure we evaluate any
6943 subexpressions. */
6944 if (ignore)
6945 {
6946 tree elt;
6947
6948 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6949 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6950
6951 return const0_rtx;
6952 }
6953
6954 /* All elts simple constants => refer to a constant in memory. But
6955 if this is a non-BLKmode mode, let it store a field at a time
6956 since that should make a CONST_INT or CONST_DOUBLE when we
6957 fold. Likewise, if we have a target we can use, it is best to
6958 store directly into the target unless the type is large enough
6959 that memcpy will be used. If we are making an initializer and
6960 all operands are constant, put it in memory as well.
6961
6962 FIXME: Avoid trying to fill vector constructors piece-meal.
6963 Output them with output_constant_def below unless we're sure
6964 they're zeros. This should go away when vector initializers
6965 are treated like VECTOR_CST instead of arrays.
6966 */
6967 else if ((TREE_STATIC (exp)
6968 && ((mode == BLKmode
6969 && ! (target != 0 && safe_from_p (target, exp, 1)))
6970 || TREE_ADDRESSABLE (exp)
6971 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6972 && (! MOVE_BY_PIECES_P
6973 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6974 TYPE_ALIGN (type)))
6975 && ((TREE_CODE (type) == VECTOR_TYPE
6976 && !is_zeros_p (exp))
6977 || ! mostly_zeros_p (exp)))))
6978 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6979 {
6980 rtx constructor = output_constant_def (exp, 1);
6981
6982 if (modifier != EXPAND_CONST_ADDRESS
6983 && modifier != EXPAND_INITIALIZER
6984 && modifier != EXPAND_SUM)
6985 constructor = validize_mem (constructor);
6986
6987 return constructor;
6988 }
6989 else
6990 {
6991 /* Handle calls that pass values in multiple non-contiguous
6992 locations. The Irix 6 ABI has examples of this. */
6993 if (target == 0 || ! safe_from_p (target, exp, 1)
6994 || GET_CODE (target) == PARALLEL)
6995 target
6996 = assign_temp (build_qualified_type (type,
6997 (TYPE_QUALS (type)
6998 | (TREE_READONLY (exp)
6999 * TYPE_QUAL_CONST))),
7000 0, TREE_ADDRESSABLE (exp), 1);
7001
7002 store_constructor (exp, target, 0, int_expr_size (exp));
7003 return target;
7004 }
7005
7006 case INDIRECT_REF:
7007 {
7008 tree exp1 = TREE_OPERAND (exp, 0);
7009 tree index;
7010 tree string = string_constant (exp1, &index);
7011
7012 /* Try to optimize reads from const strings. */
7013 if (string
7014 && TREE_CODE (string) == STRING_CST
7015 && TREE_CODE (index) == INTEGER_CST
7016 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7017 && GET_MODE_CLASS (mode) == MODE_INT
7018 && GET_MODE_SIZE (mode) == 1
7019 && modifier != EXPAND_WRITE)
7020 return gen_int_mode (TREE_STRING_POINTER (string)
7021 [TREE_INT_CST_LOW (index)], mode);
7022
7023 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7024 op0 = memory_address (mode, op0);
7025 temp = gen_rtx_MEM (mode, op0);
7026 set_mem_attributes (temp, exp, 0);
7027
7028 /* If we are writing to this object and its type is a record with
7029 readonly fields, we must mark it as readonly so it will
7030 conflict with readonly references to those fields. */
7031 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7032 RTX_UNCHANGING_P (temp) = 1;
7033
7034 return temp;
7035 }
7036
7037 case ARRAY_REF:
7038 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7039 abort ();
7040
7041 {
7042 tree array = TREE_OPERAND (exp, 0);
7043 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7044 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7045 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7046 HOST_WIDE_INT i;
7047
7048 /* Optimize the special-case of a zero lower bound.
7049
7050 We convert the low_bound to sizetype to avoid some problems
7051 with constant folding. (E.g. suppose the lower bound is 1,
7052 and its mode is QI. Without the conversion, (ARRAY
7053 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7054 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7055
7056 if (! integer_zerop (low_bound))
7057 index = size_diffop (index, convert (sizetype, low_bound));
7058
7059 /* Fold an expression like: "foo"[2].
7060 This is not done in fold so it won't happen inside &.
7061 Don't fold if this is for wide characters since it's too
7062 difficult to do correctly and this is a very rare case. */
7063
7064 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7065 && TREE_CODE (array) == STRING_CST
7066 && TREE_CODE (index) == INTEGER_CST
7067 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7068 && GET_MODE_CLASS (mode) == MODE_INT
7069 && GET_MODE_SIZE (mode) == 1)
7070 return gen_int_mode (TREE_STRING_POINTER (array)
7071 [TREE_INT_CST_LOW (index)], mode);
7072
7073 /* If this is a constant index into a constant array,
7074 just get the value from the array. Handle both the cases when
7075 we have an explicit constructor and when our operand is a variable
7076 that was declared const. */
7077
7078 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7079 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7080 && TREE_CODE (index) == INTEGER_CST
7081 && 0 > compare_tree_int (index,
7082 list_length (CONSTRUCTOR_ELTS
7083 (TREE_OPERAND (exp, 0)))))
7084 {
7085 tree elem;
7086
7087 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7088 i = TREE_INT_CST_LOW (index);
7089 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7090 ;
7091
7092 if (elem)
7093 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7094 modifier);
7095 }
7096
7097 else if (optimize >= 1
7098 && modifier != EXPAND_CONST_ADDRESS
7099 && modifier != EXPAND_INITIALIZER
7100 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7101 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7102 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7103 {
7104 if (TREE_CODE (index) == INTEGER_CST)
7105 {
7106 tree init = DECL_INITIAL (array);
7107
7108 if (TREE_CODE (init) == CONSTRUCTOR)
7109 {
7110 tree elem;
7111
7112 for (elem = CONSTRUCTOR_ELTS (init);
7113 (elem
7114 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7115 elem = TREE_CHAIN (elem))
7116 ;
7117
7118 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7119 return expand_expr (fold (TREE_VALUE (elem)), target,
7120 tmode, modifier);
7121 }
7122 else if (TREE_CODE (init) == STRING_CST
7123 && 0 > compare_tree_int (index,
7124 TREE_STRING_LENGTH (init)))
7125 {
7126 tree type = TREE_TYPE (TREE_TYPE (init));
7127 enum machine_mode mode = TYPE_MODE (type);
7128
7129 if (GET_MODE_CLASS (mode) == MODE_INT
7130 && GET_MODE_SIZE (mode) == 1)
7131 return gen_int_mode (TREE_STRING_POINTER (init)
7132 [TREE_INT_CST_LOW (index)], mode);
7133 }
7134 }
7135 }
7136 }
7137 /* Fall through. */
7138
7139 case COMPONENT_REF:
7140 case BIT_FIELD_REF:
7141 case ARRAY_RANGE_REF:
7142 /* If the operand is a CONSTRUCTOR, we can just extract the
7143 appropriate field if it is present. Don't do this if we have
7144 already written the data since we want to refer to that copy
7145 and varasm.c assumes that's what we'll do. */
7146 if (code == COMPONENT_REF
7147 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7148 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7149 {
7150 tree elt;
7151
7152 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7153 elt = TREE_CHAIN (elt))
7154 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7155 /* We can normally use the value of the field in the
7156 CONSTRUCTOR. However, if this is a bitfield in
7157 an integral mode that we can fit in a HOST_WIDE_INT,
7158 we must mask only the number of bits in the bitfield,
7159 since this is done implicitly by the constructor. If
7160 the bitfield does not meet either of those conditions,
7161 we can't do this optimization. */
7162 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7163 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7164 == MODE_INT)
7165 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7166 <= HOST_BITS_PER_WIDE_INT))))
7167 {
7168 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7169 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7170 {
7171 HOST_WIDE_INT bitsize
7172 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7173 enum machine_mode imode
7174 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7175
7176 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7177 {
7178 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7179 op0 = expand_and (imode, op0, op1, target);
7180 }
7181 else
7182 {
7183 tree count
7184 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7185 0);
7186
7187 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7188 target, 0);
7189 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7190 target, 0);
7191 }
7192 }
7193
7194 return op0;
7195 }
7196 }
7197
7198 {
7199 enum machine_mode mode1;
7200 HOST_WIDE_INT bitsize, bitpos;
7201 tree offset;
7202 int volatilep = 0;
7203 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7204 &mode1, &unsignedp, &volatilep);
7205 rtx orig_op0;
7206
7207 /* If we got back the original object, something is wrong. Perhaps
7208 we are evaluating an expression too early. In any event, don't
7209 infinitely recurse. */
7210 if (tem == exp)
7211 abort ();
7212
7213 /* If TEM's type is a union of variable size, pass TARGET to the inner
7214 computation, since it will need a temporary and TARGET is known
7215 to have to do. This occurs in unchecked conversion in Ada. */
7216
7217 orig_op0 = op0
7218 = expand_expr (tem,
7219 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7220 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7221 != INTEGER_CST)
7222 ? target : NULL_RTX),
7223 VOIDmode,
7224 (modifier == EXPAND_INITIALIZER
7225 || modifier == EXPAND_CONST_ADDRESS)
7226 ? modifier : EXPAND_NORMAL);
7227
7228 /* If this is a constant, put it into a register if it is a
7229 legitimate constant and OFFSET is 0 and memory if it isn't. */
7230 if (CONSTANT_P (op0))
7231 {
7232 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7233 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7234 && offset == 0)
7235 op0 = force_reg (mode, op0);
7236 else
7237 op0 = validize_mem (force_const_mem (mode, op0));
7238 }
7239
7240 if (offset != 0)
7241 {
7242 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7243
7244 /* If this object is in a register, put it into memory.
7245 This case can't occur in C, but can in Ada if we have
7246 unchecked conversion of an expression from a scalar type to
7247 an array or record type. */
7248 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7249 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7250 {
7251 /* If the operand is a SAVE_EXPR, we can deal with this by
7252 forcing the SAVE_EXPR into memory. */
7253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7254 {
7255 put_var_into_stack (TREE_OPERAND (exp, 0));
7256 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7257 }
7258 else
7259 {
7260 tree nt
7261 = build_qualified_type (TREE_TYPE (tem),
7262 (TYPE_QUALS (TREE_TYPE (tem))
7263 | TYPE_QUAL_CONST));
7264 rtx memloc = assign_temp (nt, 1, 1, 1);
7265
7266 emit_move_insn (memloc, op0);
7267 op0 = memloc;
7268 }
7269 }
7270
7271 if (GET_CODE (op0) != MEM)
7272 abort ();
7273
7274 #ifdef POINTERS_EXTEND_UNSIGNED
7275 if (GET_MODE (offset_rtx) != Pmode)
7276 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7277 #else
7278 if (GET_MODE (offset_rtx) != ptr_mode)
7279 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7280 #endif
7281
7282 /* A constant address in OP0 can have VOIDmode, we must not try
7283 to call force_reg for that case. Avoid that case. */
7284 if (GET_CODE (op0) == MEM
7285 && GET_MODE (op0) == BLKmode
7286 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7287 && bitsize != 0
7288 && (bitpos % bitsize) == 0
7289 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7290 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7291 {
7292 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7293 bitpos = 0;
7294 }
7295
7296 op0 = offset_address (op0, offset_rtx,
7297 highest_pow2_factor (offset));
7298 }
7299
7300 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7301 record its alignment as BIGGEST_ALIGNMENT. */
7302 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7303 && is_aligning_offset (offset, tem))
7304 set_mem_align (op0, BIGGEST_ALIGNMENT);
7305
7306 /* Don't forget about volatility even if this is a bitfield. */
7307 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7308 {
7309 if (op0 == orig_op0)
7310 op0 = copy_rtx (op0);
7311
7312 MEM_VOLATILE_P (op0) = 1;
7313 }
7314
7315 /* The following code doesn't handle CONCAT.
7316 Assume only bitpos == 0 can be used for CONCAT, due to
7317 one element arrays having the same mode as its element. */
7318 if (GET_CODE (op0) == CONCAT)
7319 {
7320 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7321 abort ();
7322 return op0;
7323 }
7324
7325 /* In cases where an aligned union has an unaligned object
7326 as a field, we might be extracting a BLKmode value from
7327 an integer-mode (e.g., SImode) object. Handle this case
7328 by doing the extract into an object as wide as the field
7329 (which we know to be the width of a basic mode), then
7330 storing into memory, and changing the mode to BLKmode. */
7331 if (mode1 == VOIDmode
7332 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7333 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7334 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7335 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7336 && modifier != EXPAND_CONST_ADDRESS
7337 && modifier != EXPAND_INITIALIZER)
7338 /* If the field isn't aligned enough to fetch as a memref,
7339 fetch it as a bit field. */
7340 || (mode1 != BLKmode
7341 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7342 && ((TYPE_ALIGN (TREE_TYPE (tem))
7343 < GET_MODE_ALIGNMENT (mode))
7344 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7345 /* If the type and the field are a constant size and the
7346 size of the type isn't the same size as the bitfield,
7347 we must use bitfield operations. */
7348 || (bitsize >= 0
7349 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7350 == INTEGER_CST)
7351 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7352 bitsize)))
7353 {
7354 enum machine_mode ext_mode = mode;
7355
7356 if (ext_mode == BLKmode
7357 && ! (target != 0 && GET_CODE (op0) == MEM
7358 && GET_CODE (target) == MEM
7359 && bitpos % BITS_PER_UNIT == 0))
7360 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7361
7362 if (ext_mode == BLKmode)
7363 {
7364 /* In this case, BITPOS must start at a byte boundary and
7365 TARGET, if specified, must be a MEM. */
7366 if (GET_CODE (op0) != MEM
7367 || (target != 0 && GET_CODE (target) != MEM)
7368 || bitpos % BITS_PER_UNIT != 0)
7369 abort ();
7370
7371 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7372 if (target == 0)
7373 target = assign_temp (type, 0, 1, 1);
7374
7375 emit_block_move (target, op0,
7376 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7377 / BITS_PER_UNIT),
7378 BLOCK_OP_NORMAL);
7379
7380 return target;
7381 }
7382
7383 op0 = validize_mem (op0);
7384
7385 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7386 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7387
7388 op0 = extract_bit_field (op0, bitsize, bitpos,
7389 unsignedp, target, ext_mode, ext_mode,
7390 int_size_in_bytes (TREE_TYPE (tem)));
7391
7392 /* If the result is a record type and BITSIZE is narrower than
7393 the mode of OP0, an integral mode, and this is a big endian
7394 machine, we must put the field into the high-order bits. */
7395 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7396 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7397 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7398 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7399 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7400 - bitsize),
7401 op0, 1);
7402
7403 if (mode == BLKmode)
7404 {
7405 rtx new = assign_temp (build_qualified_type
7406 ((*lang_hooks.types.type_for_mode)
7407 (ext_mode, 0),
7408 TYPE_QUAL_CONST), 0, 1, 1);
7409
7410 emit_move_insn (new, op0);
7411 op0 = copy_rtx (new);
7412 PUT_MODE (op0, BLKmode);
7413 set_mem_attributes (op0, exp, 1);
7414 }
7415
7416 return op0;
7417 }
7418
7419 /* If the result is BLKmode, use that to access the object
7420 now as well. */
7421 if (mode == BLKmode)
7422 mode1 = BLKmode;
7423
7424 /* Get a reference to just this component. */
7425 if (modifier == EXPAND_CONST_ADDRESS
7426 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7427 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7428 else
7429 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7430
7431 if (op0 == orig_op0)
7432 op0 = copy_rtx (op0);
7433
7434 set_mem_attributes (op0, exp, 0);
7435 if (GET_CODE (XEXP (op0, 0)) == REG)
7436 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7437
7438 MEM_VOLATILE_P (op0) |= volatilep;
7439 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7440 || modifier == EXPAND_CONST_ADDRESS
7441 || modifier == EXPAND_INITIALIZER)
7442 return op0;
7443 else if (target == 0)
7444 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7445
7446 convert_move (target, op0, unsignedp);
7447 return target;
7448 }
7449
7450 case VTABLE_REF:
7451 {
7452 rtx insn, before = get_last_insn (), vtbl_ref;
7453
7454 /* Evaluate the interior expression. */
7455 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7456 tmode, modifier);
7457
7458 /* Get or create an instruction off which to hang a note. */
7459 if (REG_P (subtarget))
7460 {
7461 target = subtarget;
7462 insn = get_last_insn ();
7463 if (insn == before)
7464 abort ();
7465 if (! INSN_P (insn))
7466 insn = prev_nonnote_insn (insn);
7467 }
7468 else
7469 {
7470 target = gen_reg_rtx (GET_MODE (subtarget));
7471 insn = emit_move_insn (target, subtarget);
7472 }
7473
7474 /* Collect the data for the note. */
7475 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7476 vtbl_ref = plus_constant (vtbl_ref,
7477 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7478 /* Discard the initial CONST that was added. */
7479 vtbl_ref = XEXP (vtbl_ref, 0);
7480
7481 REG_NOTES (insn)
7482 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7483
7484 return target;
7485 }
7486
7487 /* Intended for a reference to a buffer of a file-object in Pascal.
7488 But it's not certain that a special tree code will really be
7489 necessary for these. INDIRECT_REF might work for them. */
7490 case BUFFER_REF:
7491 abort ();
7492
7493 case IN_EXPR:
7494 {
7495 /* Pascal set IN expression.
7496
7497 Algorithm:
7498 rlo = set_low - (set_low%bits_per_word);
7499 the_word = set [ (index - rlo)/bits_per_word ];
7500 bit_index = index % bits_per_word;
7501 bitmask = 1 << bit_index;
7502 return !!(the_word & bitmask); */
7503
7504 tree set = TREE_OPERAND (exp, 0);
7505 tree index = TREE_OPERAND (exp, 1);
7506 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7507 tree set_type = TREE_TYPE (set);
7508 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7509 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7510 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7511 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7512 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7513 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7514 rtx setaddr = XEXP (setval, 0);
7515 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7516 rtx rlow;
7517 rtx diff, quo, rem, addr, bit, result;
7518
7519 /* If domain is empty, answer is no. Likewise if index is constant
7520 and out of bounds. */
7521 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7522 && TREE_CODE (set_low_bound) == INTEGER_CST
7523 && tree_int_cst_lt (set_high_bound, set_low_bound))
7524 || (TREE_CODE (index) == INTEGER_CST
7525 && TREE_CODE (set_low_bound) == INTEGER_CST
7526 && tree_int_cst_lt (index, set_low_bound))
7527 || (TREE_CODE (set_high_bound) == INTEGER_CST
7528 && TREE_CODE (index) == INTEGER_CST
7529 && tree_int_cst_lt (set_high_bound, index))))
7530 return const0_rtx;
7531
7532 if (target == 0)
7533 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7534
7535 /* If we get here, we have to generate the code for both cases
7536 (in range and out of range). */
7537
7538 op0 = gen_label_rtx ();
7539 op1 = gen_label_rtx ();
7540
7541 if (! (GET_CODE (index_val) == CONST_INT
7542 && GET_CODE (lo_r) == CONST_INT))
7543 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7544 GET_MODE (index_val), iunsignedp, op1);
7545
7546 if (! (GET_CODE (index_val) == CONST_INT
7547 && GET_CODE (hi_r) == CONST_INT))
7548 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7549 GET_MODE (index_val), iunsignedp, op1);
7550
7551 /* Calculate the element number of bit zero in the first word
7552 of the set. */
7553 if (GET_CODE (lo_r) == CONST_INT)
7554 rlow = GEN_INT (INTVAL (lo_r)
7555 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7556 else
7557 rlow = expand_binop (index_mode, and_optab, lo_r,
7558 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7559 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7560
7561 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7562 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7563
7564 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7565 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7566 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7567 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7568
7569 addr = memory_address (byte_mode,
7570 expand_binop (index_mode, add_optab, diff,
7571 setaddr, NULL_RTX, iunsignedp,
7572 OPTAB_LIB_WIDEN));
7573
7574 /* Extract the bit we want to examine. */
7575 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7576 gen_rtx_MEM (byte_mode, addr),
7577 make_tree (TREE_TYPE (index), rem),
7578 NULL_RTX, 1);
7579 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7580 GET_MODE (target) == byte_mode ? target : 0,
7581 1, OPTAB_LIB_WIDEN);
7582
7583 if (result != target)
7584 convert_move (target, result, 1);
7585
7586 /* Output the code to handle the out-of-range case. */
7587 emit_jump (op0);
7588 emit_label (op1);
7589 emit_move_insn (target, const0_rtx);
7590 emit_label (op0);
7591 return target;
7592 }
7593
7594 case WITH_CLEANUP_EXPR:
7595 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7596 {
7597 WITH_CLEANUP_EXPR_RTL (exp)
7598 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7599 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7600 CLEANUP_EH_ONLY (exp));
7601
7602 /* That's it for this cleanup. */
7603 TREE_OPERAND (exp, 1) = 0;
7604 }
7605 return WITH_CLEANUP_EXPR_RTL (exp);
7606
7607 case CLEANUP_POINT_EXPR:
7608 {
7609 /* Start a new binding layer that will keep track of all cleanup
7610 actions to be performed. */
7611 expand_start_bindings (2);
7612
7613 target_temp_slot_level = temp_slot_level;
7614
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7616 /* If we're going to use this value, load it up now. */
7617 if (! ignore)
7618 op0 = force_not_mem (op0);
7619 preserve_temp_slots (op0);
7620 expand_end_bindings (NULL_TREE, 0, 0);
7621 }
7622 return op0;
7623
7624 case CALL_EXPR:
7625 /* Check for a built-in function. */
7626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7627 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7628 == FUNCTION_DECL)
7629 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7630 {
7631 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7632 == BUILT_IN_FRONTEND)
7633 return (*lang_hooks.expand_expr)
7634 (exp, original_target, tmode, modifier);
7635 else
7636 return expand_builtin (exp, target, subtarget, tmode, ignore);
7637 }
7638
7639 return expand_call (exp, target, ignore);
7640
7641 case NON_LVALUE_EXPR:
7642 case NOP_EXPR:
7643 case CONVERT_EXPR:
7644 case REFERENCE_EXPR:
7645 if (TREE_OPERAND (exp, 0) == error_mark_node)
7646 return const0_rtx;
7647
7648 if (TREE_CODE (type) == UNION_TYPE)
7649 {
7650 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7651
7652 /* If both input and output are BLKmode, this conversion isn't doing
7653 anything except possibly changing memory attribute. */
7654 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7655 {
7656 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7657 modifier);
7658
7659 result = copy_rtx (result);
7660 set_mem_attributes (result, exp, 0);
7661 return result;
7662 }
7663
7664 if (target == 0)
7665 target = assign_temp (type, 0, 1, 1);
7666
7667 if (GET_CODE (target) == MEM)
7668 /* Store data into beginning of memory target. */
7669 store_expr (TREE_OPERAND (exp, 0),
7670 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7671
7672 else if (GET_CODE (target) == REG)
7673 /* Store this field into a union of the proper type. */
7674 store_field (target,
7675 MIN ((int_size_in_bytes (TREE_TYPE
7676 (TREE_OPERAND (exp, 0)))
7677 * BITS_PER_UNIT),
7678 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7679 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7680 VOIDmode, 0, type, 0);
7681 else
7682 abort ();
7683
7684 /* Return the entire union. */
7685 return target;
7686 }
7687
7688 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7689 {
7690 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7691 modifier);
7692
7693 /* If the signedness of the conversion differs and OP0 is
7694 a promoted SUBREG, clear that indication since we now
7695 have to do the proper extension. */
7696 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7697 && GET_CODE (op0) == SUBREG)
7698 SUBREG_PROMOTED_VAR_P (op0) = 0;
7699
7700 return op0;
7701 }
7702
7703 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7704 if (GET_MODE (op0) == mode)
7705 return op0;
7706
7707 /* If OP0 is a constant, just convert it into the proper mode. */
7708 if (CONSTANT_P (op0))
7709 {
7710 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7711 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7712
7713 if (modifier == EXPAND_INITIALIZER)
7714 return simplify_gen_subreg (mode, op0, inner_mode,
7715 subreg_lowpart_offset (mode,
7716 inner_mode));
7717 else
7718 return convert_modes (mode, inner_mode, op0,
7719 TREE_UNSIGNED (inner_type));
7720 }
7721
7722 if (modifier == EXPAND_INITIALIZER)
7723 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7724
7725 if (target == 0)
7726 return
7727 convert_to_mode (mode, op0,
7728 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7729 else
7730 convert_move (target, op0,
7731 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7732 return target;
7733
7734 case VIEW_CONVERT_EXPR:
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7736
7737 /* If the input and output modes are both the same, we are done.
7738 Otherwise, if neither mode is BLKmode and both are within a word, we
7739 can use gen_lowpart. If neither is true, make sure the operand is
7740 in memory and convert the MEM to the new mode. */
7741 if (TYPE_MODE (type) == GET_MODE (op0))
7742 ;
7743 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7744 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7745 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7746 op0 = gen_lowpart (TYPE_MODE (type), op0);
7747 else if (GET_CODE (op0) != MEM)
7748 {
7749 /* If the operand is not a MEM, force it into memory. Since we
7750 are going to be be changing the mode of the MEM, don't call
7751 force_const_mem for constants because we don't allow pool
7752 constants to change mode. */
7753 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7754
7755 if (TREE_ADDRESSABLE (exp))
7756 abort ();
7757
7758 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7759 target
7760 = assign_stack_temp_for_type
7761 (TYPE_MODE (inner_type),
7762 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7763
7764 emit_move_insn (target, op0);
7765 op0 = target;
7766 }
7767
7768 /* At this point, OP0 is in the correct mode. If the output type is such
7769 that the operand is known to be aligned, indicate that it is.
7770 Otherwise, we need only be concerned about alignment for non-BLKmode
7771 results. */
7772 if (GET_CODE (op0) == MEM)
7773 {
7774 op0 = copy_rtx (op0);
7775
7776 if (TYPE_ALIGN_OK (type))
7777 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7778 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7779 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7780 {
7781 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7782 HOST_WIDE_INT temp_size
7783 = MAX (int_size_in_bytes (inner_type),
7784 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7785 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7786 temp_size, 0, type);
7787 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7788
7789 if (TREE_ADDRESSABLE (exp))
7790 abort ();
7791
7792 if (GET_MODE (op0) == BLKmode)
7793 emit_block_move (new_with_op0_mode, op0,
7794 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7795 BLOCK_OP_NORMAL);
7796 else
7797 emit_move_insn (new_with_op0_mode, op0);
7798
7799 op0 = new;
7800 }
7801
7802 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7803 }
7804
7805 return op0;
7806
7807 case PLUS_EXPR:
7808 this_optab = ! unsignedp && flag_trapv
7809 && (GET_MODE_CLASS (mode) == MODE_INT)
7810 ? addv_optab : add_optab;
7811
7812 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7813 something else, make sure we add the register to the constant and
7814 then to the other thing. This case can occur during strength
7815 reduction and doing it this way will produce better code if the
7816 frame pointer or argument pointer is eliminated.
7817
7818 fold-const.c will ensure that the constant is always in the inner
7819 PLUS_EXPR, so the only case we need to do anything about is if
7820 sp, ap, or fp is our second argument, in which case we must swap
7821 the innermost first argument and our second argument. */
7822
7823 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7824 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7825 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7826 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7827 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7828 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7829 {
7830 tree t = TREE_OPERAND (exp, 1);
7831
7832 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7833 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7834 }
7835
7836 /* If the result is to be ptr_mode and we are adding an integer to
7837 something, we might be forming a constant. So try to use
7838 plus_constant. If it produces a sum and we can't accept it,
7839 use force_operand. This allows P = &ARR[const] to generate
7840 efficient code on machines where a SYMBOL_REF is not a valid
7841 address.
7842
7843 If this is an EXPAND_SUM call, always return the sum. */
7844 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7845 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7846 {
7847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7848 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7849 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7850 {
7851 rtx constant_part;
7852
7853 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7854 EXPAND_SUM);
7855 /* Use immed_double_const to ensure that the constant is
7856 truncated according to the mode of OP1, then sign extended
7857 to a HOST_WIDE_INT. Using the constant directly can result
7858 in non-canonical RTL in a 64x32 cross compile. */
7859 constant_part
7860 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7861 (HOST_WIDE_INT) 0,
7862 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7863 op1 = plus_constant (op1, INTVAL (constant_part));
7864 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7865 op1 = force_operand (op1, target);
7866 return op1;
7867 }
7868
7869 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7871 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7872 {
7873 rtx constant_part;
7874
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7876 (modifier == EXPAND_INITIALIZER
7877 ? EXPAND_INITIALIZER : EXPAND_SUM));
7878 if (! CONSTANT_P (op0))
7879 {
7880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7881 VOIDmode, modifier);
7882 /* Don't go to both_summands if modifier
7883 says it's not right to return a PLUS. */
7884 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7885 goto binop2;
7886 goto both_summands;
7887 }
7888 /* Use immed_double_const to ensure that the constant is
7889 truncated according to the mode of OP1, then sign extended
7890 to a HOST_WIDE_INT. Using the constant directly can result
7891 in non-canonical RTL in a 64x32 cross compile. */
7892 constant_part
7893 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7894 (HOST_WIDE_INT) 0,
7895 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7896 op0 = plus_constant (op0, INTVAL (constant_part));
7897 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7898 op0 = force_operand (op0, target);
7899 return op0;
7900 }
7901 }
7902
7903 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7904 subtarget = 0;
7905
7906 /* No sense saving up arithmetic to be done
7907 if it's all in the wrong mode to form part of an address.
7908 And force_operand won't know whether to sign-extend or
7909 zero-extend. */
7910 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7911 || mode != ptr_mode)
7912 {
7913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7915 if (op0 == const0_rtx)
7916 return op1;
7917 if (op1 == const0_rtx)
7918 return op0;
7919 goto binop2;
7920 }
7921
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7924
7925 /* We come here from MINUS_EXPR when the second operand is a
7926 constant. */
7927 both_summands:
7928 /* Make sure any term that's a sum with a constant comes last. */
7929 if (GET_CODE (op0) == PLUS
7930 && CONSTANT_P (XEXP (op0, 1)))
7931 {
7932 temp = op0;
7933 op0 = op1;
7934 op1 = temp;
7935 }
7936 /* If adding to a sum including a constant,
7937 associate it to put the constant outside. */
7938 if (GET_CODE (op1) == PLUS
7939 && CONSTANT_P (XEXP (op1, 1)))
7940 {
7941 rtx constant_term = const0_rtx;
7942
7943 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7944 if (temp != 0)
7945 op0 = temp;
7946 /* Ensure that MULT comes first if there is one. */
7947 else if (GET_CODE (op0) == MULT)
7948 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7949 else
7950 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7951
7952 /* Let's also eliminate constants from op0 if possible. */
7953 op0 = eliminate_constant_term (op0, &constant_term);
7954
7955 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7956 their sum should be a constant. Form it into OP1, since the
7957 result we want will then be OP0 + OP1. */
7958
7959 temp = simplify_binary_operation (PLUS, mode, constant_term,
7960 XEXP (op1, 1));
7961 if (temp != 0)
7962 op1 = temp;
7963 else
7964 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7965 }
7966
7967 /* Put a constant term last and put a multiplication first. */
7968 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7969 temp = op1, op1 = op0, op0 = temp;
7970
7971 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7972 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7973
7974 case MINUS_EXPR:
7975 /* For initializers, we are allowed to return a MINUS of two
7976 symbolic constants. Here we handle all cases when both operands
7977 are constant. */
7978 /* Handle difference of two symbolic constants,
7979 for the sake of an initializer. */
7980 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7981 && really_constant_p (TREE_OPERAND (exp, 0))
7982 && really_constant_p (TREE_OPERAND (exp, 1)))
7983 {
7984 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7985 modifier);
7986 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7987 modifier);
7988
7989 /* If the last operand is a CONST_INT, use plus_constant of
7990 the negated constant. Else make the MINUS. */
7991 if (GET_CODE (op1) == CONST_INT)
7992 return plus_constant (op0, - INTVAL (op1));
7993 else
7994 return gen_rtx_MINUS (mode, op0, op1);
7995 }
7996
7997 this_optab = ! unsignedp && flag_trapv
7998 && (GET_MODE_CLASS(mode) == MODE_INT)
7999 ? subv_optab : sub_optab;
8000
8001 /* No sense saving up arithmetic to be done
8002 if it's all in the wrong mode to form part of an address.
8003 And force_operand won't know whether to sign-extend or
8004 zero-extend. */
8005 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8006 || mode != ptr_mode)
8007 goto binop;
8008
8009 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8010 subtarget = 0;
8011
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8013 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8014
8015 /* Convert A - const to A + (-const). */
8016 if (GET_CODE (op1) == CONST_INT)
8017 {
8018 op1 = negate_rtx (mode, op1);
8019 goto both_summands;
8020 }
8021
8022 goto binop2;
8023
8024 case MULT_EXPR:
8025 /* If first operand is constant, swap them.
8026 Thus the following special case checks need only
8027 check the second operand. */
8028 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8029 {
8030 tree t1 = TREE_OPERAND (exp, 0);
8031 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8032 TREE_OPERAND (exp, 1) = t1;
8033 }
8034
8035 /* Attempt to return something suitable for generating an
8036 indexed address, for machines that support that. */
8037
8038 if (modifier == EXPAND_SUM && mode == ptr_mode
8039 && host_integerp (TREE_OPERAND (exp, 1), 0))
8040 {
8041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8042 EXPAND_SUM);
8043
8044 /* If we knew for certain that this is arithmetic for an array
8045 reference, and we knew the bounds of the array, then we could
8046 apply the distributive law across (PLUS X C) for constant C.
8047 Without such knowledge, we risk overflowing the computation
8048 when both X and C are large, but X+C isn't. */
8049 /* ??? Could perhaps special-case EXP being unsigned and C being
8050 positive. In that case we are certain that X+C is no smaller
8051 than X and so the transformed expression will overflow iff the
8052 original would have. */
8053
8054 if (GET_CODE (op0) != REG)
8055 op0 = force_operand (op0, NULL_RTX);
8056 if (GET_CODE (op0) != REG)
8057 op0 = copy_to_mode_reg (mode, op0);
8058
8059 return
8060 gen_rtx_MULT (mode, op0,
8061 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8062 }
8063
8064 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8065 subtarget = 0;
8066
8067 /* Check for multiplying things that have been extended
8068 from a narrower type. If this machine supports multiplying
8069 in that narrower type with a result in the desired type,
8070 do it that way, and avoid the explicit type-conversion. */
8071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8072 && TREE_CODE (type) == INTEGER_TYPE
8073 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8074 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8075 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8076 && int_fits_type_p (TREE_OPERAND (exp, 1),
8077 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8078 /* Don't use a widening multiply if a shift will do. */
8079 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8080 > HOST_BITS_PER_WIDE_INT)
8081 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8082 ||
8083 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8084 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8085 ==
8086 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8087 /* If both operands are extended, they must either both
8088 be zero-extended or both be sign-extended. */
8089 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8090 ==
8091 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8092 {
8093 enum machine_mode innermode
8094 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8095 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8096 ? smul_widen_optab : umul_widen_optab);
8097 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8098 ? umul_widen_optab : smul_widen_optab);
8099 if (mode == GET_MODE_WIDER_MODE (innermode))
8100 {
8101 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8102 {
8103 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8104 NULL_RTX, VOIDmode, 0);
8105 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8106 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8107 VOIDmode, 0);
8108 else
8109 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8110 NULL_RTX, VOIDmode, 0);
8111 goto binop2;
8112 }
8113 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8114 && innermode == word_mode)
8115 {
8116 rtx htem;
8117 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8118 NULL_RTX, VOIDmode, 0);
8119 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8120 op1 = convert_modes (innermode, mode,
8121 expand_expr (TREE_OPERAND (exp, 1),
8122 NULL_RTX, VOIDmode, 0),
8123 unsignedp);
8124 else
8125 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8126 NULL_RTX, VOIDmode, 0);
8127 temp = expand_binop (mode, other_optab, op0, op1, target,
8128 unsignedp, OPTAB_LIB_WIDEN);
8129 htem = expand_mult_highpart_adjust (innermode,
8130 gen_highpart (innermode, temp),
8131 op0, op1,
8132 gen_highpart (innermode, temp),
8133 unsignedp);
8134 emit_move_insn (gen_highpart (innermode, temp), htem);
8135 return temp;
8136 }
8137 }
8138 }
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8141 return expand_mult (mode, op0, op1, target, unsignedp);
8142
8143 case TRUNC_DIV_EXPR:
8144 case FLOOR_DIV_EXPR:
8145 case CEIL_DIV_EXPR:
8146 case ROUND_DIV_EXPR:
8147 case EXACT_DIV_EXPR:
8148 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8149 subtarget = 0;
8150 /* Possible optimization: compute the dividend with EXPAND_SUM
8151 then if the divisor is constant can optimize the case
8152 where some terms of the dividend have coeffs divisible by it. */
8153 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8154 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8155 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8156
8157 case RDIV_EXPR:
8158 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8159 expensive divide. If not, combine will rebuild the original
8160 computation. */
8161 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8162 && TREE_CODE (type) == REAL_TYPE
8163 && !real_onep (TREE_OPERAND (exp, 0)))
8164 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8165 build (RDIV_EXPR, type,
8166 build_real (type, dconst1),
8167 TREE_OPERAND (exp, 1))),
8168 target, tmode, unsignedp);
8169 this_optab = sdiv_optab;
8170 goto binop;
8171
8172 case TRUNC_MOD_EXPR:
8173 case FLOOR_MOD_EXPR:
8174 case CEIL_MOD_EXPR:
8175 case ROUND_MOD_EXPR:
8176 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8177 subtarget = 0;
8178 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8179 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8180 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8181
8182 case FIX_ROUND_EXPR:
8183 case FIX_FLOOR_EXPR:
8184 case FIX_CEIL_EXPR:
8185 abort (); /* Not used for C. */
8186
8187 case FIX_TRUNC_EXPR:
8188 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8189 if (target == 0)
8190 target = gen_reg_rtx (mode);
8191 expand_fix (target, op0, unsignedp);
8192 return target;
8193
8194 case FLOAT_EXPR:
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8196 if (target == 0)
8197 target = gen_reg_rtx (mode);
8198 /* expand_float can't figure out what to do if FROM has VOIDmode.
8199 So give it the correct mode. With -O, cse will optimize this. */
8200 if (GET_MODE (op0) == VOIDmode)
8201 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8202 op0);
8203 expand_float (target, op0,
8204 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8205 return target;
8206
8207 case NEGATE_EXPR:
8208 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8209 temp = expand_unop (mode,
8210 ! unsignedp && flag_trapv
8211 && (GET_MODE_CLASS(mode) == MODE_INT)
8212 ? negv_optab : neg_optab, op0, target, 0);
8213 if (temp == 0)
8214 abort ();
8215 return temp;
8216
8217 case ABS_EXPR:
8218 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8219
8220 /* Handle complex values specially. */
8221 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8222 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8223 return expand_complex_abs (mode, op0, target, unsignedp);
8224
8225 /* Unsigned abs is simply the operand. Testing here means we don't
8226 risk generating incorrect code below. */
8227 if (TREE_UNSIGNED (type))
8228 return op0;
8229
8230 return expand_abs (mode, op0, target, unsignedp,
8231 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8232
8233 case MAX_EXPR:
8234 case MIN_EXPR:
8235 target = original_target;
8236 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8237 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8238 || GET_MODE (target) != mode
8239 || (GET_CODE (target) == REG
8240 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8241 target = gen_reg_rtx (mode);
8242 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8244
8245 /* First try to do it with a special MIN or MAX instruction.
8246 If that does not win, use a conditional jump to select the proper
8247 value. */
8248 this_optab = (TREE_UNSIGNED (type)
8249 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8250 : (code == MIN_EXPR ? smin_optab : smax_optab));
8251
8252 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8253 OPTAB_WIDEN);
8254 if (temp != 0)
8255 return temp;
8256
8257 /* At this point, a MEM target is no longer useful; we will get better
8258 code without it. */
8259
8260 if (GET_CODE (target) == MEM)
8261 target = gen_reg_rtx (mode);
8262
8263 if (target != op0)
8264 emit_move_insn (target, op0);
8265
8266 op0 = gen_label_rtx ();
8267
8268 /* If this mode is an integer too wide to compare properly,
8269 compare word by word. Rely on cse to optimize constant cases. */
8270 if (GET_MODE_CLASS (mode) == MODE_INT
8271 && ! can_compare_p (GE, mode, ccp_jump))
8272 {
8273 if (code == MAX_EXPR)
8274 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8275 target, op1, NULL_RTX, op0);
8276 else
8277 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8278 op1, target, NULL_RTX, op0);
8279 }
8280 else
8281 {
8282 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8283 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8284 unsignedp, mode, NULL_RTX, NULL_RTX,
8285 op0);
8286 }
8287 emit_move_insn (target, op1);
8288 emit_label (op0);
8289 return target;
8290
8291 case BIT_NOT_EXPR:
8292 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8293 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8294 if (temp == 0)
8295 abort ();
8296 return temp;
8297
8298 case FFS_EXPR:
8299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8300 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8301 if (temp == 0)
8302 abort ();
8303 return temp;
8304
8305 /* ??? Can optimize bitwise operations with one arg constant.
8306 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8307 and (a bitwise1 b) bitwise2 b (etc)
8308 but that is probably not worth while. */
8309
8310 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8311 boolean values when we want in all cases to compute both of them. In
8312 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8313 as actual zero-or-1 values and then bitwise anding. In cases where
8314 there cannot be any side effects, better code would be made by
8315 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8316 how to recognize those cases. */
8317
8318 case TRUTH_AND_EXPR:
8319 case BIT_AND_EXPR:
8320 this_optab = and_optab;
8321 goto binop;
8322
8323 case TRUTH_OR_EXPR:
8324 case BIT_IOR_EXPR:
8325 this_optab = ior_optab;
8326 goto binop;
8327
8328 case TRUTH_XOR_EXPR:
8329 case BIT_XOR_EXPR:
8330 this_optab = xor_optab;
8331 goto binop;
8332
8333 case LSHIFT_EXPR:
8334 case RSHIFT_EXPR:
8335 case LROTATE_EXPR:
8336 case RROTATE_EXPR:
8337 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8338 subtarget = 0;
8339 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8340 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8341 unsignedp);
8342
8343 /* Could determine the answer when only additive constants differ. Also,
8344 the addition of one can be handled by changing the condition. */
8345 case LT_EXPR:
8346 case LE_EXPR:
8347 case GT_EXPR:
8348 case GE_EXPR:
8349 case EQ_EXPR:
8350 case NE_EXPR:
8351 case UNORDERED_EXPR:
8352 case ORDERED_EXPR:
8353 case UNLT_EXPR:
8354 case UNLE_EXPR:
8355 case UNGT_EXPR:
8356 case UNGE_EXPR:
8357 case UNEQ_EXPR:
8358 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8359 if (temp != 0)
8360 return temp;
8361
8362 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8363 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8364 && original_target
8365 && GET_CODE (original_target) == REG
8366 && (GET_MODE (original_target)
8367 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8368 {
8369 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8370 VOIDmode, 0);
8371
8372 /* If temp is constant, we can just compute the result. */
8373 if (GET_CODE (temp) == CONST_INT)
8374 {
8375 if (INTVAL (temp) != 0)
8376 emit_move_insn (target, const1_rtx);
8377 else
8378 emit_move_insn (target, const0_rtx);
8379
8380 return target;
8381 }
8382
8383 if (temp != original_target)
8384 {
8385 enum machine_mode mode1 = GET_MODE (temp);
8386 if (mode1 == VOIDmode)
8387 mode1 = tmode != VOIDmode ? tmode : mode;
8388
8389 temp = copy_to_mode_reg (mode1, temp);
8390 }
8391
8392 op1 = gen_label_rtx ();
8393 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8394 GET_MODE (temp), unsignedp, op1);
8395 emit_move_insn (temp, const1_rtx);
8396 emit_label (op1);
8397 return temp;
8398 }
8399
8400 /* If no set-flag instruction, must generate a conditional
8401 store into a temporary variable. Drop through
8402 and handle this like && and ||. */
8403
8404 case TRUTH_ANDIF_EXPR:
8405 case TRUTH_ORIF_EXPR:
8406 if (! ignore
8407 && (target == 0 || ! safe_from_p (target, exp, 1)
8408 /* Make sure we don't have a hard reg (such as function's return
8409 value) live across basic blocks, if not optimizing. */
8410 || (!optimize && GET_CODE (target) == REG
8411 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8412 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8413
8414 if (target)
8415 emit_clr_insn (target);
8416
8417 op1 = gen_label_rtx ();
8418 jumpifnot (exp, op1);
8419
8420 if (target)
8421 emit_0_to_1_insn (target);
8422
8423 emit_label (op1);
8424 return ignore ? const0_rtx : target;
8425
8426 case TRUTH_NOT_EXPR:
8427 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8428 /* The parser is careful to generate TRUTH_NOT_EXPR
8429 only with operands that are always zero or one. */
8430 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8431 target, 1, OPTAB_LIB_WIDEN);
8432 if (temp == 0)
8433 abort ();
8434 return temp;
8435
8436 case COMPOUND_EXPR:
8437 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8438 emit_queue ();
8439 return expand_expr (TREE_OPERAND (exp, 1),
8440 (ignore ? const0_rtx : target),
8441 VOIDmode, 0);
8442
8443 case COND_EXPR:
8444 /* If we would have a "singleton" (see below) were it not for a
8445 conversion in each arm, bring that conversion back out. */
8446 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8447 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8448 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8449 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8450 {
8451 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8452 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8453
8454 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8455 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8456 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8457 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8458 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8459 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8460 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8461 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8462 return expand_expr (build1 (NOP_EXPR, type,
8463 build (COND_EXPR, TREE_TYPE (iftrue),
8464 TREE_OPERAND (exp, 0),
8465 iftrue, iffalse)),
8466 target, tmode, modifier);
8467 }
8468
8469 {
8470 /* Note that COND_EXPRs whose type is a structure or union
8471 are required to be constructed to contain assignments of
8472 a temporary variable, so that we can evaluate them here
8473 for side effect only. If type is void, we must do likewise. */
8474
8475 /* If an arm of the branch requires a cleanup,
8476 only that cleanup is performed. */
8477
8478 tree singleton = 0;
8479 tree binary_op = 0, unary_op = 0;
8480
8481 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8482 convert it to our mode, if necessary. */
8483 if (integer_onep (TREE_OPERAND (exp, 1))
8484 && integer_zerop (TREE_OPERAND (exp, 2))
8485 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8486 {
8487 if (ignore)
8488 {
8489 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8490 modifier);
8491 return const0_rtx;
8492 }
8493
8494 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8495 if (GET_MODE (op0) == mode)
8496 return op0;
8497
8498 if (target == 0)
8499 target = gen_reg_rtx (mode);
8500 convert_move (target, op0, unsignedp);
8501 return target;
8502 }
8503
8504 /* Check for X ? A + B : A. If we have this, we can copy A to the
8505 output and conditionally add B. Similarly for unary operations.
8506 Don't do this if X has side-effects because those side effects
8507 might affect A or B and the "?" operation is a sequence point in
8508 ANSI. (operand_equal_p tests for side effects.) */
8509
8510 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8511 && operand_equal_p (TREE_OPERAND (exp, 2),
8512 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8513 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8514 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8515 && operand_equal_p (TREE_OPERAND (exp, 1),
8516 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8517 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8518 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8519 && operand_equal_p (TREE_OPERAND (exp, 2),
8520 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8521 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8522 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8523 && operand_equal_p (TREE_OPERAND (exp, 1),
8524 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8525 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8526
8527 /* If we are not to produce a result, we have no target. Otherwise,
8528 if a target was specified use it; it will not be used as an
8529 intermediate target unless it is safe. If no target, use a
8530 temporary. */
8531
8532 if (ignore)
8533 temp = 0;
8534 else if (original_target
8535 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8536 || (singleton && GET_CODE (original_target) == REG
8537 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8538 && original_target == var_rtx (singleton)))
8539 && GET_MODE (original_target) == mode
8540 #ifdef HAVE_conditional_move
8541 && (! can_conditionally_move_p (mode)
8542 || GET_CODE (original_target) == REG
8543 || TREE_ADDRESSABLE (type))
8544 #endif
8545 && (GET_CODE (original_target) != MEM
8546 || TREE_ADDRESSABLE (type)))
8547 temp = original_target;
8548 else if (TREE_ADDRESSABLE (type))
8549 abort ();
8550 else
8551 temp = assign_temp (type, 0, 0, 1);
8552
8553 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8554 do the test of X as a store-flag operation, do this as
8555 A + ((X != 0) << log C). Similarly for other simple binary
8556 operators. Only do for C == 1 if BRANCH_COST is low. */
8557 if (temp && singleton && binary_op
8558 && (TREE_CODE (binary_op) == PLUS_EXPR
8559 || TREE_CODE (binary_op) == MINUS_EXPR
8560 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8561 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8562 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8563 : integer_onep (TREE_OPERAND (binary_op, 1)))
8564 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8565 {
8566 rtx result;
8567 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8568 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8569 ? addv_optab : add_optab)
8570 : TREE_CODE (binary_op) == MINUS_EXPR
8571 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8572 ? subv_optab : sub_optab)
8573 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8574 : xor_optab);
8575
8576 /* If we had X ? A : A + 1, do this as A + (X == 0).
8577
8578 We have to invert the truth value here and then put it
8579 back later if do_store_flag fails. We cannot simply copy
8580 TREE_OPERAND (exp, 0) to another variable and modify that
8581 because invert_truthvalue can modify the tree pointed to
8582 by its argument. */
8583 if (singleton == TREE_OPERAND (exp, 1))
8584 TREE_OPERAND (exp, 0)
8585 = invert_truthvalue (TREE_OPERAND (exp, 0));
8586
8587 result = do_store_flag (TREE_OPERAND (exp, 0),
8588 (safe_from_p (temp, singleton, 1)
8589 ? temp : NULL_RTX),
8590 mode, BRANCH_COST <= 1);
8591
8592 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8593 result = expand_shift (LSHIFT_EXPR, mode, result,
8594 build_int_2 (tree_log2
8595 (TREE_OPERAND
8596 (binary_op, 1)),
8597 0),
8598 (safe_from_p (temp, singleton, 1)
8599 ? temp : NULL_RTX), 0);
8600
8601 if (result)
8602 {
8603 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8604 return expand_binop (mode, boptab, op1, result, temp,
8605 unsignedp, OPTAB_LIB_WIDEN);
8606 }
8607 else if (singleton == TREE_OPERAND (exp, 1))
8608 TREE_OPERAND (exp, 0)
8609 = invert_truthvalue (TREE_OPERAND (exp, 0));
8610 }
8611
8612 do_pending_stack_adjust ();
8613 NO_DEFER_POP;
8614 op0 = gen_label_rtx ();
8615
8616 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8617 {
8618 if (temp != 0)
8619 {
8620 /* If the target conflicts with the other operand of the
8621 binary op, we can't use it. Also, we can't use the target
8622 if it is a hard register, because evaluating the condition
8623 might clobber it. */
8624 if ((binary_op
8625 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8626 || (GET_CODE (temp) == REG
8627 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8628 temp = gen_reg_rtx (mode);
8629 store_expr (singleton, temp, 0);
8630 }
8631 else
8632 expand_expr (singleton,
8633 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8634 if (singleton == TREE_OPERAND (exp, 1))
8635 jumpif (TREE_OPERAND (exp, 0), op0);
8636 else
8637 jumpifnot (TREE_OPERAND (exp, 0), op0);
8638
8639 start_cleanup_deferral ();
8640 if (binary_op && temp == 0)
8641 /* Just touch the other operand. */
8642 expand_expr (TREE_OPERAND (binary_op, 1),
8643 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8644 else if (binary_op)
8645 store_expr (build (TREE_CODE (binary_op), type,
8646 make_tree (type, temp),
8647 TREE_OPERAND (binary_op, 1)),
8648 temp, 0);
8649 else
8650 store_expr (build1 (TREE_CODE (unary_op), type,
8651 make_tree (type, temp)),
8652 temp, 0);
8653 op1 = op0;
8654 }
8655 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8656 comparison operator. If we have one of these cases, set the
8657 output to A, branch on A (cse will merge these two references),
8658 then set the output to FOO. */
8659 else if (temp
8660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8661 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8663 TREE_OPERAND (exp, 1), 0)
8664 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8665 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8666 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8667 {
8668 if (GET_CODE (temp) == REG
8669 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8670 temp = gen_reg_rtx (mode);
8671 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8672 jumpif (TREE_OPERAND (exp, 0), op0);
8673
8674 start_cleanup_deferral ();
8675 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8676 op1 = op0;
8677 }
8678 else if (temp
8679 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8680 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8681 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8682 TREE_OPERAND (exp, 2), 0)
8683 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8684 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8685 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8686 {
8687 if (GET_CODE (temp) == REG
8688 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8689 temp = gen_reg_rtx (mode);
8690 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8691 jumpifnot (TREE_OPERAND (exp, 0), op0);
8692
8693 start_cleanup_deferral ();
8694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8695 op1 = op0;
8696 }
8697 else
8698 {
8699 op1 = gen_label_rtx ();
8700 jumpifnot (TREE_OPERAND (exp, 0), op0);
8701
8702 start_cleanup_deferral ();
8703
8704 /* One branch of the cond can be void, if it never returns. For
8705 example A ? throw : E */
8706 if (temp != 0
8707 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8708 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8709 else
8710 expand_expr (TREE_OPERAND (exp, 1),
8711 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8712 end_cleanup_deferral ();
8713 emit_queue ();
8714 emit_jump_insn (gen_jump (op1));
8715 emit_barrier ();
8716 emit_label (op0);
8717 start_cleanup_deferral ();
8718 if (temp != 0
8719 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8720 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8721 else
8722 expand_expr (TREE_OPERAND (exp, 2),
8723 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8724 }
8725
8726 end_cleanup_deferral ();
8727
8728 emit_queue ();
8729 emit_label (op1);
8730 OK_DEFER_POP;
8731
8732 return temp;
8733 }
8734
8735 case TARGET_EXPR:
8736 {
8737 /* Something needs to be initialized, but we didn't know
8738 where that thing was when building the tree. For example,
8739 it could be the return value of a function, or a parameter
8740 to a function which lays down in the stack, or a temporary
8741 variable which must be passed by reference.
8742
8743 We guarantee that the expression will either be constructed
8744 or copied into our original target. */
8745
8746 tree slot = TREE_OPERAND (exp, 0);
8747 tree cleanups = NULL_TREE;
8748 tree exp1;
8749
8750 if (TREE_CODE (slot) != VAR_DECL)
8751 abort ();
8752
8753 if (! ignore)
8754 target = original_target;
8755
8756 /* Set this here so that if we get a target that refers to a
8757 register variable that's already been used, put_reg_into_stack
8758 knows that it should fix up those uses. */
8759 TREE_USED (slot) = 1;
8760
8761 if (target == 0)
8762 {
8763 if (DECL_RTL_SET_P (slot))
8764 {
8765 target = DECL_RTL (slot);
8766 /* If we have already expanded the slot, so don't do
8767 it again. (mrs) */
8768 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8769 return target;
8770 }
8771 else
8772 {
8773 target = assign_temp (type, 2, 0, 1);
8774 /* All temp slots at this level must not conflict. */
8775 preserve_temp_slots (target);
8776 SET_DECL_RTL (slot, target);
8777 if (TREE_ADDRESSABLE (slot))
8778 put_var_into_stack (slot);
8779
8780 /* Since SLOT is not known to the called function
8781 to belong to its stack frame, we must build an explicit
8782 cleanup. This case occurs when we must build up a reference
8783 to pass the reference as an argument. In this case,
8784 it is very likely that such a reference need not be
8785 built here. */
8786
8787 if (TREE_OPERAND (exp, 2) == 0)
8788 TREE_OPERAND (exp, 2)
8789 = (*lang_hooks.maybe_build_cleanup) (slot);
8790 cleanups = TREE_OPERAND (exp, 2);
8791 }
8792 }
8793 else
8794 {
8795 /* This case does occur, when expanding a parameter which
8796 needs to be constructed on the stack. The target
8797 is the actual stack address that we want to initialize.
8798 The function we call will perform the cleanup in this case. */
8799
8800 /* If we have already assigned it space, use that space,
8801 not target that we were passed in, as our target
8802 parameter is only a hint. */
8803 if (DECL_RTL_SET_P (slot))
8804 {
8805 target = DECL_RTL (slot);
8806 /* If we have already expanded the slot, so don't do
8807 it again. (mrs) */
8808 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8809 return target;
8810 }
8811 else
8812 {
8813 SET_DECL_RTL (slot, target);
8814 /* If we must have an addressable slot, then make sure that
8815 the RTL that we just stored in slot is OK. */
8816 if (TREE_ADDRESSABLE (slot))
8817 put_var_into_stack (slot);
8818 }
8819 }
8820
8821 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8822 /* Mark it as expanded. */
8823 TREE_OPERAND (exp, 1) = NULL_TREE;
8824
8825 store_expr (exp1, target, 0);
8826
8827 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8828
8829 return target;
8830 }
8831
8832 case INIT_EXPR:
8833 {
8834 tree lhs = TREE_OPERAND (exp, 0);
8835 tree rhs = TREE_OPERAND (exp, 1);
8836
8837 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8838 return temp;
8839 }
8840
8841 case MODIFY_EXPR:
8842 {
8843 /* If lhs is complex, expand calls in rhs before computing it.
8844 That's so we don't compute a pointer and save it over a
8845 call. If lhs is simple, compute it first so we can give it
8846 as a target if the rhs is just a call. This avoids an
8847 extra temp and copy and that prevents a partial-subsumption
8848 which makes bad code. Actually we could treat
8849 component_ref's of vars like vars. */
8850
8851 tree lhs = TREE_OPERAND (exp, 0);
8852 tree rhs = TREE_OPERAND (exp, 1);
8853
8854 temp = 0;
8855
8856 /* Check for |= or &= of a bitfield of size one into another bitfield
8857 of size 1. In this case, (unless we need the result of the
8858 assignment) we can do this more efficiently with a
8859 test followed by an assignment, if necessary.
8860
8861 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8862 things change so we do, this code should be enhanced to
8863 support it. */
8864 if (ignore
8865 && TREE_CODE (lhs) == COMPONENT_REF
8866 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8867 || TREE_CODE (rhs) == BIT_AND_EXPR)
8868 && TREE_OPERAND (rhs, 0) == lhs
8869 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8870 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8871 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8872 {
8873 rtx label = gen_label_rtx ();
8874
8875 do_jump (TREE_OPERAND (rhs, 1),
8876 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8877 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8878 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8879 (TREE_CODE (rhs) == BIT_IOR_EXPR
8880 ? integer_one_node
8881 : integer_zero_node)),
8882 0, 0);
8883 do_pending_stack_adjust ();
8884 emit_label (label);
8885 return const0_rtx;
8886 }
8887
8888 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8889
8890 return temp;
8891 }
8892
8893 case RETURN_EXPR:
8894 if (!TREE_OPERAND (exp, 0))
8895 expand_null_return ();
8896 else
8897 expand_return (TREE_OPERAND (exp, 0));
8898 return const0_rtx;
8899
8900 case PREINCREMENT_EXPR:
8901 case PREDECREMENT_EXPR:
8902 return expand_increment (exp, 0, ignore);
8903
8904 case POSTINCREMENT_EXPR:
8905 case POSTDECREMENT_EXPR:
8906 /* Faster to treat as pre-increment if result is not used. */
8907 return expand_increment (exp, ! ignore, ignore);
8908
8909 case ADDR_EXPR:
8910 /* Are we taking the address of a nested function? */
8911 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8912 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8913 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8914 && ! TREE_STATIC (exp))
8915 {
8916 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8917 op0 = force_operand (op0, target);
8918 }
8919 /* If we are taking the address of something erroneous, just
8920 return a zero. */
8921 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8922 return const0_rtx;
8923 /* If we are taking the address of a constant and are at the
8924 top level, we have to use output_constant_def since we can't
8925 call force_const_mem at top level. */
8926 else if (cfun == 0
8927 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8928 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8929 == 'c')))
8930 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8931 else
8932 {
8933 /* We make sure to pass const0_rtx down if we came in with
8934 ignore set, to avoid doing the cleanups twice for something. */
8935 op0 = expand_expr (TREE_OPERAND (exp, 0),
8936 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8937 (modifier == EXPAND_INITIALIZER
8938 ? modifier : EXPAND_CONST_ADDRESS));
8939
8940 /* If we are going to ignore the result, OP0 will have been set
8941 to const0_rtx, so just return it. Don't get confused and
8942 think we are taking the address of the constant. */
8943 if (ignore)
8944 return op0;
8945
8946 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8947 clever and returns a REG when given a MEM. */
8948 op0 = protect_from_queue (op0, 1);
8949
8950 /* We would like the object in memory. If it is a constant, we can
8951 have it be statically allocated into memory. For a non-constant,
8952 we need to allocate some memory and store the value into it. */
8953
8954 if (CONSTANT_P (op0))
8955 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8956 op0);
8957 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8958 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8959 || GET_CODE (op0) == PARALLEL)
8960 {
8961 /* If the operand is a SAVE_EXPR, we can deal with this by
8962 forcing the SAVE_EXPR into memory. */
8963 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8964 {
8965 put_var_into_stack (TREE_OPERAND (exp, 0));
8966 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8967 }
8968 else
8969 {
8970 /* If this object is in a register, it can't be BLKmode. */
8971 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8972 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8973
8974 if (GET_CODE (op0) == PARALLEL)
8975 /* Handle calls that pass values in multiple
8976 non-contiguous locations. The Irix 6 ABI has examples
8977 of this. */
8978 emit_group_store (memloc, op0,
8979 int_size_in_bytes (inner_type));
8980 else
8981 emit_move_insn (memloc, op0);
8982
8983 op0 = memloc;
8984 }
8985 }
8986
8987 if (GET_CODE (op0) != MEM)
8988 abort ();
8989
8990 mark_temp_addr_taken (op0);
8991 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8992 {
8993 op0 = XEXP (op0, 0);
8994 #ifdef POINTERS_EXTEND_UNSIGNED
8995 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8996 && mode == ptr_mode)
8997 op0 = convert_memory_address (ptr_mode, op0);
8998 #endif
8999 return op0;
9000 }
9001
9002 /* If OP0 is not aligned as least as much as the type requires, we
9003 need to make a temporary, copy OP0 to it, and take the address of
9004 the temporary. We want to use the alignment of the type, not of
9005 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9006 the test for BLKmode means that can't happen. The test for
9007 BLKmode is because we never make mis-aligned MEMs with
9008 non-BLKmode.
9009
9010 We don't need to do this at all if the machine doesn't have
9011 strict alignment. */
9012 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9013 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9014 > MEM_ALIGN (op0))
9015 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9016 {
9017 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9018 rtx new
9019 = assign_stack_temp_for_type
9020 (TYPE_MODE (inner_type),
9021 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9022 : int_size_in_bytes (inner_type),
9023 1, build_qualified_type (inner_type,
9024 (TYPE_QUALS (inner_type)
9025 | TYPE_QUAL_CONST)));
9026
9027 if (TYPE_ALIGN_OK (inner_type))
9028 abort ();
9029
9030 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9031 BLOCK_OP_NORMAL);
9032 op0 = new;
9033 }
9034
9035 op0 = force_operand (XEXP (op0, 0), target);
9036 }
9037
9038 if (flag_force_addr
9039 && GET_CODE (op0) != REG
9040 && modifier != EXPAND_CONST_ADDRESS
9041 && modifier != EXPAND_INITIALIZER
9042 && modifier != EXPAND_SUM)
9043 op0 = force_reg (Pmode, op0);
9044
9045 if (GET_CODE (op0) == REG
9046 && ! REG_USERVAR_P (op0))
9047 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9048
9049 #ifdef POINTERS_EXTEND_UNSIGNED
9050 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9051 && mode == ptr_mode)
9052 op0 = convert_memory_address (ptr_mode, op0);
9053 #endif
9054
9055 return op0;
9056
9057 case ENTRY_VALUE_EXPR:
9058 abort ();
9059
9060 /* COMPLEX type for Extended Pascal & Fortran */
9061 case COMPLEX_EXPR:
9062 {
9063 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9064 rtx insns;
9065
9066 /* Get the rtx code of the operands. */
9067 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9068 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9069
9070 if (! target)
9071 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9072
9073 start_sequence ();
9074
9075 /* Move the real (op0) and imaginary (op1) parts to their location. */
9076 emit_move_insn (gen_realpart (mode, target), op0);
9077 emit_move_insn (gen_imagpart (mode, target), op1);
9078
9079 insns = get_insns ();
9080 end_sequence ();
9081
9082 /* Complex construction should appear as a single unit. */
9083 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9084 each with a separate pseudo as destination.
9085 It's not correct for flow to treat them as a unit. */
9086 if (GET_CODE (target) != CONCAT)
9087 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9088 else
9089 emit_insn (insns);
9090
9091 return target;
9092 }
9093
9094 case REALPART_EXPR:
9095 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9096 return gen_realpart (mode, op0);
9097
9098 case IMAGPART_EXPR:
9099 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9100 return gen_imagpart (mode, op0);
9101
9102 case CONJ_EXPR:
9103 {
9104 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9105 rtx imag_t;
9106 rtx insns;
9107
9108 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9109
9110 if (! target)
9111 target = gen_reg_rtx (mode);
9112
9113 start_sequence ();
9114
9115 /* Store the realpart and the negated imagpart to target. */
9116 emit_move_insn (gen_realpart (partmode, target),
9117 gen_realpart (partmode, op0));
9118
9119 imag_t = gen_imagpart (partmode, target);
9120 temp = expand_unop (partmode,
9121 ! unsignedp && flag_trapv
9122 && (GET_MODE_CLASS(partmode) == MODE_INT)
9123 ? negv_optab : neg_optab,
9124 gen_imagpart (partmode, op0), imag_t, 0);
9125 if (temp != imag_t)
9126 emit_move_insn (imag_t, temp);
9127
9128 insns = get_insns ();
9129 end_sequence ();
9130
9131 /* Conjugate should appear as a single unit
9132 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9133 each with a separate pseudo as destination.
9134 It's not correct for flow to treat them as a unit. */
9135 if (GET_CODE (target) != CONCAT)
9136 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9137 else
9138 emit_insn (insns);
9139
9140 return target;
9141 }
9142
9143 case TRY_CATCH_EXPR:
9144 {
9145 tree handler = TREE_OPERAND (exp, 1);
9146
9147 expand_eh_region_start ();
9148
9149 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9150
9151 expand_eh_region_end_cleanup (handler);
9152
9153 return op0;
9154 }
9155
9156 case TRY_FINALLY_EXPR:
9157 {
9158 tree try_block = TREE_OPERAND (exp, 0);
9159 tree finally_block = TREE_OPERAND (exp, 1);
9160
9161 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9162 {
9163 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9164 is not sufficient, so we cannot expand the block twice.
9165 So we play games with GOTO_SUBROUTINE_EXPR to let us
9166 expand the thing only once. */
9167 /* When not optimizing, we go ahead with this form since
9168 (1) user breakpoints operate more predictably without
9169 code duplication, and
9170 (2) we're not running any of the global optimizers
9171 that would explode in time/space with the highly
9172 connected CFG created by the indirect branching. */
9173
9174 rtx finally_label = gen_label_rtx ();
9175 rtx done_label = gen_label_rtx ();
9176 rtx return_link = gen_reg_rtx (Pmode);
9177 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9178 (tree) finally_label, (tree) return_link);
9179 TREE_SIDE_EFFECTS (cleanup) = 1;
9180
9181 /* Start a new binding layer that will keep track of all cleanup
9182 actions to be performed. */
9183 expand_start_bindings (2);
9184 target_temp_slot_level = temp_slot_level;
9185
9186 expand_decl_cleanup (NULL_TREE, cleanup);
9187 op0 = expand_expr (try_block, target, tmode, modifier);
9188
9189 preserve_temp_slots (op0);
9190 expand_end_bindings (NULL_TREE, 0, 0);
9191 emit_jump (done_label);
9192 emit_label (finally_label);
9193 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9194 emit_indirect_jump (return_link);
9195 emit_label (done_label);
9196 }
9197 else
9198 {
9199 expand_start_bindings (2);
9200 target_temp_slot_level = temp_slot_level;
9201
9202 expand_decl_cleanup (NULL_TREE, finally_block);
9203 op0 = expand_expr (try_block, target, tmode, modifier);
9204
9205 preserve_temp_slots (op0);
9206 expand_end_bindings (NULL_TREE, 0, 0);
9207 }
9208
9209 return op0;
9210 }
9211
9212 case GOTO_SUBROUTINE_EXPR:
9213 {
9214 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9215 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9216 rtx return_address = gen_label_rtx ();
9217 emit_move_insn (return_link,
9218 gen_rtx_LABEL_REF (Pmode, return_address));
9219 emit_jump (subr);
9220 emit_label (return_address);
9221 return const0_rtx;
9222 }
9223
9224 case VA_ARG_EXPR:
9225 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9226
9227 case EXC_PTR_EXPR:
9228 return get_exception_pointer (cfun);
9229
9230 case FDESC_EXPR:
9231 /* Function descriptors are not valid except for as
9232 initialization constants, and should not be expanded. */
9233 abort ();
9234
9235 default:
9236 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9237 }
9238
9239 /* Here to do an ordinary binary operator, generating an instruction
9240 from the optab already placed in `this_optab'. */
9241 binop:
9242 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9243 subtarget = 0;
9244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9245 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9246 binop2:
9247 temp = expand_binop (mode, this_optab, op0, op1, target,
9248 unsignedp, OPTAB_LIB_WIDEN);
9249 if (temp == 0)
9250 abort ();
9251 return temp;
9252 }
9253 \f
9254 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9255 when applied to the address of EXP produces an address known to be
9256 aligned more than BIGGEST_ALIGNMENT. */
9257
9258 static int
9259 is_aligning_offset (offset, exp)
9260 tree offset;
9261 tree exp;
9262 {
9263 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9264 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9265 || TREE_CODE (offset) == NOP_EXPR
9266 || TREE_CODE (offset) == CONVERT_EXPR
9267 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9268 offset = TREE_OPERAND (offset, 0);
9269
9270 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9271 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9272 if (TREE_CODE (offset) != BIT_AND_EXPR
9273 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9274 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9275 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9276 return 0;
9277
9278 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9279 It must be NEGATE_EXPR. Then strip any more conversions. */
9280 offset = TREE_OPERAND (offset, 0);
9281 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9282 || TREE_CODE (offset) == NOP_EXPR
9283 || TREE_CODE (offset) == CONVERT_EXPR)
9284 offset = TREE_OPERAND (offset, 0);
9285
9286 if (TREE_CODE (offset) != NEGATE_EXPR)
9287 return 0;
9288
9289 offset = TREE_OPERAND (offset, 0);
9290 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9291 || TREE_CODE (offset) == NOP_EXPR
9292 || TREE_CODE (offset) == CONVERT_EXPR)
9293 offset = TREE_OPERAND (offset, 0);
9294
9295 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9296 whose type is the same as EXP. */
9297 return (TREE_CODE (offset) == ADDR_EXPR
9298 && (TREE_OPERAND (offset, 0) == exp
9299 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9300 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9301 == TREE_TYPE (exp)))));
9302 }
9303 \f
9304 /* Return the tree node if an ARG corresponds to a string constant or zero
9305 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9306 in bytes within the string that ARG is accessing. The type of the
9307 offset will be `sizetype'. */
9308
9309 tree
9310 string_constant (arg, ptr_offset)
9311 tree arg;
9312 tree *ptr_offset;
9313 {
9314 STRIP_NOPS (arg);
9315
9316 if (TREE_CODE (arg) == ADDR_EXPR
9317 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9318 {
9319 *ptr_offset = size_zero_node;
9320 return TREE_OPERAND (arg, 0);
9321 }
9322 else if (TREE_CODE (arg) == PLUS_EXPR)
9323 {
9324 tree arg0 = TREE_OPERAND (arg, 0);
9325 tree arg1 = TREE_OPERAND (arg, 1);
9326
9327 STRIP_NOPS (arg0);
9328 STRIP_NOPS (arg1);
9329
9330 if (TREE_CODE (arg0) == ADDR_EXPR
9331 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9332 {
9333 *ptr_offset = convert (sizetype, arg1);
9334 return TREE_OPERAND (arg0, 0);
9335 }
9336 else if (TREE_CODE (arg1) == ADDR_EXPR
9337 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9338 {
9339 *ptr_offset = convert (sizetype, arg0);
9340 return TREE_OPERAND (arg1, 0);
9341 }
9342 }
9343
9344 return 0;
9345 }
9346 \f
9347 /* Expand code for a post- or pre- increment or decrement
9348 and return the RTX for the result.
9349 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9350
9351 static rtx
9352 expand_increment (exp, post, ignore)
9353 tree exp;
9354 int post, ignore;
9355 {
9356 rtx op0, op1;
9357 rtx temp, value;
9358 tree incremented = TREE_OPERAND (exp, 0);
9359 optab this_optab = add_optab;
9360 int icode;
9361 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9362 int op0_is_copy = 0;
9363 int single_insn = 0;
9364 /* 1 means we can't store into OP0 directly,
9365 because it is a subreg narrower than a word,
9366 and we don't dare clobber the rest of the word. */
9367 int bad_subreg = 0;
9368
9369 /* Stabilize any component ref that might need to be
9370 evaluated more than once below. */
9371 if (!post
9372 || TREE_CODE (incremented) == BIT_FIELD_REF
9373 || (TREE_CODE (incremented) == COMPONENT_REF
9374 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9375 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9376 incremented = stabilize_reference (incremented);
9377 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9378 ones into save exprs so that they don't accidentally get evaluated
9379 more than once by the code below. */
9380 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9381 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9382 incremented = save_expr (incremented);
9383
9384 /* Compute the operands as RTX.
9385 Note whether OP0 is the actual lvalue or a copy of it:
9386 I believe it is a copy iff it is a register or subreg
9387 and insns were generated in computing it. */
9388
9389 temp = get_last_insn ();
9390 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9391
9392 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9393 in place but instead must do sign- or zero-extension during assignment,
9394 so we copy it into a new register and let the code below use it as
9395 a copy.
9396
9397 Note that we can safely modify this SUBREG since it is know not to be
9398 shared (it was made by the expand_expr call above). */
9399
9400 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9401 {
9402 if (post)
9403 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9404 else
9405 bad_subreg = 1;
9406 }
9407 else if (GET_CODE (op0) == SUBREG
9408 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9409 {
9410 /* We cannot increment this SUBREG in place. If we are
9411 post-incrementing, get a copy of the old value. Otherwise,
9412 just mark that we cannot increment in place. */
9413 if (post)
9414 op0 = copy_to_reg (op0);
9415 else
9416 bad_subreg = 1;
9417 }
9418
9419 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9420 && temp != get_last_insn ());
9421 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9422
9423 /* Decide whether incrementing or decrementing. */
9424 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9425 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9426 this_optab = sub_optab;
9427
9428 /* Convert decrement by a constant into a negative increment. */
9429 if (this_optab == sub_optab
9430 && GET_CODE (op1) == CONST_INT)
9431 {
9432 op1 = GEN_INT (-INTVAL (op1));
9433 this_optab = add_optab;
9434 }
9435
9436 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9437 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9438
9439 /* For a preincrement, see if we can do this with a single instruction. */
9440 if (!post)
9441 {
9442 icode = (int) this_optab->handlers[(int) mode].insn_code;
9443 if (icode != (int) CODE_FOR_nothing
9444 /* Make sure that OP0 is valid for operands 0 and 1
9445 of the insn we want to queue. */
9446 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9447 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9448 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9449 single_insn = 1;
9450 }
9451
9452 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9453 then we cannot just increment OP0. We must therefore contrive to
9454 increment the original value. Then, for postincrement, we can return
9455 OP0 since it is a copy of the old value. For preincrement, expand here
9456 unless we can do it with a single insn.
9457
9458 Likewise if storing directly into OP0 would clobber high bits
9459 we need to preserve (bad_subreg). */
9460 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9461 {
9462 /* This is the easiest way to increment the value wherever it is.
9463 Problems with multiple evaluation of INCREMENTED are prevented
9464 because either (1) it is a component_ref or preincrement,
9465 in which case it was stabilized above, or (2) it is an array_ref
9466 with constant index in an array in a register, which is
9467 safe to reevaluate. */
9468 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9469 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9470 ? MINUS_EXPR : PLUS_EXPR),
9471 TREE_TYPE (exp),
9472 incremented,
9473 TREE_OPERAND (exp, 1));
9474
9475 while (TREE_CODE (incremented) == NOP_EXPR
9476 || TREE_CODE (incremented) == CONVERT_EXPR)
9477 {
9478 newexp = convert (TREE_TYPE (incremented), newexp);
9479 incremented = TREE_OPERAND (incremented, 0);
9480 }
9481
9482 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9483 return post ? op0 : temp;
9484 }
9485
9486 if (post)
9487 {
9488 /* We have a true reference to the value in OP0.
9489 If there is an insn to add or subtract in this mode, queue it.
9490 Queueing the increment insn avoids the register shuffling
9491 that often results if we must increment now and first save
9492 the old value for subsequent use. */
9493
9494 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9495 op0 = stabilize (op0);
9496 #endif
9497
9498 icode = (int) this_optab->handlers[(int) mode].insn_code;
9499 if (icode != (int) CODE_FOR_nothing
9500 /* Make sure that OP0 is valid for operands 0 and 1
9501 of the insn we want to queue. */
9502 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9503 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9504 {
9505 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9506 op1 = force_reg (mode, op1);
9507
9508 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9509 }
9510 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9511 {
9512 rtx addr = (general_operand (XEXP (op0, 0), mode)
9513 ? force_reg (Pmode, XEXP (op0, 0))
9514 : copy_to_reg (XEXP (op0, 0)));
9515 rtx temp, result;
9516
9517 op0 = replace_equiv_address (op0, addr);
9518 temp = force_reg (GET_MODE (op0), op0);
9519 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9520 op1 = force_reg (mode, op1);
9521
9522 /* The increment queue is LIFO, thus we have to `queue'
9523 the instructions in reverse order. */
9524 enqueue_insn (op0, gen_move_insn (op0, temp));
9525 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9526 return result;
9527 }
9528 }
9529
9530 /* Preincrement, or we can't increment with one simple insn. */
9531 if (post)
9532 /* Save a copy of the value before inc or dec, to return it later. */
9533 temp = value = copy_to_reg (op0);
9534 else
9535 /* Arrange to return the incremented value. */
9536 /* Copy the rtx because expand_binop will protect from the queue,
9537 and the results of that would be invalid for us to return
9538 if our caller does emit_queue before using our result. */
9539 temp = copy_rtx (value = op0);
9540
9541 /* Increment however we can. */
9542 op1 = expand_binop (mode, this_optab, value, op1, op0,
9543 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9544
9545 /* Make sure the value is stored into OP0. */
9546 if (op1 != op0)
9547 emit_move_insn (op0, op1);
9548
9549 return temp;
9550 }
9551 \f
9552 /* At the start of a function, record that we have no previously-pushed
9553 arguments waiting to be popped. */
9554
9555 void
9556 init_pending_stack_adjust ()
9557 {
9558 pending_stack_adjust = 0;
9559 }
9560
9561 /* When exiting from function, if safe, clear out any pending stack adjust
9562 so the adjustment won't get done.
9563
9564 Note, if the current function calls alloca, then it must have a
9565 frame pointer regardless of the value of flag_omit_frame_pointer. */
9566
9567 void
9568 clear_pending_stack_adjust ()
9569 {
9570 #ifdef EXIT_IGNORE_STACK
9571 if (optimize > 0
9572 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9573 && EXIT_IGNORE_STACK
9574 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9575 && ! flag_inline_functions)
9576 {
9577 stack_pointer_delta -= pending_stack_adjust,
9578 pending_stack_adjust = 0;
9579 }
9580 #endif
9581 }
9582
9583 /* Pop any previously-pushed arguments that have not been popped yet. */
9584
9585 void
9586 do_pending_stack_adjust ()
9587 {
9588 if (inhibit_defer_pop == 0)
9589 {
9590 if (pending_stack_adjust != 0)
9591 adjust_stack (GEN_INT (pending_stack_adjust));
9592 pending_stack_adjust = 0;
9593 }
9594 }
9595 \f
9596 /* Expand conditional expressions. */
9597
9598 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9599 LABEL is an rtx of code CODE_LABEL, in this function and all the
9600 functions here. */
9601
9602 void
9603 jumpifnot (exp, label)
9604 tree exp;
9605 rtx label;
9606 {
9607 do_jump (exp, label, NULL_RTX);
9608 }
9609
9610 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9611
9612 void
9613 jumpif (exp, label)
9614 tree exp;
9615 rtx label;
9616 {
9617 do_jump (exp, NULL_RTX, label);
9618 }
9619
9620 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9621 the result is zero, or IF_TRUE_LABEL if the result is one.
9622 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9623 meaning fall through in that case.
9624
9625 do_jump always does any pending stack adjust except when it does not
9626 actually perform a jump. An example where there is no jump
9627 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9628
9629 This function is responsible for optimizing cases such as
9630 &&, || and comparison operators in EXP. */
9631
9632 void
9633 do_jump (exp, if_false_label, if_true_label)
9634 tree exp;
9635 rtx if_false_label, if_true_label;
9636 {
9637 enum tree_code code = TREE_CODE (exp);
9638 /* Some cases need to create a label to jump to
9639 in order to properly fall through.
9640 These cases set DROP_THROUGH_LABEL nonzero. */
9641 rtx drop_through_label = 0;
9642 rtx temp;
9643 int i;
9644 tree type;
9645 enum machine_mode mode;
9646
9647 #ifdef MAX_INTEGER_COMPUTATION_MODE
9648 check_max_integer_computation_mode (exp);
9649 #endif
9650
9651 emit_queue ();
9652
9653 switch (code)
9654 {
9655 case ERROR_MARK:
9656 break;
9657
9658 case INTEGER_CST:
9659 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9660 if (temp)
9661 emit_jump (temp);
9662 break;
9663
9664 #if 0
9665 /* This is not true with #pragma weak */
9666 case ADDR_EXPR:
9667 /* The address of something can never be zero. */
9668 if (if_true_label)
9669 emit_jump (if_true_label);
9670 break;
9671 #endif
9672
9673 case NOP_EXPR:
9674 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9675 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9676 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9677 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9678 goto normal;
9679 case CONVERT_EXPR:
9680 /* If we are narrowing the operand, we have to do the compare in the
9681 narrower mode. */
9682 if ((TYPE_PRECISION (TREE_TYPE (exp))
9683 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9684 goto normal;
9685 case NON_LVALUE_EXPR:
9686 case REFERENCE_EXPR:
9687 case ABS_EXPR:
9688 case NEGATE_EXPR:
9689 case LROTATE_EXPR:
9690 case RROTATE_EXPR:
9691 /* These cannot change zero->nonzero or vice versa. */
9692 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9693 break;
9694
9695 case WITH_RECORD_EXPR:
9696 /* Put the object on the placeholder list, recurse through our first
9697 operand, and pop the list. */
9698 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9699 placeholder_list);
9700 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9701 placeholder_list = TREE_CHAIN (placeholder_list);
9702 break;
9703
9704 #if 0
9705 /* This is never less insns than evaluating the PLUS_EXPR followed by
9706 a test and can be longer if the test is eliminated. */
9707 case PLUS_EXPR:
9708 /* Reduce to minus. */
9709 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9710 TREE_OPERAND (exp, 0),
9711 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9712 TREE_OPERAND (exp, 1))));
9713 /* Process as MINUS. */
9714 #endif
9715
9716 case MINUS_EXPR:
9717 /* Nonzero iff operands of minus differ. */
9718 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9719 TREE_OPERAND (exp, 0),
9720 TREE_OPERAND (exp, 1)),
9721 NE, NE, if_false_label, if_true_label);
9722 break;
9723
9724 case BIT_AND_EXPR:
9725 /* If we are AND'ing with a small constant, do this comparison in the
9726 smallest type that fits. If the machine doesn't have comparisons
9727 that small, it will be converted back to the wider comparison.
9728 This helps if we are testing the sign bit of a narrower object.
9729 combine can't do this for us because it can't know whether a
9730 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9731
9732 if (! SLOW_BYTE_ACCESS
9733 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9734 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9735 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9736 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9737 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9738 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9739 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9740 != CODE_FOR_nothing))
9741 {
9742 do_jump (convert (type, exp), if_false_label, if_true_label);
9743 break;
9744 }
9745 goto normal;
9746
9747 case TRUTH_NOT_EXPR:
9748 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9749 break;
9750
9751 case TRUTH_ANDIF_EXPR:
9752 if (if_false_label == 0)
9753 if_false_label = drop_through_label = gen_label_rtx ();
9754 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9755 start_cleanup_deferral ();
9756 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9757 end_cleanup_deferral ();
9758 break;
9759
9760 case TRUTH_ORIF_EXPR:
9761 if (if_true_label == 0)
9762 if_true_label = drop_through_label = gen_label_rtx ();
9763 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9764 start_cleanup_deferral ();
9765 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9766 end_cleanup_deferral ();
9767 break;
9768
9769 case COMPOUND_EXPR:
9770 push_temp_slots ();
9771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9772 preserve_temp_slots (NULL_RTX);
9773 free_temp_slots ();
9774 pop_temp_slots ();
9775 emit_queue ();
9776 do_pending_stack_adjust ();
9777 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9778 break;
9779
9780 case COMPONENT_REF:
9781 case BIT_FIELD_REF:
9782 case ARRAY_REF:
9783 case ARRAY_RANGE_REF:
9784 {
9785 HOST_WIDE_INT bitsize, bitpos;
9786 int unsignedp;
9787 enum machine_mode mode;
9788 tree type;
9789 tree offset;
9790 int volatilep = 0;
9791
9792 /* Get description of this reference. We don't actually care
9793 about the underlying object here. */
9794 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9795 &unsignedp, &volatilep);
9796
9797 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9798 if (! SLOW_BYTE_ACCESS
9799 && type != 0 && bitsize >= 0
9800 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9801 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9802 != CODE_FOR_nothing))
9803 {
9804 do_jump (convert (type, exp), if_false_label, if_true_label);
9805 break;
9806 }
9807 goto normal;
9808 }
9809
9810 case COND_EXPR:
9811 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9812 if (integer_onep (TREE_OPERAND (exp, 1))
9813 && integer_zerop (TREE_OPERAND (exp, 2)))
9814 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9815
9816 else if (integer_zerop (TREE_OPERAND (exp, 1))
9817 && integer_onep (TREE_OPERAND (exp, 2)))
9818 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9819
9820 else
9821 {
9822 rtx label1 = gen_label_rtx ();
9823 drop_through_label = gen_label_rtx ();
9824
9825 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9826
9827 start_cleanup_deferral ();
9828 /* Now the THEN-expression. */
9829 do_jump (TREE_OPERAND (exp, 1),
9830 if_false_label ? if_false_label : drop_through_label,
9831 if_true_label ? if_true_label : drop_through_label);
9832 /* In case the do_jump just above never jumps. */
9833 do_pending_stack_adjust ();
9834 emit_label (label1);
9835
9836 /* Now the ELSE-expression. */
9837 do_jump (TREE_OPERAND (exp, 2),
9838 if_false_label ? if_false_label : drop_through_label,
9839 if_true_label ? if_true_label : drop_through_label);
9840 end_cleanup_deferral ();
9841 }
9842 break;
9843
9844 case EQ_EXPR:
9845 {
9846 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9847
9848 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9849 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9850 {
9851 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9852 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9853 do_jump
9854 (fold
9855 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9856 fold (build (EQ_EXPR, TREE_TYPE (exp),
9857 fold (build1 (REALPART_EXPR,
9858 TREE_TYPE (inner_type),
9859 exp0)),
9860 fold (build1 (REALPART_EXPR,
9861 TREE_TYPE (inner_type),
9862 exp1)))),
9863 fold (build (EQ_EXPR, TREE_TYPE (exp),
9864 fold (build1 (IMAGPART_EXPR,
9865 TREE_TYPE (inner_type),
9866 exp0)),
9867 fold (build1 (IMAGPART_EXPR,
9868 TREE_TYPE (inner_type),
9869 exp1)))))),
9870 if_false_label, if_true_label);
9871 }
9872
9873 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9874 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9875
9876 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9877 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9878 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9879 else
9880 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9881 break;
9882 }
9883
9884 case NE_EXPR:
9885 {
9886 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9887
9888 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9889 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9890 {
9891 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9892 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9893 do_jump
9894 (fold
9895 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9896 fold (build (NE_EXPR, TREE_TYPE (exp),
9897 fold (build1 (REALPART_EXPR,
9898 TREE_TYPE (inner_type),
9899 exp0)),
9900 fold (build1 (REALPART_EXPR,
9901 TREE_TYPE (inner_type),
9902 exp1)))),
9903 fold (build (NE_EXPR, TREE_TYPE (exp),
9904 fold (build1 (IMAGPART_EXPR,
9905 TREE_TYPE (inner_type),
9906 exp0)),
9907 fold (build1 (IMAGPART_EXPR,
9908 TREE_TYPE (inner_type),
9909 exp1)))))),
9910 if_false_label, if_true_label);
9911 }
9912
9913 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9914 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9915
9916 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9917 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9918 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9919 else
9920 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9921 break;
9922 }
9923
9924 case LT_EXPR:
9925 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9926 if (GET_MODE_CLASS (mode) == MODE_INT
9927 && ! can_compare_p (LT, mode, ccp_jump))
9928 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9929 else
9930 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9931 break;
9932
9933 case LE_EXPR:
9934 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9935 if (GET_MODE_CLASS (mode) == MODE_INT
9936 && ! can_compare_p (LE, mode, ccp_jump))
9937 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9938 else
9939 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9940 break;
9941
9942 case GT_EXPR:
9943 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9944 if (GET_MODE_CLASS (mode) == MODE_INT
9945 && ! can_compare_p (GT, mode, ccp_jump))
9946 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9947 else
9948 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9949 break;
9950
9951 case GE_EXPR:
9952 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9953 if (GET_MODE_CLASS (mode) == MODE_INT
9954 && ! can_compare_p (GE, mode, ccp_jump))
9955 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9956 else
9957 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9958 break;
9959
9960 case UNORDERED_EXPR:
9961 case ORDERED_EXPR:
9962 {
9963 enum rtx_code cmp, rcmp;
9964 int do_rev;
9965
9966 if (code == UNORDERED_EXPR)
9967 cmp = UNORDERED, rcmp = ORDERED;
9968 else
9969 cmp = ORDERED, rcmp = UNORDERED;
9970 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9971
9972 do_rev = 0;
9973 if (! can_compare_p (cmp, mode, ccp_jump)
9974 && (can_compare_p (rcmp, mode, ccp_jump)
9975 /* If the target doesn't provide either UNORDERED or ORDERED
9976 comparisons, canonicalize on UNORDERED for the library. */
9977 || rcmp == UNORDERED))
9978 do_rev = 1;
9979
9980 if (! do_rev)
9981 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9982 else
9983 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9984 }
9985 break;
9986
9987 {
9988 enum rtx_code rcode1;
9989 enum tree_code tcode2;
9990
9991 case UNLT_EXPR:
9992 rcode1 = UNLT;
9993 tcode2 = LT_EXPR;
9994 goto unordered_bcc;
9995 case UNLE_EXPR:
9996 rcode1 = UNLE;
9997 tcode2 = LE_EXPR;
9998 goto unordered_bcc;
9999 case UNGT_EXPR:
10000 rcode1 = UNGT;
10001 tcode2 = GT_EXPR;
10002 goto unordered_bcc;
10003 case UNGE_EXPR:
10004 rcode1 = UNGE;
10005 tcode2 = GE_EXPR;
10006 goto unordered_bcc;
10007 case UNEQ_EXPR:
10008 rcode1 = UNEQ;
10009 tcode2 = EQ_EXPR;
10010 goto unordered_bcc;
10011
10012 unordered_bcc:
10013 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10014 if (can_compare_p (rcode1, mode, ccp_jump))
10015 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10016 if_true_label);
10017 else
10018 {
10019 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10020 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10021 tree cmp0, cmp1;
10022
10023 /* If the target doesn't support combined unordered
10024 compares, decompose into UNORDERED + comparison. */
10025 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10026 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10027 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10028 do_jump (exp, if_false_label, if_true_label);
10029 }
10030 }
10031 break;
10032
10033 /* Special case:
10034 __builtin_expect (<test>, 0) and
10035 __builtin_expect (<test>, 1)
10036
10037 We need to do this here, so that <test> is not converted to a SCC
10038 operation on machines that use condition code registers and COMPARE
10039 like the PowerPC, and then the jump is done based on whether the SCC
10040 operation produced a 1 or 0. */
10041 case CALL_EXPR:
10042 /* Check for a built-in function. */
10043 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10044 {
10045 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10046 tree arglist = TREE_OPERAND (exp, 1);
10047
10048 if (TREE_CODE (fndecl) == FUNCTION_DECL
10049 && DECL_BUILT_IN (fndecl)
10050 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10051 && arglist != NULL_TREE
10052 && TREE_CHAIN (arglist) != NULL_TREE)
10053 {
10054 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10055 if_true_label);
10056
10057 if (seq != NULL_RTX)
10058 {
10059 emit_insn (seq);
10060 return;
10061 }
10062 }
10063 }
10064 /* fall through and generate the normal code. */
10065
10066 default:
10067 normal:
10068 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10069 #if 0
10070 /* This is not needed any more and causes poor code since it causes
10071 comparisons and tests from non-SI objects to have different code
10072 sequences. */
10073 /* Copy to register to avoid generating bad insns by cse
10074 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10075 if (!cse_not_expected && GET_CODE (temp) == MEM)
10076 temp = copy_to_reg (temp);
10077 #endif
10078 do_pending_stack_adjust ();
10079 /* Do any postincrements in the expression that was tested. */
10080 emit_queue ();
10081
10082 if (GET_CODE (temp) == CONST_INT
10083 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10084 || GET_CODE (temp) == LABEL_REF)
10085 {
10086 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10087 if (target)
10088 emit_jump (target);
10089 }
10090 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10091 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10092 /* Note swapping the labels gives us not-equal. */
10093 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10094 else if (GET_MODE (temp) != VOIDmode)
10095 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10096 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10097 GET_MODE (temp), NULL_RTX,
10098 if_false_label, if_true_label);
10099 else
10100 abort ();
10101 }
10102
10103 if (drop_through_label)
10104 {
10105 /* If do_jump produces code that might be jumped around,
10106 do any stack adjusts from that code, before the place
10107 where control merges in. */
10108 do_pending_stack_adjust ();
10109 emit_label (drop_through_label);
10110 }
10111 }
10112 \f
10113 /* Given a comparison expression EXP for values too wide to be compared
10114 with one insn, test the comparison and jump to the appropriate label.
10115 The code of EXP is ignored; we always test GT if SWAP is 0,
10116 and LT if SWAP is 1. */
10117
10118 static void
10119 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10120 tree exp;
10121 int swap;
10122 rtx if_false_label, if_true_label;
10123 {
10124 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10125 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10126 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10127 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10128
10129 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10130 }
10131
10132 /* Compare OP0 with OP1, word at a time, in mode MODE.
10133 UNSIGNEDP says to do unsigned comparison.
10134 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10135
10136 void
10137 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10138 enum machine_mode mode;
10139 int unsignedp;
10140 rtx op0, op1;
10141 rtx if_false_label, if_true_label;
10142 {
10143 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10144 rtx drop_through_label = 0;
10145 int i;
10146
10147 if (! if_true_label || ! if_false_label)
10148 drop_through_label = gen_label_rtx ();
10149 if (! if_true_label)
10150 if_true_label = drop_through_label;
10151 if (! if_false_label)
10152 if_false_label = drop_through_label;
10153
10154 /* Compare a word at a time, high order first. */
10155 for (i = 0; i < nwords; i++)
10156 {
10157 rtx op0_word, op1_word;
10158
10159 if (WORDS_BIG_ENDIAN)
10160 {
10161 op0_word = operand_subword_force (op0, i, mode);
10162 op1_word = operand_subword_force (op1, i, mode);
10163 }
10164 else
10165 {
10166 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10167 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10168 }
10169
10170 /* All but high-order word must be compared as unsigned. */
10171 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10172 (unsignedp || i > 0), word_mode, NULL_RTX,
10173 NULL_RTX, if_true_label);
10174
10175 /* Consider lower words only if these are equal. */
10176 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10177 NULL_RTX, NULL_RTX, if_false_label);
10178 }
10179
10180 if (if_false_label)
10181 emit_jump (if_false_label);
10182 if (drop_through_label)
10183 emit_label (drop_through_label);
10184 }
10185
10186 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10187 with one insn, test the comparison and jump to the appropriate label. */
10188
10189 static void
10190 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10191 tree exp;
10192 rtx if_false_label, if_true_label;
10193 {
10194 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10195 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10197 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10198 int i;
10199 rtx drop_through_label = 0;
10200
10201 if (! if_false_label)
10202 drop_through_label = if_false_label = gen_label_rtx ();
10203
10204 for (i = 0; i < nwords; i++)
10205 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10206 operand_subword_force (op1, i, mode),
10207 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10208 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10209
10210 if (if_true_label)
10211 emit_jump (if_true_label);
10212 if (drop_through_label)
10213 emit_label (drop_through_label);
10214 }
10215 \f
10216 /* Jump according to whether OP0 is 0.
10217 We assume that OP0 has an integer mode that is too wide
10218 for the available compare insns. */
10219
10220 void
10221 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10222 rtx op0;
10223 rtx if_false_label, if_true_label;
10224 {
10225 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10226 rtx part;
10227 int i;
10228 rtx drop_through_label = 0;
10229
10230 /* The fastest way of doing this comparison on almost any machine is to
10231 "or" all the words and compare the result. If all have to be loaded
10232 from memory and this is a very wide item, it's possible this may
10233 be slower, but that's highly unlikely. */
10234
10235 part = gen_reg_rtx (word_mode);
10236 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10237 for (i = 1; i < nwords && part != 0; i++)
10238 part = expand_binop (word_mode, ior_optab, part,
10239 operand_subword_force (op0, i, GET_MODE (op0)),
10240 part, 1, OPTAB_WIDEN);
10241
10242 if (part != 0)
10243 {
10244 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10245 NULL_RTX, if_false_label, if_true_label);
10246
10247 return;
10248 }
10249
10250 /* If we couldn't do the "or" simply, do this with a series of compares. */
10251 if (! if_false_label)
10252 drop_through_label = if_false_label = gen_label_rtx ();
10253
10254 for (i = 0; i < nwords; i++)
10255 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10256 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10257 if_false_label, NULL_RTX);
10258
10259 if (if_true_label)
10260 emit_jump (if_true_label);
10261
10262 if (drop_through_label)
10263 emit_label (drop_through_label);
10264 }
10265 \f
10266 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10267 (including code to compute the values to be compared)
10268 and set (CC0) according to the result.
10269 The decision as to signed or unsigned comparison must be made by the caller.
10270
10271 We force a stack adjustment unless there are currently
10272 things pushed on the stack that aren't yet used.
10273
10274 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10275 compared. */
10276
10277 rtx
10278 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10279 rtx op0, op1;
10280 enum rtx_code code;
10281 int unsignedp;
10282 enum machine_mode mode;
10283 rtx size;
10284 {
10285 enum rtx_code ucode;
10286 rtx tem;
10287
10288 /* If one operand is constant, make it the second one. Only do this
10289 if the other operand is not constant as well. */
10290
10291 if (swap_commutative_operands_p (op0, op1))
10292 {
10293 tem = op0;
10294 op0 = op1;
10295 op1 = tem;
10296 code = swap_condition (code);
10297 }
10298
10299 if (flag_force_mem)
10300 {
10301 op0 = force_not_mem (op0);
10302 op1 = force_not_mem (op1);
10303 }
10304
10305 do_pending_stack_adjust ();
10306
10307 ucode = unsignedp ? unsigned_condition (code) : code;
10308 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10309 return tem;
10310
10311 #if 0
10312 /* There's no need to do this now that combine.c can eliminate lots of
10313 sign extensions. This can be less efficient in certain cases on other
10314 machines. */
10315
10316 /* If this is a signed equality comparison, we can do it as an
10317 unsigned comparison since zero-extension is cheaper than sign
10318 extension and comparisons with zero are done as unsigned. This is
10319 the case even on machines that can do fast sign extension, since
10320 zero-extension is easier to combine with other operations than
10321 sign-extension is. If we are comparing against a constant, we must
10322 convert it to what it would look like unsigned. */
10323 if ((code == EQ || code == NE) && ! unsignedp
10324 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10325 {
10326 if (GET_CODE (op1) == CONST_INT
10327 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10328 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10329 unsignedp = 1;
10330 }
10331 #endif
10332
10333 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10334
10335 #if HAVE_cc0
10336 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10337 #else
10338 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10339 #endif
10340 }
10341
10342 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10343 The decision as to signed or unsigned comparison must be made by the caller.
10344
10345 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10346 compared. */
10347
10348 void
10349 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10350 if_false_label, if_true_label)
10351 rtx op0, op1;
10352 enum rtx_code code;
10353 int unsignedp;
10354 enum machine_mode mode;
10355 rtx size;
10356 rtx if_false_label, if_true_label;
10357 {
10358 enum rtx_code ucode;
10359 rtx tem;
10360 int dummy_true_label = 0;
10361
10362 /* Reverse the comparison if that is safe and we want to jump if it is
10363 false. */
10364 if (! if_true_label && ! FLOAT_MODE_P (mode))
10365 {
10366 if_true_label = if_false_label;
10367 if_false_label = 0;
10368 code = reverse_condition (code);
10369 }
10370
10371 /* If one operand is constant, make it the second one. Only do this
10372 if the other operand is not constant as well. */
10373
10374 if (swap_commutative_operands_p (op0, op1))
10375 {
10376 tem = op0;
10377 op0 = op1;
10378 op1 = tem;
10379 code = swap_condition (code);
10380 }
10381
10382 if (flag_force_mem)
10383 {
10384 op0 = force_not_mem (op0);
10385 op1 = force_not_mem (op1);
10386 }
10387
10388 do_pending_stack_adjust ();
10389
10390 ucode = unsignedp ? unsigned_condition (code) : code;
10391 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10392 {
10393 if (tem == const_true_rtx)
10394 {
10395 if (if_true_label)
10396 emit_jump (if_true_label);
10397 }
10398 else
10399 {
10400 if (if_false_label)
10401 emit_jump (if_false_label);
10402 }
10403 return;
10404 }
10405
10406 #if 0
10407 /* There's no need to do this now that combine.c can eliminate lots of
10408 sign extensions. This can be less efficient in certain cases on other
10409 machines. */
10410
10411 /* If this is a signed equality comparison, we can do it as an
10412 unsigned comparison since zero-extension is cheaper than sign
10413 extension and comparisons with zero are done as unsigned. This is
10414 the case even on machines that can do fast sign extension, since
10415 zero-extension is easier to combine with other operations than
10416 sign-extension is. If we are comparing against a constant, we must
10417 convert it to what it would look like unsigned. */
10418 if ((code == EQ || code == NE) && ! unsignedp
10419 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10420 {
10421 if (GET_CODE (op1) == CONST_INT
10422 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10423 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10424 unsignedp = 1;
10425 }
10426 #endif
10427
10428 if (! if_true_label)
10429 {
10430 dummy_true_label = 1;
10431 if_true_label = gen_label_rtx ();
10432 }
10433
10434 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10435 if_true_label);
10436
10437 if (if_false_label)
10438 emit_jump (if_false_label);
10439 if (dummy_true_label)
10440 emit_label (if_true_label);
10441 }
10442
10443 /* Generate code for a comparison expression EXP (including code to compute
10444 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10445 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10446 generated code will drop through.
10447 SIGNED_CODE should be the rtx operation for this comparison for
10448 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10449
10450 We force a stack adjustment unless there are currently
10451 things pushed on the stack that aren't yet used. */
10452
10453 static void
10454 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10455 if_true_label)
10456 tree exp;
10457 enum rtx_code signed_code, unsigned_code;
10458 rtx if_false_label, if_true_label;
10459 {
10460 rtx op0, op1;
10461 tree type;
10462 enum machine_mode mode;
10463 int unsignedp;
10464 enum rtx_code code;
10465
10466 /* Don't crash if the comparison was erroneous. */
10467 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10469 return;
10470
10471 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10472 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10473 return;
10474
10475 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10476 mode = TYPE_MODE (type);
10477 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10478 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10479 || (GET_MODE_BITSIZE (mode)
10480 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10481 1)))))))
10482 {
10483 /* op0 might have been replaced by promoted constant, in which
10484 case the type of second argument should be used. */
10485 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10486 mode = TYPE_MODE (type);
10487 }
10488 unsignedp = TREE_UNSIGNED (type);
10489 code = unsignedp ? unsigned_code : signed_code;
10490
10491 #ifdef HAVE_canonicalize_funcptr_for_compare
10492 /* If function pointers need to be "canonicalized" before they can
10493 be reliably compared, then canonicalize them. */
10494 if (HAVE_canonicalize_funcptr_for_compare
10495 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10496 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10497 == FUNCTION_TYPE))
10498 {
10499 rtx new_op0 = gen_reg_rtx (mode);
10500
10501 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10502 op0 = new_op0;
10503 }
10504
10505 if (HAVE_canonicalize_funcptr_for_compare
10506 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10507 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10508 == FUNCTION_TYPE))
10509 {
10510 rtx new_op1 = gen_reg_rtx (mode);
10511
10512 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10513 op1 = new_op1;
10514 }
10515 #endif
10516
10517 /* Do any postincrements in the expression that was tested. */
10518 emit_queue ();
10519
10520 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10521 ((mode == BLKmode)
10522 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10523 if_false_label, if_true_label);
10524 }
10525 \f
10526 /* Generate code to calculate EXP using a store-flag instruction
10527 and return an rtx for the result. EXP is either a comparison
10528 or a TRUTH_NOT_EXPR whose operand is a comparison.
10529
10530 If TARGET is nonzero, store the result there if convenient.
10531
10532 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10533 cheap.
10534
10535 Return zero if there is no suitable set-flag instruction
10536 available on this machine.
10537
10538 Once expand_expr has been called on the arguments of the comparison,
10539 we are committed to doing the store flag, since it is not safe to
10540 re-evaluate the expression. We emit the store-flag insn by calling
10541 emit_store_flag, but only expand the arguments if we have a reason
10542 to believe that emit_store_flag will be successful. If we think that
10543 it will, but it isn't, we have to simulate the store-flag with a
10544 set/jump/set sequence. */
10545
10546 static rtx
10547 do_store_flag (exp, target, mode, only_cheap)
10548 tree exp;
10549 rtx target;
10550 enum machine_mode mode;
10551 int only_cheap;
10552 {
10553 enum rtx_code code;
10554 tree arg0, arg1, type;
10555 tree tem;
10556 enum machine_mode operand_mode;
10557 int invert = 0;
10558 int unsignedp;
10559 rtx op0, op1;
10560 enum insn_code icode;
10561 rtx subtarget = target;
10562 rtx result, label;
10563
10564 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10565 result at the end. We can't simply invert the test since it would
10566 have already been inverted if it were valid. This case occurs for
10567 some floating-point comparisons. */
10568
10569 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10570 invert = 1, exp = TREE_OPERAND (exp, 0);
10571
10572 arg0 = TREE_OPERAND (exp, 0);
10573 arg1 = TREE_OPERAND (exp, 1);
10574
10575 /* Don't crash if the comparison was erroneous. */
10576 if (arg0 == error_mark_node || arg1 == error_mark_node)
10577 return const0_rtx;
10578
10579 type = TREE_TYPE (arg0);
10580 operand_mode = TYPE_MODE (type);
10581 unsignedp = TREE_UNSIGNED (type);
10582
10583 /* We won't bother with BLKmode store-flag operations because it would mean
10584 passing a lot of information to emit_store_flag. */
10585 if (operand_mode == BLKmode)
10586 return 0;
10587
10588 /* We won't bother with store-flag operations involving function pointers
10589 when function pointers must be canonicalized before comparisons. */
10590 #ifdef HAVE_canonicalize_funcptr_for_compare
10591 if (HAVE_canonicalize_funcptr_for_compare
10592 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10593 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10594 == FUNCTION_TYPE))
10595 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10596 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10597 == FUNCTION_TYPE))))
10598 return 0;
10599 #endif
10600
10601 STRIP_NOPS (arg0);
10602 STRIP_NOPS (arg1);
10603
10604 /* Get the rtx comparison code to use. We know that EXP is a comparison
10605 operation of some type. Some comparisons against 1 and -1 can be
10606 converted to comparisons with zero. Do so here so that the tests
10607 below will be aware that we have a comparison with zero. These
10608 tests will not catch constants in the first operand, but constants
10609 are rarely passed as the first operand. */
10610
10611 switch (TREE_CODE (exp))
10612 {
10613 case EQ_EXPR:
10614 code = EQ;
10615 break;
10616 case NE_EXPR:
10617 code = NE;
10618 break;
10619 case LT_EXPR:
10620 if (integer_onep (arg1))
10621 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10622 else
10623 code = unsignedp ? LTU : LT;
10624 break;
10625 case LE_EXPR:
10626 if (! unsignedp && integer_all_onesp (arg1))
10627 arg1 = integer_zero_node, code = LT;
10628 else
10629 code = unsignedp ? LEU : LE;
10630 break;
10631 case GT_EXPR:
10632 if (! unsignedp && integer_all_onesp (arg1))
10633 arg1 = integer_zero_node, code = GE;
10634 else
10635 code = unsignedp ? GTU : GT;
10636 break;
10637 case GE_EXPR:
10638 if (integer_onep (arg1))
10639 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10640 else
10641 code = unsignedp ? GEU : GE;
10642 break;
10643
10644 case UNORDERED_EXPR:
10645 code = UNORDERED;
10646 break;
10647 case ORDERED_EXPR:
10648 code = ORDERED;
10649 break;
10650 case UNLT_EXPR:
10651 code = UNLT;
10652 break;
10653 case UNLE_EXPR:
10654 code = UNLE;
10655 break;
10656 case UNGT_EXPR:
10657 code = UNGT;
10658 break;
10659 case UNGE_EXPR:
10660 code = UNGE;
10661 break;
10662 case UNEQ_EXPR:
10663 code = UNEQ;
10664 break;
10665
10666 default:
10667 abort ();
10668 }
10669
10670 /* Put a constant second. */
10671 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10672 {
10673 tem = arg0; arg0 = arg1; arg1 = tem;
10674 code = swap_condition (code);
10675 }
10676
10677 /* If this is an equality or inequality test of a single bit, we can
10678 do this by shifting the bit being tested to the low-order bit and
10679 masking the result with the constant 1. If the condition was EQ,
10680 we xor it with 1. This does not require an scc insn and is faster
10681 than an scc insn even if we have it. */
10682
10683 if ((code == NE || code == EQ)
10684 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10685 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10686 {
10687 tree inner = TREE_OPERAND (arg0, 0);
10688 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10689 int ops_unsignedp;
10690
10691 /* If INNER is a right shift of a constant and it plus BITNUM does
10692 not overflow, adjust BITNUM and INNER. */
10693
10694 if (TREE_CODE (inner) == RSHIFT_EXPR
10695 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10696 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10697 && bitnum < TYPE_PRECISION (type)
10698 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10699 bitnum - TYPE_PRECISION (type)))
10700 {
10701 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10702 inner = TREE_OPERAND (inner, 0);
10703 }
10704
10705 /* If we are going to be able to omit the AND below, we must do our
10706 operations as unsigned. If we must use the AND, we have a choice.
10707 Normally unsigned is faster, but for some machines signed is. */
10708 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10709 #ifdef LOAD_EXTEND_OP
10710 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10711 #else
10712 : 1
10713 #endif
10714 );
10715
10716 if (! get_subtarget (subtarget)
10717 || GET_MODE (subtarget) != operand_mode
10718 || ! safe_from_p (subtarget, inner, 1))
10719 subtarget = 0;
10720
10721 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10722
10723 if (bitnum != 0)
10724 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10725 size_int (bitnum), subtarget, ops_unsignedp);
10726
10727 if (GET_MODE (op0) != mode)
10728 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10729
10730 if ((code == EQ && ! invert) || (code == NE && invert))
10731 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10732 ops_unsignedp, OPTAB_LIB_WIDEN);
10733
10734 /* Put the AND last so it can combine with more things. */
10735 if (bitnum != TYPE_PRECISION (type) - 1)
10736 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10737
10738 return op0;
10739 }
10740
10741 /* Now see if we are likely to be able to do this. Return if not. */
10742 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10743 return 0;
10744
10745 icode = setcc_gen_code[(int) code];
10746 if (icode == CODE_FOR_nothing
10747 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10748 {
10749 /* We can only do this if it is one of the special cases that
10750 can be handled without an scc insn. */
10751 if ((code == LT && integer_zerop (arg1))
10752 || (! only_cheap && code == GE && integer_zerop (arg1)))
10753 ;
10754 else if (BRANCH_COST >= 0
10755 && ! only_cheap && (code == NE || code == EQ)
10756 && TREE_CODE (type) != REAL_TYPE
10757 && ((abs_optab->handlers[(int) operand_mode].insn_code
10758 != CODE_FOR_nothing)
10759 || (ffs_optab->handlers[(int) operand_mode].insn_code
10760 != CODE_FOR_nothing)))
10761 ;
10762 else
10763 return 0;
10764 }
10765
10766 if (! get_subtarget (target)
10767 || GET_MODE (subtarget) != operand_mode
10768 || ! safe_from_p (subtarget, arg1, 1))
10769 subtarget = 0;
10770
10771 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10772 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10773
10774 if (target == 0)
10775 target = gen_reg_rtx (mode);
10776
10777 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10778 because, if the emit_store_flag does anything it will succeed and
10779 OP0 and OP1 will not be used subsequently. */
10780
10781 result = emit_store_flag (target, code,
10782 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10783 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10784 operand_mode, unsignedp, 1);
10785
10786 if (result)
10787 {
10788 if (invert)
10789 result = expand_binop (mode, xor_optab, result, const1_rtx,
10790 result, 0, OPTAB_LIB_WIDEN);
10791 return result;
10792 }
10793
10794 /* If this failed, we have to do this with set/compare/jump/set code. */
10795 if (GET_CODE (target) != REG
10796 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10797 target = gen_reg_rtx (GET_MODE (target));
10798
10799 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10800 result = compare_from_rtx (op0, op1, code, unsignedp,
10801 operand_mode, NULL_RTX);
10802 if (GET_CODE (result) == CONST_INT)
10803 return (((result == const0_rtx && ! invert)
10804 || (result != const0_rtx && invert))
10805 ? const0_rtx : const1_rtx);
10806
10807 /* The code of RESULT may not match CODE if compare_from_rtx
10808 decided to swap its operands and reverse the original code.
10809
10810 We know that compare_from_rtx returns either a CONST_INT or
10811 a new comparison code, so it is safe to just extract the
10812 code from RESULT. */
10813 code = GET_CODE (result);
10814
10815 label = gen_label_rtx ();
10816 if (bcc_gen_fctn[(int) code] == 0)
10817 abort ();
10818
10819 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10820 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10821 emit_label (label);
10822
10823 return target;
10824 }
10825 \f
10826
10827 /* Stubs in case we haven't got a casesi insn. */
10828 #ifndef HAVE_casesi
10829 # define HAVE_casesi 0
10830 # define gen_casesi(a, b, c, d, e) (0)
10831 # define CODE_FOR_casesi CODE_FOR_nothing
10832 #endif
10833
10834 /* If the machine does not have a case insn that compares the bounds,
10835 this means extra overhead for dispatch tables, which raises the
10836 threshold for using them. */
10837 #ifndef CASE_VALUES_THRESHOLD
10838 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10839 #endif /* CASE_VALUES_THRESHOLD */
10840
10841 unsigned int
10842 case_values_threshold ()
10843 {
10844 return CASE_VALUES_THRESHOLD;
10845 }
10846
10847 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10848 0 otherwise (i.e. if there is no casesi instruction). */
10849 int
10850 try_casesi (index_type, index_expr, minval, range,
10851 table_label, default_label)
10852 tree index_type, index_expr, minval, range;
10853 rtx table_label ATTRIBUTE_UNUSED;
10854 rtx default_label;
10855 {
10856 enum machine_mode index_mode = SImode;
10857 int index_bits = GET_MODE_BITSIZE (index_mode);
10858 rtx op1, op2, index;
10859 enum machine_mode op_mode;
10860
10861 if (! HAVE_casesi)
10862 return 0;
10863
10864 /* Convert the index to SImode. */
10865 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10866 {
10867 enum machine_mode omode = TYPE_MODE (index_type);
10868 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10869
10870 /* We must handle the endpoints in the original mode. */
10871 index_expr = build (MINUS_EXPR, index_type,
10872 index_expr, minval);
10873 minval = integer_zero_node;
10874 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10875 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10876 omode, 1, default_label);
10877 /* Now we can safely truncate. */
10878 index = convert_to_mode (index_mode, index, 0);
10879 }
10880 else
10881 {
10882 if (TYPE_MODE (index_type) != index_mode)
10883 {
10884 index_expr = convert ((*lang_hooks.types.type_for_size)
10885 (index_bits, 0), index_expr);
10886 index_type = TREE_TYPE (index_expr);
10887 }
10888
10889 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10890 }
10891 emit_queue ();
10892 index = protect_from_queue (index, 0);
10893 do_pending_stack_adjust ();
10894
10895 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10896 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10897 (index, op_mode))
10898 index = copy_to_mode_reg (op_mode, index);
10899
10900 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10901
10902 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10903 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10904 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10905 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10906 (op1, op_mode))
10907 op1 = copy_to_mode_reg (op_mode, op1);
10908
10909 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10910
10911 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10912 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10913 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10914 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10915 (op2, op_mode))
10916 op2 = copy_to_mode_reg (op_mode, op2);
10917
10918 emit_jump_insn (gen_casesi (index, op1, op2,
10919 table_label, default_label));
10920 return 1;
10921 }
10922
10923 /* Attempt to generate a tablejump instruction; same concept. */
10924 #ifndef HAVE_tablejump
10925 #define HAVE_tablejump 0
10926 #define gen_tablejump(x, y) (0)
10927 #endif
10928
10929 /* Subroutine of the next function.
10930
10931 INDEX is the value being switched on, with the lowest value
10932 in the table already subtracted.
10933 MODE is its expected mode (needed if INDEX is constant).
10934 RANGE is the length of the jump table.
10935 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10936
10937 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10938 index value is out of range. */
10939
10940 static void
10941 do_tablejump (index, mode, range, table_label, default_label)
10942 rtx index, range, table_label, default_label;
10943 enum machine_mode mode;
10944 {
10945 rtx temp, vector;
10946
10947 if (INTVAL (range) > cfun->max_jumptable_ents)
10948 cfun->max_jumptable_ents = INTVAL (range);
10949
10950 /* Do an unsigned comparison (in the proper mode) between the index
10951 expression and the value which represents the length of the range.
10952 Since we just finished subtracting the lower bound of the range
10953 from the index expression, this comparison allows us to simultaneously
10954 check that the original index expression value is both greater than
10955 or equal to the minimum value of the range and less than or equal to
10956 the maximum value of the range. */
10957
10958 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10959 default_label);
10960
10961 /* If index is in range, it must fit in Pmode.
10962 Convert to Pmode so we can index with it. */
10963 if (mode != Pmode)
10964 index = convert_to_mode (Pmode, index, 1);
10965
10966 /* Don't let a MEM slip thru, because then INDEX that comes
10967 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10968 and break_out_memory_refs will go to work on it and mess it up. */
10969 #ifdef PIC_CASE_VECTOR_ADDRESS
10970 if (flag_pic && GET_CODE (index) != REG)
10971 index = copy_to_mode_reg (Pmode, index);
10972 #endif
10973
10974 /* If flag_force_addr were to affect this address
10975 it could interfere with the tricky assumptions made
10976 about addresses that contain label-refs,
10977 which may be valid only very near the tablejump itself. */
10978 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10979 GET_MODE_SIZE, because this indicates how large insns are. The other
10980 uses should all be Pmode, because they are addresses. This code
10981 could fail if addresses and insns are not the same size. */
10982 index = gen_rtx_PLUS (Pmode,
10983 gen_rtx_MULT (Pmode, index,
10984 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10985 gen_rtx_LABEL_REF (Pmode, table_label));
10986 #ifdef PIC_CASE_VECTOR_ADDRESS
10987 if (flag_pic)
10988 index = PIC_CASE_VECTOR_ADDRESS (index);
10989 else
10990 #endif
10991 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10992 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10993 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10994 RTX_UNCHANGING_P (vector) = 1;
10995 convert_move (temp, vector, 0);
10996
10997 emit_jump_insn (gen_tablejump (temp, table_label));
10998
10999 /* If we are generating PIC code or if the table is PC-relative, the
11000 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11001 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11002 emit_barrier ();
11003 }
11004
11005 int
11006 try_tablejump (index_type, index_expr, minval, range,
11007 table_label, default_label)
11008 tree index_type, index_expr, minval, range;
11009 rtx table_label, default_label;
11010 {
11011 rtx index;
11012
11013 if (! HAVE_tablejump)
11014 return 0;
11015
11016 index_expr = fold (build (MINUS_EXPR, index_type,
11017 convert (index_type, index_expr),
11018 convert (index_type, minval)));
11019 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11020 emit_queue ();
11021 index = protect_from_queue (index, 0);
11022 do_pending_stack_adjust ();
11023
11024 do_tablejump (index, TYPE_MODE (index_type),
11025 convert_modes (TYPE_MODE (index_type),
11026 TYPE_MODE (TREE_TYPE (range)),
11027 expand_expr (range, NULL_RTX,
11028 VOIDmode, 0),
11029 TREE_UNSIGNED (TREE_TYPE (range))),
11030 table_label, default_label);
11031 return 1;
11032 }
11033
11034 /* Nonzero if the mode is a valid vector mode for this architecture.
11035 This returns nonzero even if there is no hardware support for the
11036 vector mode, but we can emulate with narrower modes. */
11037
11038 int
11039 vector_mode_valid_p (mode)
11040 enum machine_mode mode;
11041 {
11042 enum mode_class class = GET_MODE_CLASS (mode);
11043 enum machine_mode innermode;
11044
11045 /* Doh! What's going on? */
11046 if (class != MODE_VECTOR_INT
11047 && class != MODE_VECTOR_FLOAT)
11048 return 0;
11049
11050 /* Hardware support. Woo hoo! */
11051 if (VECTOR_MODE_SUPPORTED_P (mode))
11052 return 1;
11053
11054 innermode = GET_MODE_INNER (mode);
11055
11056 /* We should probably return 1 if requesting V4DI and we have no DI,
11057 but we have V2DI, but this is probably very unlikely. */
11058
11059 /* If we have support for the inner mode, we can safely emulate it.
11060 We may not have V2DI, but me can emulate with a pair of DIs. */
11061 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11062 }
11063
11064 #include "gt-expr.h"