1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
52 /* Supply a default definition for PUSH_ARGS. */
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
79 #define STACK_PUSH_CODE PRE_INC
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list
= 0;
/* NOTE(review): mangled extraction -- original lines 110 and 112-119 are
   absent here, so this struct lacks the tail of its comment, its opening
   brace, and its leading members (the to/from address fields used later as
   data.to_addr / data.from_addr, plus the other autoincrement bookkeeping).
   Only the trailing members survived.  Recover the pristine source before
   editing.  */
109 /* This structure is used by move_by_pieces to describe the move to
111 struct move_by_pieces
/* Nonzero when the source address uses an explicit pre/post increment.  */
120 int explicit_inc_from
;
/* Number of bytes remaining to move.  */
121 unsigned HOST_WIDE_INT len
;
/* Current byte offset into the source and destination blocks.  */
122 HOST_WIDE_INT offset
;
/* NOTE(review): mangled extraction -- original lines 127-128 and 130-134
   are absent here (comment tail, opening brace, and the struct's leading
   members).  Only the trailing members survived.  Recover the pristine
   source before editing.  */
126 /* This structure is used by clear_by_pieces to describe the clear to
129 struct clear_by_pieces
/* Number of bytes remaining to clear.  */
135 unsigned HOST_WIDE_INT len
;
/* Current byte offset into the block being cleared.  */
136 HOST_WIDE_INT offset
;
/* Obstack on which permanent compiler data live; defined elsewhere
   in the compiler (an incomplete type is fine for an extern decl).  */
extern struct obstack permanent_obstack;
/* Forward declarations for this file's static helpers.
   NOTE(review): this is a mangled listing -- every declaration is wrapped
   across several physical lines and prefixed with its original line number,
   and a few PARAMS lists are truncated where the extraction dropped a
   continuation line (flagged individually below).  Recover the pristine
   source before trying to compile.  */
142 static rtx get_push_address
PARAMS ((int));
144 static rtx enqueue_insn
PARAMS ((rtx
, rtx
));
145 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
146 PARAMS ((unsigned HOST_WIDE_INT
,
/* NOTE(review): tail of the move_by_pieces_ninsns PARAMS list (original
   line 147) is missing here.  */
148 static void move_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...), enum machine_mode
,
149 struct move_by_pieces
*));
150 static void clear_by_pieces
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
/* NOTE(review): tail of the clear_by_pieces PARAMS list (original line
   151) is missing here.  */
152 static void clear_by_pieces_1
PARAMS ((rtx (*) (rtx
, ...),
154 struct clear_by_pieces
*));
155 static rtx get_subtarget
PARAMS ((rtx
));
156 static int is_zeros_p
PARAMS ((tree
));
157 static int mostly_zeros_p
PARAMS ((tree
));
158 static void store_constructor_field
PARAMS ((rtx
, unsigned HOST_WIDE_INT
,
159 HOST_WIDE_INT
, enum machine_mode
,
160 tree
, tree
, unsigned int, int));
161 static void store_constructor
PARAMS ((tree
, rtx
, unsigned int, int,
/* NOTE(review): tail of the store_constructor PARAMS list (original line
   162) is missing here.  */
163 static rtx store_field
PARAMS ((rtx
, HOST_WIDE_INT
,
164 HOST_WIDE_INT
, enum machine_mode
,
165 tree
, enum machine_mode
, int,
166 unsigned int, HOST_WIDE_INT
, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier
PARAMS ((enum expand_modifier
));
169 static tree save_noncopied_parts
PARAMS ((tree
, tree
));
170 static tree init_noncopied_parts
PARAMS ((tree
, tree
));
171 static int safe_from_p
PARAMS ((rtx
, tree
, int));
172 static int fixed_type_p
PARAMS ((tree
));
173 static rtx var_rtx
PARAMS ((tree
));
174 static int readonly_fields_p
PARAMS ((tree
));
175 static rtx expand_expr_unaligned
PARAMS ((tree
, unsigned int *));
176 static rtx expand_increment
PARAMS ((tree
, int, int));
177 static void preexpand_calls
PARAMS ((tree
));
178 static void do_jump_by_parts_greater
PARAMS ((tree
, int, rtx
, rtx
));
179 static void do_jump_by_parts_equality
PARAMS ((tree
, rtx
, rtx
));
180 static void do_compare_and_jump
PARAMS ((tree
, enum rtx_code
, enum rtx_code
,
/* NOTE(review): tail of the do_compare_and_jump PARAMS list (original
   line 181) is missing here.  */
182 static rtx do_store_flag
PARAMS ((tree
, rtx
, enum machine_mode
, int));
184 /* Record for each mode whether we can move a register directly to or
185 from an object of that mode in memory. If we can't, we won't try
186 to use that mode directly when accessing a field of that mode. */
188 static char direct_load
[NUM_MACHINE_MODES
];
189 static char direct_store
[NUM_MACHINE_MODES
];
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
210 /* This array records the insn_code of insns to perform block moves. */
211 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
213 /* This array records the insn_code of insns to perform block clears. */
214 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
216 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
218 #ifndef SLOW_UNALIGNED_ACCESS
219 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 /* This is run once per compilation to set up which modes can be used
223 directly in memory and to initialize the block move optab. */
229 enum machine_mode mode
;
236 /* Since we are on the permanent obstack, we must be sure we save this
237 spot AFTER we call start_sequence, since it will reuse the rtl it
239 free_point
= (char *) oballoc (0);
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
245 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
247 insn
= emit_insn (gen_rtx_SET (0, NULL_RTX
, NULL_RTX
));
248 pat
= PATTERN (insn
);
250 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
251 mode
= (enum machine_mode
) ((int) mode
+ 1))
256 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
257 PUT_MODE (mem
, mode
);
258 PUT_MODE (mem1
, mode
);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
264 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
265 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
268 if (! HARD_REGNO_MODE_OK (regno
, mode
))
271 reg
= gen_rtx_REG (mode
, regno
);
274 SET_DEST (pat
) = reg
;
275 if (recog (pat
, insn
, &num_clobbers
) >= 0)
276 direct_load
[(int) mode
] = 1;
278 SET_SRC (pat
) = mem1
;
279 SET_DEST (pat
) = reg
;
280 if (recog (pat
, insn
, &num_clobbers
) >= 0)
281 direct_load
[(int) mode
] = 1;
284 SET_DEST (pat
) = mem
;
285 if (recog (pat
, insn
, &num_clobbers
) >= 0)
286 direct_store
[(int) mode
] = 1;
289 SET_DEST (pat
) = mem1
;
290 if (recog (pat
, insn
, &num_clobbers
) >= 0)
291 direct_store
[(int) mode
] = 1;
299 /* This is run at the start of compiling a function. */
304 cfun
->expr
= (struct expr_status
*) xmalloc (sizeof (struct expr_status
));
307 pending_stack_adjust
= 0;
308 stack_pointer_delta
= 0;
309 inhibit_defer_pop
= 0;
311 apply_args_value
= 0;
317 struct expr_status
*p
;
322 ggc_mark_rtx (p
->x_saveregs_value
);
323 ggc_mark_rtx (p
->x_apply_args_value
);
324 ggc_mark_rtx (p
->x_forced_labels
);
335 /* Small sanity check that the queue is empty at the end of a function. */
338 finish_expr_for_function ()
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
355 enqueue_insn (var
, body
)
358 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
359 body
, pending_chain
);
360 return pending_chain
;
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
379 protect_from_queue (x
, modify
)
383 register RTX_CODE code
= GET_CODE (x
);
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain
== 0)
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
398 if (code
== MEM
&& GET_MODE (x
) != BLKmode
399 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
401 register rtx y
= XEXP (x
, 0);
402 register rtx
new = gen_rtx_MEM (GET_MODE (x
), QUEUED_VAR (y
));
404 MEM_COPY_ATTRIBUTES (new, x
);
408 register rtx temp
= gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp
, new),
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
419 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
420 if (tem
!= XEXP (x
, 0))
426 else if (code
== PLUS
|| code
== MULT
)
428 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
429 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
430 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x
) == 0)
441 return QUEUED_VAR (x
);
442 /* If the increment has happened and a pre-increment copy exists,
444 if (QUEUED_COPY (x
) != 0)
445 return QUEUED_COPY (x
);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
451 return QUEUED_COPY (x
);
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
463 register enum rtx_code code
= GET_CODE (x
);
469 return queued_subexp_p (XEXP (x
, 0));
473 return (queued_subexp_p (XEXP (x
, 0))
474 || queued_subexp_p (XEXP (x
, 1)));
480 /* Perform all the pending incrementations. */
486 while ((p
= pending_chain
))
488 rtx body
= QUEUED_BODY (p
);
490 if (GET_CODE (body
) == SEQUENCE
)
492 QUEUED_INSN (p
) = XVECEXP (QUEUED_BODY (p
), 0, 0);
493 emit_insn (QUEUED_BODY (p
));
496 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
497 pending_chain
= QUEUED_NEXT (p
);
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
507 convert_move (to
, from
, unsignedp
)
508 register rtx to
, from
;
511 enum machine_mode to_mode
= GET_MODE (to
);
512 enum machine_mode from_mode
= GET_MODE (from
);
513 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
514 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
521 to
= protect_from_queue (to
, 1);
522 from
= protect_from_queue (from
, 0);
524 if (to_real
!= from_real
)
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
531 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
533 >= GET_MODE_SIZE (to_mode
))
534 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
535 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
537 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
540 if (to_mode
== from_mode
541 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
543 emit_move_insn (to
, from
);
547 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
549 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
552 if (VECTOR_MODE_P (to_mode
))
553 from
= gen_rtx_SUBREG (to_mode
, from
, 0);
555 to
= gen_rtx_SUBREG (from_mode
, to
, 0);
557 emit_move_insn (to
, from
);
561 if (to_real
!= from_real
)
568 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
570 /* Try converting directly if the insn is supported. */
571 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
574 emit_unop_insn (code
, to
, from
, UNKNOWN
);
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
582 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2
&& from_mode
== TQFmode
&& to_mode
== QFmode
)
589 emit_unop_insn (CODE_FOR_trunctqfqf2
, to
, from
, UNKNOWN
);
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
596 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
603 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
610 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
617 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
625 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
632 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
639 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
646 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
653 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
661 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
668 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
675 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
682 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
690 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
697 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
704 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
711 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
718 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
730 libcall
= extendsfdf2_libfunc
;
734 libcall
= extendsfxf2_libfunc
;
738 libcall
= extendsftf2_libfunc
;
750 libcall
= truncdfsf2_libfunc
;
754 libcall
= extenddfxf2_libfunc
;
758 libcall
= extenddftf2_libfunc
;
770 libcall
= truncxfsf2_libfunc
;
774 libcall
= truncxfdf2_libfunc
;
786 libcall
= trunctfsf2_libfunc
;
790 libcall
= trunctfdf2_libfunc
;
802 if (libcall
== (rtx
) 0)
803 /* This conversion is not implemented yet. */
806 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
808 emit_move_insn (to
, value
);
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
816 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
823 enum machine_mode lowpart_mode
;
824 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
826 /* Try converting directly if the insn is supported. */
827 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
835 from
= force_reg (from_mode
, from
);
836 emit_unop_insn (code
, to
, from
, equiv_code
);
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
841 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
842 != CODE_FOR_nothing
))
844 if (GET_CODE (to
) == REG
)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
846 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
847 emit_unop_insn (code
, to
,
848 gen_lowpart (word_mode
, to
), equiv_code
);
852 /* No special multiword conversion insn; do it by hand. */
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to
, from
))
859 from
= force_reg (from_mode
, from
);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
863 lowpart_mode
= word_mode
;
865 lowpart_mode
= from_mode
;
867 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
869 lowpart
= gen_lowpart (lowpart_mode
, to
);
870 emit_move_insn (lowpart
, lowfrom
);
872 /* Compute the value to put in each remaining word. */
874 fill_value
= const0_rtx
;
879 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
880 && STORE_FLAG_VALUE
== -1)
882 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
884 fill_value
= gen_reg_rtx (word_mode
);
885 emit_insn (gen_slt (fill_value
));
891 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
892 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
894 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
898 /* Fill the remaining words. */
899 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
901 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
902 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
907 if (fill_value
!= subword
)
908 emit_move_insn (subword
, fill_value
);
911 insns
= get_insns ();
914 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
915 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
923 if (!((GET_CODE (from
) == MEM
924 && ! MEM_VOLATILE_P (from
)
925 && direct_load
[(int) to_mode
]
926 && ! mode_dependent_address_p (XEXP (from
, 0)))
927 || GET_CODE (from
) == REG
928 || GET_CODE (from
) == SUBREG
))
929 from
= force_reg (from_mode
, from
);
930 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode
== PQImode
)
937 if (from_mode
!= QImode
)
938 from
= convert_to_mode (QImode
, from
, unsignedp
);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2
)
943 emit_unop_insn (CODE_FOR_truncqipqi2
, to
, from
, UNKNOWN
);
946 #endif /* HAVE_truncqipqi2 */
950 if (from_mode
== PQImode
)
952 if (to_mode
!= QImode
)
954 from
= convert_to_mode (QImode
, from
, unsignedp
);
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2
)
962 emit_unop_insn (CODE_FOR_extendpqiqi2
, to
, from
, UNKNOWN
);
965 #endif /* HAVE_extendpqiqi2 */
970 if (to_mode
== PSImode
)
972 if (from_mode
!= SImode
)
973 from
= convert_to_mode (SImode
, from
, unsignedp
);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2
)
978 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
981 #endif /* HAVE_truncsipsi2 */
985 if (from_mode
== PSImode
)
987 if (to_mode
!= SImode
)
989 from
= convert_to_mode (SImode
, from
, unsignedp
);
994 #ifdef HAVE_extendpsisi2
995 if (HAVE_extendpsisi2
)
997 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
1000 #endif /* HAVE_extendpsisi2 */
1005 if (to_mode
== PDImode
)
1007 if (from_mode
!= DImode
)
1008 from
= convert_to_mode (DImode
, from
, unsignedp
);
1010 #ifdef HAVE_truncdipdi2
1011 if (HAVE_truncdipdi2
)
1013 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1016 #endif /* HAVE_truncdipdi2 */
1020 if (from_mode
== PDImode
)
1022 if (to_mode
!= DImode
)
1024 from
= convert_to_mode (DImode
, from
, unsignedp
);
1029 #ifdef HAVE_extendpdidi2
1030 if (HAVE_extendpdidi2
)
1032 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1035 #endif /* HAVE_extendpdidi2 */
1040 /* Now follow all the conversions between integers
1041 no more than a word long. */
1043 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1044 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1045 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1046 GET_MODE_BITSIZE (from_mode
)))
1048 if (!((GET_CODE (from
) == MEM
1049 && ! MEM_VOLATILE_P (from
)
1050 && direct_load
[(int) to_mode
]
1051 && ! mode_dependent_address_p (XEXP (from
, 0)))
1052 || GET_CODE (from
) == REG
1053 || GET_CODE (from
) == SUBREG
))
1054 from
= force_reg (from_mode
, from
);
1055 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1056 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1057 from
= copy_to_reg (from
);
1058 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1062 /* Handle extension. */
1063 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1065 /* Convert directly if that works. */
1066 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1067 != CODE_FOR_nothing
)
1069 emit_unop_insn (code
, to
, from
, equiv_code
);
1074 enum machine_mode intermediate
;
1078 /* Search for a mode to convert via. */
1079 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1080 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1081 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1082 != CODE_FOR_nothing
)
1083 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1084 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1085 GET_MODE_BITSIZE (intermediate
))))
1086 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1087 != CODE_FOR_nothing
))
1089 convert_move (to
, convert_to_mode (intermediate
, from
,
1090 unsignedp
), unsignedp
);
1094 /* No suitable intermediate mode.
1095 Generate what we need with shifts. */
1096 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
1097 - GET_MODE_BITSIZE (from_mode
), 0);
1098 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
1099 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
1101 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
1104 emit_move_insn (to
, tmp
);
1109 /* Support special truncate insns for certain modes. */
1111 if (from_mode
== DImode
&& to_mode
== SImode
)
1113 #ifdef HAVE_truncdisi2
1114 if (HAVE_truncdisi2
)
1116 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1120 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1124 if (from_mode
== DImode
&& to_mode
== HImode
)
1126 #ifdef HAVE_truncdihi2
1127 if (HAVE_truncdihi2
)
1129 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1133 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1137 if (from_mode
== DImode
&& to_mode
== QImode
)
1139 #ifdef HAVE_truncdiqi2
1140 if (HAVE_truncdiqi2
)
1142 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1146 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1150 if (from_mode
== SImode
&& to_mode
== HImode
)
1152 #ifdef HAVE_truncsihi2
1153 if (HAVE_truncsihi2
)
1155 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1159 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1163 if (from_mode
== SImode
&& to_mode
== QImode
)
1165 #ifdef HAVE_truncsiqi2
1166 if (HAVE_truncsiqi2
)
1168 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1172 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1176 if (from_mode
== HImode
&& to_mode
== QImode
)
1178 #ifdef HAVE_trunchiqi2
1179 if (HAVE_trunchiqi2
)
1181 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1185 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1189 if (from_mode
== TImode
&& to_mode
== DImode
)
1191 #ifdef HAVE_trunctidi2
1192 if (HAVE_trunctidi2
)
1194 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1198 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1202 if (from_mode
== TImode
&& to_mode
== SImode
)
1204 #ifdef HAVE_trunctisi2
1205 if (HAVE_trunctisi2
)
1207 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1211 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1215 if (from_mode
== TImode
&& to_mode
== HImode
)
1217 #ifdef HAVE_trunctihi2
1218 if (HAVE_trunctihi2
)
1220 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1224 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1228 if (from_mode
== TImode
&& to_mode
== QImode
)
1230 #ifdef HAVE_trunctiqi2
1231 if (HAVE_trunctiqi2
)
1233 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1237 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1241 /* Handle truncation of volatile memrefs, and so on;
1242 the things that couldn't be truncated directly,
1243 and for which there was no special instruction. */
1244 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1246 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1247 emit_move_insn (to
, temp
);
1251 /* Mode combination is not recognized. */
1255 /* Return an rtx for a value that would result
1256 from converting X to mode MODE.
1257 Both X and MODE may be floating, or both integer.
1258 UNSIGNEDP is nonzero if X is an unsigned value.
1259 This can be done by referring to a part of X in place
1260 or by copying to a new temporary with conversion.
1262 This function *must not* call protect_from_queue
1263 except when putting X into an insn (in which case convert_move does it). */
1266 convert_to_mode (mode
, x
, unsignedp
)
1267 enum machine_mode mode
;
1271 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1274 /* Return an rtx for a value that would result
1275 from converting X from mode OLDMODE to mode MODE.
1276 Both modes may be floating, or both integer.
1277 UNSIGNEDP is nonzero if X is an unsigned value.
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1282 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1284 This function *must not* call protect_from_queue
1285 except when putting X into an insn (in which case convert_move does it). */
1288 convert_modes (mode
, oldmode
, x
, unsignedp
)
1289 enum machine_mode mode
, oldmode
;
1295 /* If FROM is a SUBREG that indicates that we have already done at least
1296 the required extension, strip it. */
1298 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1299 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1300 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1301 x
= gen_lowpart (mode
, x
);
1303 if (GET_MODE (x
) != VOIDmode
)
1304 oldmode
= GET_MODE (x
);
1306 if (mode
== oldmode
)
1309 /* There is one case that we must handle specially: If we are converting
1310 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1311 we are to interpret the constant as unsigned, gen_lowpart will do
1312 the wrong if the constant appears negative. What we want to do is
1313 make the high-order word of the constant zero, not all ones. */
1315 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1316 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1317 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1319 HOST_WIDE_INT val
= INTVAL (x
);
1321 if (oldmode
!= VOIDmode
1322 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1324 int width
= GET_MODE_BITSIZE (oldmode
);
1326 /* We need to zero extend VAL. */
1327 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1330 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1333 /* We can do this with a gen_lowpart if both desired and current modes
1334 are integer, and this is either a constant integer, a register, or a
1335 non-volatile MEM. Except for the constant case where MODE is no
1336 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1338 if ((GET_CODE (x
) == CONST_INT
1339 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1340 || (GET_MODE_CLASS (mode
) == MODE_INT
1341 && GET_MODE_CLASS (oldmode
) == MODE_INT
1342 && (GET_CODE (x
) == CONST_DOUBLE
1343 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1344 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1345 && direct_load
[(int) mode
])
1346 || (GET_CODE (x
) == REG
1347 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1348 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1350 /* ?? If we don't know OLDMODE, we have to assume here that
1351 X does not need sign- or zero-extension. This may not be
1352 the case, but it's the best we can do. */
1353 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1354 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1356 HOST_WIDE_INT val
= INTVAL (x
);
1357 int width
= GET_MODE_BITSIZE (oldmode
);
1359 /* We must sign or zero-extend in this case. Start by
1360 zero-extending, then sign extend if we need to. */
1361 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1363 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1364 val
|= (HOST_WIDE_INT
) (-1) << width
;
1366 return GEN_INT (val
);
1369 return gen_lowpart (mode
, x
);
1372 temp
= gen_reg_rtx (mode
);
1373 convert_move (temp
, x
, unsignedp
);
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
1388 /* Generate several move instructions to copy LEN bytes
1389 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1390 The caller must pass FROM and TO
1391 through protect_from_queue before calling.
1392 ALIGN is maximum alignment we can assume. */
1395 move_by_pieces (to
, from
, len
, align
)
1397 unsigned HOST_WIDE_INT len
;
1400 struct move_by_pieces data
;
1401 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1402 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1403 enum machine_mode mode
= VOIDmode
, tmode
;
1404 enum insn_code icode
;
1407 data
.to_addr
= to_addr
;
1408 data
.from_addr
= from_addr
;
1412 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1413 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1415 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1416 || GET_CODE (from_addr
) == POST_INC
1417 || GET_CODE (from_addr
) == POST_DEC
);
1419 data
.explicit_inc_from
= 0;
1420 data
.explicit_inc_to
= 0;
1422 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1423 if (data
.reverse
) data
.offset
= len
;
1426 /* If copying requires more than two move insns,
1427 copy addresses to registers (to make displacements shorter)
1428 and use post-increment if available. */
1429 if (!(data
.autinc_from
&& data
.autinc_to
)
1430 && move_by_pieces_ninsns (len
, align
) > 2)
1432 /* Find the mode of the largest move... */
1433 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1434 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1435 if (GET_MODE_SIZE (tmode
) < max_size
)
1438 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1440 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1441 data
.autinc_from
= 1;
1442 data
.explicit_inc_from
= -1;
1444 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1446 data
.from_addr
= copy_addr_to_reg (from_addr
);
1447 data
.autinc_from
= 1;
1448 data
.explicit_inc_from
= 1;
1450 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1451 data
.from_addr
= copy_addr_to_reg (from_addr
);
1452 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1454 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1456 data
.explicit_inc_to
= -1;
1458 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1460 data
.to_addr
= copy_addr_to_reg (to_addr
);
1462 data
.explicit_inc_to
= 1;
1464 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1465 data
.to_addr
= copy_addr_to_reg (to_addr
);
1468 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1469 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1470 align
= MOVE_MAX
* BITS_PER_UNIT
;
1472 /* First move what we can in the largest integer mode, then go to
1473 successively smaller modes. */
1475 while (max_size
> 1)
1477 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1478 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1479 if (GET_MODE_SIZE (tmode
) < max_size
)
1482 if (mode
== VOIDmode
)
1485 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1486 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1487 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1489 max_size
= GET_MODE_SIZE (mode
);
1492 /* The code above should have handled everything. */
1497 /* Return number of insns required to move L bytes by pieces.
1498 ALIGN (in bytes) is maximum alignment we can assume. */
1500 static unsigned HOST_WIDE_INT
1501 move_by_pieces_ninsns (l
, align
)
1502 unsigned HOST_WIDE_INT l
;
1505 unsigned HOST_WIDE_INT n_insns
= 0;
1506 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1508 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1509 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1510 align
= MOVE_MAX
* BITS_PER_UNIT
;
1512 while (max_size
> 1)
1514 enum machine_mode mode
= VOIDmode
, tmode
;
1515 enum insn_code icode
;
1517 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1518 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1519 if (GET_MODE_SIZE (tmode
) < max_size
)
1522 if (mode
== VOIDmode
)
1525 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1526 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1527 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1529 max_size
= GET_MODE_SIZE (mode
);
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1540 move_by_pieces_1 (genfun
, mode
, data
)
1541 rtx (*genfun
) PARAMS ((rtx
, ...));
1542 enum machine_mode mode
;
1543 struct move_by_pieces
*data
;
1545 unsigned int size
= GET_MODE_SIZE (mode
);
1548 while (data
->len
>= size
)
1551 data
->offset
-= size
;
1553 if (data
->autinc_to
)
1555 to1
= gen_rtx_MEM (mode
, data
->to_addr
);
1556 MEM_COPY_ATTRIBUTES (to1
, data
->to
);
1559 to1
= change_address (data
->to
, mode
,
1560 plus_constant (data
->to_addr
, data
->offset
));
1562 if (data
->autinc_from
)
1564 from1
= gen_rtx_MEM (mode
, data
->from_addr
);
1565 MEM_COPY_ATTRIBUTES (from1
, data
->from
);
1568 from1
= change_address (data
->from
, mode
,
1569 plus_constant (data
->from_addr
, data
->offset
));
1571 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1572 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1573 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1574 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1576 emit_insn ((*genfun
) (to1
, from1
));
1578 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1579 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1580 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1581 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1583 if (! data
->reverse
)
1584 data
->offset
+= size
;
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment we can assume they have.
1599 Return the address of the new block, if memcpy is called and returns it,
1603 emit_block_move (x
, y
, size
, align
)
1609 #ifdef TARGET_MEM_FUNCTIONS
1611 tree call_expr
, arg_list
;
1614 if (GET_MODE (x
) != BLKmode
)
1617 if (GET_MODE (y
) != BLKmode
)
1620 x
= protect_from_queue (x
, 1);
1621 y
= protect_from_queue (y
, 0);
1622 size
= protect_from_queue (size
, 0);
1624 if (GET_CODE (x
) != MEM
)
1626 if (GET_CODE (y
) != MEM
)
1631 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1632 move_by_pieces (x
, y
, INTVAL (size
), align
);
1635 /* Try the most limited insn first, because there's no point
1636 including more than one in the machine description unless
1637 the more limited one has some advantage. */
1639 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1640 enum machine_mode mode
;
1642 /* Since this is a move insn, we don't care about volatility. */
1645 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1646 mode
= GET_MODE_WIDER_MODE (mode
))
1648 enum insn_code code
= movstr_optab
[(int) mode
];
1649 insn_operand_predicate_fn pred
;
1651 if (code
!= CODE_FOR_nothing
1652 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1653 here because if SIZE is less than the mode mask, as it is
1654 returned by the macro, it will definitely be less than the
1655 actual mode mask. */
1656 && ((GET_CODE (size
) == CONST_INT
1657 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1658 <= (GET_MODE_MASK (mode
) >> 1)))
1659 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1660 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1661 || (*pred
) (x
, BLKmode
))
1662 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1663 || (*pred
) (y
, BLKmode
))
1664 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1665 || (*pred
) (opalign
, VOIDmode
)))
1668 rtx last
= get_last_insn ();
1671 op2
= convert_to_mode (mode
, size
, 1);
1672 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1673 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1674 op2
= copy_to_mode_reg (mode
, op2
);
1676 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1684 delete_insns_since (last
);
1690 /* X, Y, or SIZE may have been passed through protect_from_queue.
1692 It is unsafe to save the value generated by protect_from_queue
1693 and reuse it later. Consider what happens if emit_queue is
1694 called before the return value from protect_from_queue is used.
1696 Expansion of the CALL_EXPR below will call emit_queue before
1697 we are finished emitting RTL for argument setup. So if we are
1698 not careful we could get the wrong value for an argument.
1700 To avoid this problem we go ahead and emit code to copy X, Y &
1701 SIZE into new pseudos. We can then place those new pseudos
1702 into an RTL_EXPR and use them later, even after a call to
1705 Note this is not strictly needed for library calls since they
1706 do not call emit_queue before loading their arguments. However,
1707 we may need to have library calls call emit_queue in the future
1708 since failing to do so could cause problems for targets which
1709 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1710 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1711 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1713 #ifdef TARGET_MEM_FUNCTIONS
1714 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1716 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1717 TREE_UNSIGNED (integer_type_node
));
1718 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1721 #ifdef TARGET_MEM_FUNCTIONS
1722 /* It is incorrect to use the libcall calling conventions to call
1723 memcpy in this context.
1725 This could be a user call to memcpy and the user may wish to
1726 examine the return value from memcpy.
1728 For targets where libcalls and normal calls have different conventions
1729 for returning pointers, we could end up generating incorrect code.
1731 So instead of using a libcall sequence we build up a suitable
1732 CALL_EXPR and expand the call in the normal fashion. */
1733 if (fn
== NULL_TREE
)
1737 /* This was copied from except.c, I don't know if all this is
1738 necessary in this context or not. */
1739 fn
= get_identifier ("memcpy");
1740 push_obstacks_nochange ();
1741 end_temporary_allocation ();
1742 fntype
= build_pointer_type (void_type_node
);
1743 fntype
= build_function_type (fntype
, NULL_TREE
);
1744 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1745 ggc_add_tree_root (&fn
, 1);
1746 DECL_EXTERNAL (fn
) = 1;
1747 TREE_PUBLIC (fn
) = 1;
1748 DECL_ARTIFICIAL (fn
) = 1;
1749 make_decl_rtl (fn
, NULL_PTR
, 1);
1750 assemble_external (fn
);
1754 /* We need to make an argument list for the function call.
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1759 = build_tree_list (NULL_TREE
,
1760 make_tree (build_pointer_type (void_type_node
), x
));
1761 TREE_CHAIN (arg_list
)
1762 = build_tree_list (NULL_TREE
,
1763 make_tree (build_pointer_type (void_type_node
), y
));
1764 TREE_CHAIN (TREE_CHAIN (arg_list
))
1765 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1770 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1771 call_expr
, arg_list
, NULL_TREE
);
1772 TREE_SIDE_EFFECTS (call_expr
) = 1;
1774 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1776 emit_library_call (bcopy_libfunc
, 0,
1777 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1778 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1779 TREE_UNSIGNED (integer_type_node
)),
1780 TYPE_MODE (integer_type_node
));
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1791 move_block_to_reg (regno
, x
, nregs
, mode
)
1795 enum machine_mode mode
;
1798 #ifdef HAVE_load_multiple
1806 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1807 x
= validize_mem (force_const_mem (mode
, x
));
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple
)
1813 last
= get_last_insn ();
1814 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1822 delete_insns_since (last
);
1826 for (i
= 0; i
< nregs
; i
++)
1827 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1828 operand_subword_force (x
, i
, mode
));
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1836 move_block_from_reg (regno
, x
, nregs
, size
)
1843 #ifdef HAVE_store_multiple
1847 enum machine_mode mode
;
1849 /* If SIZE is that of a mode no bigger than a word, just use that
1850 mode's store operation. */
1851 if (size
<= UNITS_PER_WORD
1852 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1854 emit_move_insn (change_address (x
, mode
, NULL
),
1855 gen_rtx_REG (mode
, regno
));
1859 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1860 to the left before storing to memory. Note that the previous test
1861 doesn't handle all cases (e.g. SIZE == 3). */
1862 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1864 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1870 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1871 gen_rtx_REG (word_mode
, regno
),
1872 build_int_2 ((UNITS_PER_WORD
- size
)
1873 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1874 emit_move_insn (tem
, shift
);
1878 /* See if the machine can do this with a store multiple insn. */
1879 #ifdef HAVE_store_multiple
1880 if (HAVE_store_multiple
)
1882 last
= get_last_insn ();
1883 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1891 delete_insns_since (last
);
1895 for (i
= 0; i
< nregs
; i
++)
1897 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1902 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1906 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1907 registers represented by a PARALLEL. SSIZE represents the total size of
1908 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1910 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1911 the balance will be in what would be the low-order memory addresses, i.e.
1912 left justified for big endian, right justified for little endian. This
1913 happens to be true for the targets currently using this support. If this
1914 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1918 emit_group_load (dst
, orig_src
, ssize
, align
)
1926 if (GET_CODE (dst
) != PARALLEL
)
1929 /* Check for a NULL entry, used to indicate that the parameter goes
1930 both on the stack and in registers. */
1931 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1936 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1938 /* If we won't be loading directly from memory, protect the real source
1939 from strange tricks we might play. */
1941 if (GET_CODE (src
) != MEM
&& ! CONSTANT_P (src
))
1943 if (GET_MODE (src
) == VOIDmode
)
1944 src
= gen_reg_rtx (GET_MODE (dst
));
1946 src
= gen_reg_rtx (GET_MODE (orig_src
));
1947 emit_move_insn (src
, orig_src
);
1950 /* Process the pieces. */
1951 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1953 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1954 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1955 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
1961 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1962 bytelen
= ssize
- bytepos
;
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src
) == MEM
1969 && align
>= GET_MODE_ALIGNMENT (mode
)
1970 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1971 && bytelen
== GET_MODE_SIZE (mode
))
1973 tmps
[i
] = gen_reg_rtx (mode
);
1974 emit_move_insn (tmps
[i
],
1975 change_address (src
, mode
,
1976 plus_constant (XEXP (src
, 0),
1979 else if (GET_CODE (src
) == CONCAT
)
1982 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
1983 tmps
[i
] = XEXP (src
, 0);
1984 else if (bytepos
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
1985 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
1986 tmps
[i
] = XEXP (src
, 1);
1990 else if ((CONSTANT_P (src
)
1991 && (GET_MODE (src
) == VOIDmode
|| GET_MODE (src
) == mode
))
1992 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1995 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1996 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1997 mode
, mode
, align
, ssize
);
1999 if (BYTES_BIG_ENDIAN
&& shift
)
2000 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
2001 tmps
[i
], 0, OPTAB_WIDEN
);
2006 /* Copy the extracted pieces into the proper (probable) hard regs. */
2007 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2008 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
2011 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2012 registers represented by a PARALLEL. SSIZE represents the total size of
2013 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2016 emit_group_store (orig_dst
, src
, ssize
, align
)
2024 if (GET_CODE (src
) != PARALLEL
)
2027 /* Check for a NULL entry, used to indicate that the parameter goes
2028 both on the stack and in registers. */
2029 if (XEXP (XVECEXP (src
, 0, 0), 0))
2034 tmps
= (rtx
*) alloca (sizeof (rtx
) * XVECLEN (src
, 0));
2036 /* Copy the (probable) hard regs into pseudos. */
2037 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2039 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2040 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2041 emit_move_insn (tmps
[i
], reg
);
2045 /* If we won't be storing directly into memory, protect the real destination
2046 from strange tricks we might play. */
2048 if (GET_CODE (dst
) == PARALLEL
)
2052 /* We can get a PARALLEL dst if there is a conditional expression in
2053 a return statement. In that case, the dst and src are the same,
2054 so no action is necessary. */
2055 if (rtx_equal_p (dst
, src
))
2058 /* It is unclear if we can ever reach here, but we may as well handle
2059 it. Allocate a temporary, and split this into a store/load to/from
2062 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2063 emit_group_store (temp
, src
, ssize
, align
);
2064 emit_group_load (dst
, temp
, ssize
, align
);
2067 else if (GET_CODE (dst
) != MEM
)
2069 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2070 /* Make life a bit easier for combine. */
2071 emit_move_insn (dst
, const0_rtx
);
2074 /* Process the pieces. */
2075 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2077 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2078 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2079 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2081 /* Handle trailing fragments that run over the size of the struct. */
2082 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
2084 if (BYTES_BIG_ENDIAN
)
2086 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2087 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2088 tmps
[i
], 0, OPTAB_WIDEN
);
2090 bytelen
= ssize
- bytepos
;
2093 /* Optimize the access just a bit. */
2094 if (GET_CODE (dst
) == MEM
2095 && align
>= GET_MODE_ALIGNMENT (mode
)
2096 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2097 && bytelen
== GET_MODE_SIZE (mode
))
2098 emit_move_insn (change_address (dst
, mode
,
2099 plus_constant (XEXP (dst
, 0),
2103 store_bit_field (dst
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2104 mode
, tmps
[i
], align
, ssize
);
2109 /* Copy from the pseudo into the (probable) hard reg. */
2110 if (GET_CODE (dst
) == REG
)
2111 emit_move_insn (orig_dst
, dst
);
2114 /* Generate code to copy a BLKmode object of TYPE out of a
2115 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2116 is null, a stack temporary is created. TGTBLK is returned.
2118 The primary purpose of this routine is to handle functions
2119 that return BLKmode structures in registers. Some machines
2120 (the PA for example) want to return all small structures
2121 in registers regardless of the structure's alignment. */
2124 copy_blkmode_from_reg (tgtblk
, srcreg
, type
)
2129 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2130 rtx src
= NULL
, dst
= NULL
;
2131 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2132 unsigned HOST_WIDE_INT bitpos
, xbitpos
, big_endian_correction
= 0;
2136 tgtblk
= assign_stack_temp (BLKmode
, bytes
, 0);
2137 MEM_SET_IN_STRUCT_P (tgtblk
, AGGREGATE_TYPE_P (type
));
2138 preserve_temp_slots (tgtblk
);
2141 /* This code assumes srcreg is at least a full word. If it isn't,
2142 copy it into a new pseudo which is a full word. */
2143 if (GET_MODE (srcreg
) != BLKmode
2144 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2145 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
2151 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2152 big_endian_correction
2153 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2155 /* Copy the structure BITSIZE bites at a time.
2157 We could probably emit more efficient code for machines which do not use
2158 strict alignment, but it doesn't seem worth the effort at the current
2160 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2161 bitpos
< bytes
* BITS_PER_UNIT
;
2162 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2164 /* We need a new source operand each time xbitpos is on a
2165 word boundary and when xbitpos == big_endian_correction
2166 (the first time through). */
2167 if (xbitpos
% BITS_PER_WORD
== 0
2168 || xbitpos
== big_endian_correction
)
2169 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, BLKmode
);
2171 /* We need a new destination operand each time bitpos is on
2173 if (bitpos
% BITS_PER_WORD
== 0)
2174 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2176 /* Use xbitpos for the source extraction (right justified) and
2177 xbitpos for the destination store (left justified). */
2178 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2179 extract_bit_field (src
, bitsize
,
2180 xbitpos
% BITS_PER_WORD
, 1,
2181 NULL_RTX
, word_mode
, word_mode
,
2182 bitsize
, BITS_PER_WORD
),
2183 bitsize
, BITS_PER_WORD
);
2189 /* Add a USE expression for REG to the (possibly empty) list pointed
2190 to by CALL_FUSAGE. REG must denote a hard register. */
2193 use_reg (call_fusage
, reg
)
2194 rtx
*call_fusage
, reg
;
2196 if (GET_CODE (reg
) != REG
2197 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2201 = gen_rtx_EXPR_LIST (VOIDmode
,
2202 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2205 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2206 starting at REGNO. All of these registers must be hard registers. */
2209 use_regs (call_fusage
, regno
, nregs
)
2216 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2219 for (i
= 0; i
< nregs
; i
++)
2220 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2223 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2224 PARALLEL REGS. This is for calls that pass values in multiple
2225 non-contiguous locations. The Irix 6 ABI has examples of this. */
2228 use_group_regs (call_fusage
, regs
)
2234 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2236 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2242 use_reg (call_fusage
, reg
);
2246 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2247 rtx with BLKmode). The caller must pass TO through protect_from_queue
2248 before calling. ALIGN is maximum alignment we can assume. */
2251 clear_by_pieces (to
, len
, align
)
2253 unsigned HOST_WIDE_INT len
;
2256 struct clear_by_pieces data
;
2257 rtx to_addr
= XEXP (to
, 0);
2258 unsigned HOST_WIDE_INT max_size
= MOVE_MAX_PIECES
+ 1;
2259 enum machine_mode mode
= VOIDmode
, tmode
;
2260 enum insn_code icode
;
2263 data
.to_addr
= to_addr
;
2266 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2267 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2269 data
.explicit_inc_to
= 0;
2271 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2276 /* If copying requires more than two move insns,
2277 copy addresses to registers (to make displacements shorter)
2278 and use post-increment if available. */
2280 && move_by_pieces_ninsns (len
, align
) > 2)
2282 /* Determine the main mode we'll be using. */
2283 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2284 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2285 if (GET_MODE_SIZE (tmode
) < max_size
)
2288 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
2290 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
2292 data
.explicit_inc_to
= -1;
2295 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
2296 && ! data
.autinc_to
)
2298 data
.to_addr
= copy_addr_to_reg (to_addr
);
2300 data
.explicit_inc_to
= 1;
2303 if ( !data
.autinc_to
&& CONSTANT_P (to_addr
))
2304 data
.to_addr
= copy_addr_to_reg (to_addr
);
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2308 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2309 align
= MOVE_MAX
* BITS_PER_UNIT
;
2311 /* First move what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2314 while (max_size
> 1)
2316 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2317 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2318 if (GET_MODE_SIZE (tmode
) < max_size
)
2321 if (mode
== VOIDmode
)
2324 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2325 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2326 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
2328 max_size
= GET_MODE_SIZE (mode
);
2331 /* The code above should have handled everything. */
2336 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2337 with move instructions for mode MODE. GENFUN is the gen_... function
2338 to make a move insn for that mode. DATA has all the other info. */
2341 clear_by_pieces_1 (genfun
, mode
, data
)
2342 rtx (*genfun
) PARAMS ((rtx
, ...));
2343 enum machine_mode mode
;
2344 struct clear_by_pieces
*data
;
2346 unsigned int size
= GET_MODE_SIZE (mode
);
2349 while (data
->len
>= size
)
2352 data
->offset
-= size
;
2354 if (data
->autinc_to
)
2356 to1
= gen_rtx_MEM (mode
, data
->to_addr
);
2357 MEM_COPY_ATTRIBUTES (to1
, data
->to
);
2360 to1
= change_address (data
->to
, mode
,
2361 plus_constant (data
->to_addr
, data
->offset
));
2363 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2364 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2366 emit_insn ((*genfun
) (to1
, const0_rtx
));
2368 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2369 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2371 if (! data
->reverse
)
2372 data
->offset
+= size
;
2378 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2379 its length in bytes and ALIGN is the maximum alignment we can is has.
2381 If we call a function that returns the length of the block, return it. */
2384 clear_storage (object
, size
, align
)
2389 #ifdef TARGET_MEM_FUNCTIONS
2391 tree call_expr
, arg_list
;
2395 if (GET_MODE (object
) == BLKmode
)
2397 object
= protect_from_queue (object
, 1);
2398 size
= protect_from_queue (size
, 0);
2400 if (GET_CODE (size
) == CONST_INT
2401 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2402 clear_by_pieces (object
, INTVAL (size
), align
);
2405 /* Try the most limited insn first, because there's no point
2406 including more than one in the machine description unless
2407 the more limited one has some advantage. */
2409 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2410 enum machine_mode mode
;
2412 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2413 mode
= GET_MODE_WIDER_MODE (mode
))
2415 enum insn_code code
= clrstr_optab
[(int) mode
];
2416 insn_operand_predicate_fn pred
;
2418 if (code
!= CODE_FOR_nothing
2419 /* We don't need MODE to be narrower than
2420 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2421 the mode mask, as it is returned by the macro, it will
2422 definitely be less than the actual mode mask. */
2423 && ((GET_CODE (size
) == CONST_INT
2424 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2425 <= (GET_MODE_MASK (mode
) >> 1)))
2426 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2427 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2428 || (*pred
) (object
, BLKmode
))
2429 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2430 || (*pred
) (opalign
, VOIDmode
)))
2433 rtx last
= get_last_insn ();
2436 op1
= convert_to_mode (mode
, size
, 1);
2437 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2438 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2439 op1
= copy_to_mode_reg (mode
, op1
);
2441 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2448 delete_insns_since (last
);
2452 /* OBJECT or SIZE may have been passed through protect_from_queue.
2454 It is unsafe to save the value generated by protect_from_queue
2455 and reuse it later. Consider what happens if emit_queue is
2456 called before the return value from protect_from_queue is used.
2458 Expansion of the CALL_EXPR below will call emit_queue before
2459 we are finished emitting RTL for argument setup. So if we are
2460 not careful we could get the wrong value for an argument.
2462 To avoid this problem we go ahead and emit code to copy OBJECT
2463 and SIZE into new pseudos. We can then place those new pseudos
2464 into an RTL_EXPR and use them later, even after a call to
2467 Note this is not strictly needed for library calls since they
2468 do not call emit_queue before loading their arguments. However,
2469 we may need to have library calls call emit_queue in the future
2470 since failing to do so could cause problems for targets which
2471 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2472 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2477 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2478 TREE_UNSIGNED (integer_type_node
));
2479 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2482 #ifdef TARGET_MEM_FUNCTIONS
2483 /* It is incorrect to use the libcall calling conventions to call
2484 memset in this context.
2486 This could be a user call to memset and the user may wish to
2487 examine the return value from memset.
2489 For targets where libcalls and normal calls have different
2490 conventions for returning pointers, we could end up generating
2493 So instead of using a libcall sequence we build up a suitable
2494 CALL_EXPR and expand the call in the normal fashion. */
2495 if (fn
== NULL_TREE
)
2499 /* This was copied from except.c, I don't know if all this is
2500 necessary in this context or not. */
2501 fn
= get_identifier ("memset");
2502 push_obstacks_nochange ();
2503 end_temporary_allocation ();
2504 fntype
= build_pointer_type (void_type_node
);
2505 fntype
= build_function_type (fntype
, NULL_TREE
);
2506 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2507 ggc_add_tree_root (&fn
, 1);
2508 DECL_EXTERNAL (fn
) = 1;
2509 TREE_PUBLIC (fn
) = 1;
2510 DECL_ARTIFICIAL (fn
) = 1;
2511 make_decl_rtl (fn
, NULL_PTR
, 1);
2512 assemble_external (fn
);
2516 /* We need to make an argument list for the function call.
2518 memset has three arguments, the first is a void * addresses, the
2519 second a integer with the initialization value, the last is a
2520 size_t byte count for the copy. */
2522 = build_tree_list (NULL_TREE
,
2523 make_tree (build_pointer_type (void_type_node
),
2525 TREE_CHAIN (arg_list
)
2526 = build_tree_list (NULL_TREE
,
2527 make_tree (integer_type_node
, const0_rtx
));
2528 TREE_CHAIN (TREE_CHAIN (arg_list
))
2529 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2530 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
2532 /* Now we have to build up the CALL_EXPR itself. */
2533 call_expr
= build1 (ADDR_EXPR
,
2534 build_pointer_type (TREE_TYPE (fn
)), fn
);
2535 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2536 call_expr
, arg_list
, NULL_TREE
);
2537 TREE_SIDE_EFFECTS (call_expr
) = 1;
2539 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2541 emit_library_call (bzero_libfunc
, 0,
2542 VOIDmode
, 2, object
, Pmode
, size
,
2543 TYPE_MODE (integer_type_node
));
2548 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2553 /* Generate code to copy Y into X.
2554 Both Y and X must have the same mode, except that
2555 Y can be a constant with VOIDmode.
2556 This mode cannot be BLKmode; use emit_block_move for that.
2558 Return the last instruction emitted. */
/* emit_move_insn: generate code to copy Y into X and return the last insn
   emitted.  X and Y must have the same mode, except that Y may be a
   VOIDmode constant; BLKmode moves must use emit_block_move instead (per
   the doc comment above).  NOTE(review): this excerpt has physical lines
   dropped by extraction (missing braces/declarations); only comments were
   added here.  */
2561 emit_move_insn (x
, y
)
2564 enum machine_mode mode
= GET_MODE (x
);
/* Flush any pending autoincrement queue entries: X is used as a
   destination (second arg 1), Y as a source (0).  */
2566 x
= protect_from_queue (x
, 1);
2567 y
= protect_from_queue (y
, 0);
/* Sanity check: modes must agree, except a VOIDmode constant Y.  */
2569 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2572 /* Never force constant_p_rtx to memory. */
2573 if (GET_CODE (y
) == CONSTANT_P_RTX
)
/* A constant the target cannot accept directly is spilled to the
   constant pool in MODE.  */
2575 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2576 y
= force_const_mem (mode
, y
);
2578 /* If X or Y are memory references, verify that their addresses are valid.  */
2580 if (GET_CODE (x
) == MEM
2581 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2582 && ! push_operand (x
, GET_MODE (x
)))
2584 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2585 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2587 if (GET_CODE (y
) == MEM
2588 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2590 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2591 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2593 if (mode
== BLKmode
)
/* Operands are now valid; hand off to the low-level worker.  */
2596 return emit_move_insn_1 (x
, y
);
2599 /* Low level part of emit_move_insn.
2600 Called just like emit_move_insn, but assumes X and Y
2601 are basically valid. */
/* emit_move_insn_1: low-level part of emit_move_insn (see comment above).
   Assumes X and Y are basically valid.  Strategy, in order: a direct mov
   pattern for MODE; splitting complex modes into real/imag part moves;
   word-by-word moves for wide modes with no mov pattern.  NOTE(review):
   this excerpt has physical lines dropped by extraction (missing
   braces/returns); only comments were added here.  */
2604 emit_move_insn_1 (x
, y
)
2607 enum machine_mode mode
= GET_MODE (x
);
2608 enum machine_mode submode
;
2609 enum mode_class
class = GET_MODE_CLASS (mode
);
2612 if (mode
>= MAX_MACHINE_MODE
)
/* Case 1: the target has a move pattern for this mode — just use it.  */
2615 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2617 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2619 /* Expand complex moves by moving real part and imag part, if possible. */
2620 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2621 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2623 (class == MODE_COMPLEX_INT
2624 ? MODE_INT
: MODE_FLOAT
),
2626 && (mov_optab
->handlers
[(int) submode
].insn_code
2627 != CODE_FOR_nothing
))
2629 /* Don't split destination if it is a stack push. */
2630 int stack
= push_operand (x
, GET_MODE (x
));
2632 /* If this is a stack, push the highpart first, so it
2633 will be in the argument order.
2635 In that case, change_address is used only to convert
2636 the mode, not to change the address. */
2639 /* Note that the real part always precedes the imag part in memory
2640 regardless of machine's endianness. */
2641 #ifdef STACK_GROWS_DOWNWARD
2642 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2643 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2644 gen_imagpart (submode
, y
)));
2645 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2646 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2647 gen_realpart (submode
, y
)));
2649 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2650 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2651 gen_realpart (submode
, y
)));
2652 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2653 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2654 gen_imagpart (submode
, y
)));
2659 rtx realpart_x
, realpart_y
;
2660 rtx imagpart_x
, imagpart_y
;
2662 /* If this is a complex value with each part being smaller than a
2663 word, the usual calling sequence will likely pack the pieces into
2664 a single register. Unfortunately, SUBREG of hard registers only
2665 deals in terms of words, so we have a problem converting input
2666 arguments to the CONCAT of two registers that is used elsewhere
2667 for complex values. If this is before reload, we can copy it into
2668 memory and reload. FIXME, we should see about using extract and
2669 insert on integer registers, but complex short and complex char
2670 variables should be rarely used. */
2671 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2672 && (reload_in_progress
| reload_completed
) == 0)
2674 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2675 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2677 if (packed_dest_p
|| packed_src_p
)
2679 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2680 ? MODE_FLOAT
: MODE_INT
);
2682 enum machine_mode reg_mode
=
2683 mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2685 if (reg_mode
!= BLKmode
)
/* Bounce the value through a stack temporary viewed in both the
   packed register mode and the complex mode.  */
2687 rtx mem
= assign_stack_temp (reg_mode
,
2688 GET_MODE_SIZE (mode
), 0);
2690 rtx cmem
= change_address (mem
, mode
, NULL_RTX
);
2692 cfun
->cannot_inline
= N_("function using short complex types cannot be inline");
2696 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2697 emit_move_insn_1 (cmem
, y
);
2698 return emit_move_insn_1 (sreg
, mem
);
2702 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2703 emit_move_insn_1 (mem
, sreg
);
2704 return emit_move_insn_1 (x
, cmem
);
2710 realpart_x
= gen_realpart (submode
, x
);
2711 realpart_y
= gen_realpart (submode
, y
);
2712 imagpart_x
= gen_imagpart (submode
, x
);
2713 imagpart_y
= gen_imagpart (submode
, y
);
2715 /* Show the output dies here. This is necessary for SUBREGs
2716 of pseudos since we cannot track their lifetimes correctly;
2717 hard regs shouldn't appear here except as return values.
2718 We never want to emit such a clobber after reload. */
2720 && ! (reload_in_progress
|| reload_completed
)
2721 && (GET_CODE (realpart_x
) == SUBREG
2722 || GET_CODE (imagpart_x
) == SUBREG
))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2727 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2728 (realpart_x
, realpart_y
));
2729 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2730 (imagpart_x
, imagpart_y
));
2733 return get_last_insn ();
2736 /* This will handle any multi-word mode that lacks a move_insn pattern.
2737 However, you will get better code if you define such patterns,
2738 even if they must turn into multiple assembler instructions. */
2739 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2745 #ifdef PUSH_ROUNDING
2747 /* If X is a push on the stack, do the push now and replace
2748 X with a reference to the stack pointer. */
2749 if (push_operand (x
, GET_MODE (x
)))
2751 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2752 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2756 /* If we are in reload, see if either operand is a MEM whose address
2757 is scheduled for replacement. */
2758 if (reload_in_progress
&& GET_CODE (x
) == MEM
2759 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
2761 rtx
new = gen_rtx_MEM (GET_MODE (x
), inner
);
2763 MEM_COPY_ATTRIBUTES (new, x
);
2766 if (reload_in_progress
&& GET_CODE (y
) == MEM
2767 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
2769 rtx
new = gen_rtx_MEM (GET_MODE (y
), inner
);
2771 MEM_COPY_ATTRIBUTES (new, y
);
/* Move one word at a time; the mode's size rounded up to whole words
   bounds the loop.  */
2779 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2782 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2783 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2785 /* If we can't get a part of Y, put Y into memory if it is a
2786 constant. Otherwise, force it into a register. If we still
2787 can't get a part of Y, abort. */
2788 if (ypart
== 0 && CONSTANT_P (y
))
2790 y
= force_const_mem (mode
, y
);
2791 ypart
= operand_subword (y
, i
, 1, mode
);
2793 else if (ypart
== 0)
2794 ypart
= operand_subword_force (y
, i
, mode
);
2796 if (xpart
== 0 || ypart
== 0)
2799 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
2801 last_insn
= emit_move_insn (xpart
, ypart
);
2804 seq
= gen_sequence ();
2807 /* Show the output dies here. This is necessary for SUBREGs
2808 of pseudos since we cannot track their lifetimes correctly;
2809 hard regs shouldn't appear here except as return values.
2810 We never want to emit such a clobber after reload. */
2812 && ! (reload_in_progress
|| reload_completed
)
2813 && need_clobber
!= 0)
2815 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2826 /* Pushing data onto the stack. */
2828 /* Push a block of length SIZE (perhaps variable)
2829 and return an rtx to address the beginning of the block.
2830 Note that it is not possible for the value returned to be a QUEUED.
2831 The value may be virtual_outgoing_args_rtx.
2833 EXTRA is the number of bytes of padding to push in addition to SIZE.
2834 BELOW nonzero means this padding comes at low addresses;
2835 otherwise, the padding comes at high addresses. */
/* push_block: allocate a stack block of SIZE bytes (plus EXTRA bytes of
   padding, below or above per BELOW) and return an rtx addressing its
   start — see the doc comment above.  NOTE(review): this excerpt has
   physical lines dropped by extraction; only comments were added/fixed
   here.  */
2838 push_block (size
, extra
, below
)
/* SIZE arrives in ptr_mode; stack arithmetic below is done in Pmode.  */
2844 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2845 if (CONSTANT_P (size
))
2846 anti_adjust_stack (plus_constant (size
, extra
));
2847 else if (GET_CODE (size
) == REG
&& extra
== 0)
2848 anti_adjust_stack (size
);
/* Variable size with padding: compute SIZE + EXTRA into a register
   first, then adjust the stack by that amount.  */
2851 temp
= copy_to_mode_reg (Pmode
, size
);
2853 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2854 temp
, 0, OPTAB_LIB_WIDEN
);
2855 anti_adjust_stack (temp
);
2858 #ifndef STACK_GROWS_DOWNWARD
2859 #ifdef ARGS_GROW_DOWNWARD
2860 if (!ACCUMULATE_OUTGOING_ARGS
)
2868 /* Return the lowest stack address when STACK or ARGS grow downward and
2869 we are not accumulating outgoing arguments (the c4x port uses such
conventions).  */
2871 temp
= virtual_outgoing_args_rtx
;
2872 if (extra
!= 0 && below
)
2873 temp
= plus_constant (temp
, extra
);
2877 if (GET_CODE (size
) == CONST_INT
)
2878 temp
= plus_constant (virtual_outgoing_args_rtx
,
2879 -INTVAL (size
) - (below
? 0 : extra
));
2880 else if (extra
!= 0 && !below
)
2881 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2882 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2884 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2885 negate_rtx (Pmode
, size
));
/* Legitimize the computed address before returning it.  */
2888 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2894 return gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2897 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2898 block of SIZE bytes. */
2901 get_push_address (size
)
2906 if (STACK_PUSH_CODE
== POST_DEC
)
2907 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2908 else if (STACK_PUSH_CODE
== POST_INC
)
2909 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2911 temp
= stack_pointer_rtx
;
2913 return copy_to_reg (temp
);
2916 /* Generate code to push X onto the stack, assuming it has mode MODE and
2918 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
2920 SIZE is an rtx for the size of data to be copied (in bytes),
2921 needed only if X is BLKmode.
2923 ALIGN is maximum alignment we can assume.
2925 If PARTIAL and REG are both nonzero, then copy that many of the first
2926 words of X into registers starting with REG, and push the rest of X.
2927 The amount of space pushed is decreased by PARTIAL words,
2928 rounded *down* to a multiple of PARM_BOUNDARY.
2929 REG must be a hard register in this case.
2930 If REG is zero but PARTIAL is not, take all other actions for an
2931 argument partially in registers, but do not actually load any
2934 EXTRA is the amount in bytes of extra space to leave next to this arg.
2935 This is ignored if an argument block has already been allocated.
2937 On a machine that lacks real push insns, ARGS_ADDR is the address of
2938 the bottom of the argument block for this call. We use indexing off there
2939 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2940 argument block has not been preallocated.
2942 ARGS_SO_FAR is the size of args previously pushed for this call.
2944 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2945 for arguments passed in registers. If nonzero, it will be the number
2946 of bytes required. */
/* emit_push_insn: push X (mode MODE) onto the stack — see the large doc
   comment above for the meaning of TYPE, SIZE, ALIGN, PARTIAL, REG, EXTRA,
   ARGS_ADDR, ARGS_SO_FAR and REG_PARM_STACK_SPACE.  Three major cases:
   BLKmode blocks, scalars partly passed in registers, and plain scalars.
   NOTE(review): this excerpt has physical lines dropped by extraction
   (missing braces/declarations); only comments were added here.  */
2949 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2950 args_addr
, args_so_far
, reg_parm_stack_space
,
2953 enum machine_mode mode
;
2962 int reg_parm_stack_space
;
2966 enum direction stack_direction
2967 #ifdef STACK_GROWS_DOWNWARD
2973 /* Decide where to pad the argument: `downward' for below,
2974 `upward' for above, or `none' for don't pad it.
2975 Default is below for small data on big-endian machines; else above. */
2976 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2978 /* Invert direction if stack is post-update. */
2979 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2980 if (where_pad
!= none
)
2981 where_pad
= (where_pad
== downward
? upward
: downward
);
2983 xinner
= x
= protect_from_queue (x
, 0);
/* ---- Case 1: BLKmode — copy a block into the stack.  ---- */
2985 if (mode
== BLKmode
)
2987 /* Copy a block into the stack, entirely or partially. */
2990 int used
= partial
* UNITS_PER_WORD
;
2991 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2999 /* USED is now the # of bytes we need not copy to the stack
3000 because registers will take care of them. */
3003 xinner
= change_address (xinner
, BLKmode
,
3004 plus_constant (XEXP (xinner
, 0), used
));
3006 /* If the partial register-part of the arg counts in its stack size,
3007 skip the part of stack space corresponding to the registers.
3008 Otherwise, start copying to the beginning of the stack space,
3009 by setting SKIP to 0. */
3010 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3012 #ifdef PUSH_ROUNDING
3013 /* Do it with several push insns if that doesn't take lots of insns
3014 and if there is no difficulty with push insns that skip bytes
3015 on the stack for alignment purposes. */
3018 && GET_CODE (size
) == CONST_INT
3020 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3021 /* Here we avoid the case of a structure whose weak alignment
3022 forces many pushes of a small amount of data,
3023 and such small pushes do rounding that causes trouble. */
3024 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3025 || align
>= BIGGEST_ALIGNMENT
3026 || PUSH_ROUNDING (align
) == align
)
3027 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3029 /* Push padding now if padding above and stack grows down,
3030 or if padding below and stack grows up.
3031 But if space already allocated, this has already been done. */
3032 if (extra
&& args_addr
== 0
3033 && where_pad
!= none
&& where_pad
!= stack_direction
)
3034 anti_adjust_stack (GEN_INT (extra
));
3036 stack_pointer_delta
+= INTVAL (size
) - used
;
3037 move_by_pieces (gen_rtx_MEM (BLKmode
, gen_push_operand ()), xinner
,
3038 INTVAL (size
) - used
, align
);
/* -fcheck-memory-usage instrumentation for the pushed block.  */
3040 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3044 in_check_memory_usage
= 1;
3045 temp
= get_push_address (INTVAL (size
) - used
);
3046 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3047 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3049 XEXP (xinner
, 0), Pmode
,
3050 GEN_INT (INTVAL (size
) - used
),
3051 TYPE_MODE (sizetype
));
3053 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3055 GEN_INT (INTVAL (size
) - used
),
3056 TYPE_MODE (sizetype
),
3057 GEN_INT (MEMORY_USE_RW
),
3058 TYPE_MODE (integer_type_node
));
3059 in_check_memory_usage
= 0;
3063 #endif /* PUSH_ROUNDING */
3067 /* Otherwise make space on the stack and copy the data
3068 to the address of that space. */
3070 /* Deduct words put into registers from the size we must copy. */
3073 if (GET_CODE (size
) == CONST_INT
)
3074 size
= GEN_INT (INTVAL (size
) - used
);
3076 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3077 GEN_INT (used
), NULL_RTX
, 0,
3081 /* Get the address of the stack space.
3082 In this case, we do not deal with EXTRA separately.
3083 A single stack adjust will do. */
3086 temp
= push_block (size
, extra
, where_pad
== downward
);
3089 else if (GET_CODE (args_so_far
) == CONST_INT
)
3090 temp
= memory_address (BLKmode
,
3091 plus_constant (args_addr
,
3092 skip
+ INTVAL (args_so_far
)));
3094 temp
= memory_address (BLKmode
,
3095 plus_constant (gen_rtx_PLUS (Pmode
,
3099 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3101 in_check_memory_usage
= 1;
3102 target
= copy_to_reg (temp
);
3103 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3104 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3106 XEXP (xinner
, 0), Pmode
,
3107 size
, TYPE_MODE (sizetype
));
3109 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3111 size
, TYPE_MODE (sizetype
),
3112 GEN_INT (MEMORY_USE_RW
),
3113 TYPE_MODE (integer_type_node
));
3114 in_check_memory_usage
= 0;
3117 target
= gen_rtx_MEM (BLKmode
, temp
);
3121 set_mem_attributes (target
, type
, 1);
3122 /* Function incoming arguments may overlap with sibling call
3123 outgoing arguments and we cannot allow reordering of reads
3124 from function arguments with stores to outgoing arguments
3125 of sibling calls. */
3126 MEM_ALIAS_SET (target
) = 0;
3129 /* TEMP is the address of the block. Copy the data there. */
3130 if (GET_CODE (size
) == CONST_INT
3131 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3133 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
/* Try each integer mode's movstr pattern, narrowest first, falling
   back to a library call if none applies.  */
3138 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3139 enum machine_mode mode
;
3141 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3143 mode
= GET_MODE_WIDER_MODE (mode
))
3145 enum insn_code code
= movstr_optab
[(int) mode
];
3146 insn_operand_predicate_fn pred
;
3148 if (code
!= CODE_FOR_nothing
3149 && ((GET_CODE (size
) == CONST_INT
3150 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3151 <= (GET_MODE_MASK (mode
) >> 1)))
3152 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3153 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3154 || ((*pred
) (target
, BLKmode
)))
3155 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3156 || ((*pred
) (xinner
, BLKmode
)))
3157 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3158 || ((*pred
) (opalign
, VOIDmode
))))
3160 rtx op2
= convert_to_mode (mode
, size
, 1);
3161 rtx last
= get_last_insn ();
3164 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3165 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3166 op2
= copy_to_mode_reg (mode
, op2
);
3168 pat
= GEN_FCN ((int) code
) (target
, xinner
,
/* Pattern generation failed: roll back any insns it emitted.  */
3176 delete_insns_since (last
);
3181 if (!ACCUMULATE_OUTGOING_ARGS
)
3183 /* If the source is referenced relative to the stack pointer,
3184 copy it to another register to stabilize it. We do not need
3185 to do this if we know that we won't be changing sp. */
3187 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3188 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3189 temp
= copy_to_reg (temp
);
3192 /* Make inhibit_defer_pop nonzero around the library call
3193 to force it to pop the bcopy-arguments right away. */
3195 #ifdef TARGET_MEM_FUNCTIONS
3196 emit_library_call (memcpy_libfunc
, 0,
3197 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3198 convert_to_mode (TYPE_MODE (sizetype
),
3199 size
, TREE_UNSIGNED (sizetype
)),
3200 TYPE_MODE (sizetype
));
3202 emit_library_call (bcopy_libfunc
, 0,
3203 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3204 convert_to_mode (TYPE_MODE (integer_type_node
),
3206 TREE_UNSIGNED (integer_type_node
)),
3207 TYPE_MODE (integer_type_node
));
/* ---- Case 2: scalar partly in registers.  ---- */
3212 else if (partial
> 0)
3214 /* Scalar partly in registers. */
3216 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3219 /* # words of start of argument
3220 that we must make space for but need not store. */
3221 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3222 int args_offset
= INTVAL (args_so_far
);
3225 /* Push padding now if padding above and stack grows down,
3226 or if padding below and stack grows up.
3227 But if space already allocated, this has already been done. */
3228 if (extra
&& args_addr
== 0
3229 && where_pad
!= none
&& where_pad
!= stack_direction
)
3230 anti_adjust_stack (GEN_INT (extra
));
3232 /* If we make space by pushing it, we might as well push
3233 the real data. Otherwise, we can leave OFFSET nonzero
3234 and leave the space uninitialized. */
3238 /* Now NOT_STACK gets the number of words that we don't need to
3239 allocate on the stack. */
3240 not_stack
= partial
- offset
;
3242 /* If the partial register-part of the arg counts in its stack size,
3243 skip the part of stack space corresponding to the registers.
3244 Otherwise, start copying to the beginning of the stack space,
3245 by setting SKIP to 0. */
3246 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3248 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3249 x
= validize_mem (force_const_mem (mode
, x
));
3251 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3252 SUBREGs of such registers are not allowed. */
3253 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3254 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3255 x
= copy_to_reg (x
);
3257 /* Loop over all the words allocated on the stack for this arg. */
3258 /* We can do it by words, because any scalar bigger than a word
3259 has a size a multiple of a word. */
3260 #ifndef PUSH_ARGS_REVERSED
3261 for (i
= not_stack
; i
< size
; i
++)
3263 for (i
= size
- 1; i
>= not_stack
; i
--)
3265 if (i
>= not_stack
+ offset
)
/* Recursive word-sized push for each stack-resident word of X.  */
3266 emit_push_insn (operand_subword_force (x
, i
, mode
),
3267 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3269 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3271 reg_parm_stack_space
, alignment_pad
);
/* ---- Case 3: ordinary scalar push.  ---- */
3276 rtx target
= NULL_RTX
;
3279 /* Push padding now if padding above and stack grows down,
3280 or if padding below and stack grows up.
3281 But if space already allocated, this has already been done. */
3282 if (extra
&& args_addr
== 0
3283 && where_pad
!= none
&& where_pad
!= stack_direction
)
3284 anti_adjust_stack (GEN_INT (extra
));
3286 #ifdef PUSH_ROUNDING
3287 if (args_addr
== 0 && PUSH_ARGS
)
3289 addr
= gen_push_operand ();
3290 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3295 if (GET_CODE (args_so_far
) == CONST_INT
)
3297 = memory_address (mode
,
3298 plus_constant (args_addr
,
3299 INTVAL (args_so_far
)));
3301 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3306 dest
= gen_rtx_MEM (mode
, addr
);
3309 set_mem_attributes (dest
, type
, 1);
3310 /* Function incoming arguments may overlap with sibling call
3311 outgoing arguments and we cannot allow reordering of reads
3312 from function arguments with stores to outgoing arguments
3313 of sibling calls. */
3314 MEM_ALIAS_SET (dest
) = 0;
3317 emit_move_insn (dest
, x
);
3319 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3321 in_check_memory_usage
= 1;
3323 target
= get_push_address (GET_MODE_SIZE (mode
));
3325 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3326 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3329 GEN_INT (GET_MODE_SIZE (mode
)),
3330 TYPE_MODE (sizetype
));
3332 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3334 GEN_INT (GET_MODE_SIZE (mode
)),
3335 TYPE_MODE (sizetype
),
3336 GEN_INT (MEMORY_USE_RW
),
3337 TYPE_MODE (integer_type_node
));
3338 in_check_memory_usage
= 0;
3343 /* If part should go in registers, copy that part
3344 into the appropriate registers. Do this now, at the end,
3345 since mem-to-mem copies above may do function calls. */
3346 if (partial
> 0 && reg
!= 0)
3348 /* Handle calls that pass values in multiple non-contiguous locations.
3349 The Irix 6 ABI has examples of this. */
3350 if (GET_CODE (reg
) == PARALLEL
)
3351 emit_group_load (reg
, x
, -1, align
); /* ??? size? */
3353 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3356 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3357 anti_adjust_stack (GEN_INT (extra
));
3359 if (alignment_pad
&& args_addr
== 0)
3360 anti_adjust_stack (alignment_pad
);
3363 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3371 /* Only registers can be subtargets. */
3372 || GET_CODE (x
) != REG
3373 /* If the register is readonly, it can't be set more than once. */
3374 || RTX_UNCHANGING_P (x
)
3375 /* Don't use hard regs to avoid extending their life. */
3376 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3377 /* Avoid subtargets inside loops,
3378 since they hide some invariant expressions. */
3379 || preserve_subexpressions_p ())
3383 /* Expand an assignment that stores the value of FROM into TO.
3384 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3385 (This may contain a QUEUED rtx;
3386 if the value is constant, this rtx is a constant.)
3387 Otherwise, the returned value is NULL_RTX.
3389 SUGGEST_REG is no longer actually used.
3390 It used to mean, copy the value through a register
3391 and return that register, if that is possible.
3392 We now use WANT_VALUE to decide whether to do this. */
3395 expand_assignment (to
, from
, want_value
, suggest_reg
)
3398 int suggest_reg ATTRIBUTE_UNUSED
;
3400 register rtx to_rtx
= 0;
3403 /* Don't crash if the lhs of the assignment was erroneous. */
3405 if (TREE_CODE (to
) == ERROR_MARK
)
3407 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3408 return want_value
? result
: NULL_RTX
;
3411 /* Assignment of a structure component needs special treatment
3412 if the structure component's rtx is not simply a MEM.
3413 Assignment of an array element at a constant index, and assignment of
3414 an array element in an unaligned packed structure field, has the same
3417 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3418 || TREE_CODE (to
) == ARRAY_REF
)
3420 enum machine_mode mode1
;
3421 HOST_WIDE_INT bitsize
, bitpos
;
3426 unsigned int alignment
;
3429 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3430 &unsignedp
, &volatilep
, &alignment
);
3432 /* If we are going to use store_bit_field and extract_bit_field,
3433 make sure to_rtx will be safe for multiple use. */
3435 if (mode1
== VOIDmode
&& want_value
)
3436 tem
= stabilize_reference (tem
);
3438 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3441 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3443 if (GET_CODE (to_rtx
) != MEM
)
3446 if (GET_MODE (offset_rtx
) != ptr_mode
)
3448 #ifdef POINTERS_EXTEND_UNSIGNED
3449 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
3451 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3455 /* A constant address in TO_RTX can have VOIDmode, we must not try
3456 to call force_reg for that case. Avoid that case. */
3457 if (GET_CODE (to_rtx
) == MEM
3458 && GET_MODE (to_rtx
) == BLKmode
3459 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3461 && (bitpos
% bitsize
) == 0
3462 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3463 && alignment
== GET_MODE_ALIGNMENT (mode1
))
3465 rtx temp
= change_address (to_rtx
, mode1
,
3466 plus_constant (XEXP (to_rtx
, 0),
3469 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3472 to_rtx
= change_address (to_rtx
, mode1
,
3473 force_reg (GET_MODE (XEXP (temp
, 0)),
3478 to_rtx
= change_address (to_rtx
, VOIDmode
,
3479 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
3480 force_reg (ptr_mode
,
3486 if (GET_CODE (to_rtx
) == MEM
)
3488 /* When the offset is zero, to_rtx is the address of the
3489 structure we are storing into, and hence may be shared.
3490 We must make a new MEM before setting the volatile bit. */
3492 to_rtx
= copy_rtx (to_rtx
);
3494 MEM_VOLATILE_P (to_rtx
) = 1;
3496 #if 0 /* This was turned off because, when a field is volatile
3497 in an object which is not volatile, the object may be in a register,
3498 and then we would abort over here. */
3504 if (TREE_CODE (to
) == COMPONENT_REF
3505 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3508 to_rtx
= copy_rtx (to_rtx
);
3510 RTX_UNCHANGING_P (to_rtx
) = 1;
3513 /* Check the access. */
3514 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3519 enum machine_mode best_mode
;
3521 best_mode
= get_best_mode (bitsize
, bitpos
,
3522 TYPE_ALIGN (TREE_TYPE (tem
)),
3524 if (best_mode
== VOIDmode
)
3527 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3528 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3529 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3530 size
*= GET_MODE_SIZE (best_mode
);
3532 /* Check the access right of the pointer. */
3533 in_check_memory_usage
= 1;
3535 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3537 GEN_INT (size
), TYPE_MODE (sizetype
),
3538 GEN_INT (MEMORY_USE_WO
),
3539 TYPE_MODE (integer_type_node
));
3540 in_check_memory_usage
= 0;
3543 /* If this is a varying-length object, we must get the address of
3544 the source and do an explicit block move. */
3547 unsigned int from_align
;
3548 rtx from_rtx
= expand_expr_unaligned (from
, &from_align
);
3550 = change_address (to_rtx
, VOIDmode
,
3551 plus_constant (XEXP (to_rtx
, 0),
3552 bitpos
/ BITS_PER_UNIT
));
3554 emit_block_move (inner_to_rtx
, from_rtx
, expr_size (from
),
3555 MIN (alignment
, from_align
));
3562 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3564 /* Spurious cast for HPUX compiler. */
3565 ? ((enum machine_mode
)
3566 TYPE_MODE (TREE_TYPE (to
)))
3570 int_size_in_bytes (TREE_TYPE (tem
)),
3571 get_alias_set (to
));
3573 preserve_temp_slots (result
);
3577 /* If the value is meaningful, convert RESULT to the proper mode.
3578 Otherwise, return nothing. */
3579 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3580 TYPE_MODE (TREE_TYPE (from
)),
3582 TREE_UNSIGNED (TREE_TYPE (to
)))
3587 /* If the rhs is a function call and its value is not an aggregate,
3588 call the function before we start to compute the lhs.
3589 This is needed for correct code for cases such as
3590 val = setjmp (buf) on machines where reference to val
3591 requires loading up part of an address in a separate insn.
3593 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3594 since it might be a promoted variable where the zero- or sign- extension
3595 needs to be done. Handling this in the normal way is safe because no
3596 computation is done before the call. */
3597 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3598 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3599 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3600 && GET_CODE (DECL_RTL (to
)) == REG
))
3605 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3607 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3609 /* Handle calls that return values in multiple non-contiguous locations.
3610 The Irix 6 ABI has examples of this. */
3611 if (GET_CODE (to_rtx
) == PARALLEL
)
3612 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)),
3613 TYPE_ALIGN (TREE_TYPE (from
)));
3614 else if (GET_MODE (to_rtx
) == BLKmode
)
3615 emit_block_move (to_rtx
, value
, expr_size (from
),
3616 TYPE_ALIGN (TREE_TYPE (from
)));
3619 #ifdef POINTERS_EXTEND_UNSIGNED
3620 if (TREE_CODE (TREE_TYPE (to
)) == REFERENCE_TYPE
3621 || TREE_CODE (TREE_TYPE (to
)) == POINTER_TYPE
)
3622 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3624 emit_move_insn (to_rtx
, value
);
3626 preserve_temp_slots (to_rtx
);
3629 return want_value
? to_rtx
: NULL_RTX
;
3632 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3633 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3637 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3638 if (GET_CODE (to_rtx
) == MEM
)
3639 MEM_ALIAS_SET (to_rtx
) = get_alias_set (to
);
3642 /* Don't move directly into a return register. */
3643 if (TREE_CODE (to
) == RESULT_DECL
3644 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3649 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3651 if (GET_CODE (to_rtx
) == PARALLEL
)
3652 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)),
3653 TYPE_ALIGN (TREE_TYPE (from
)));
3655 emit_move_insn (to_rtx
, temp
);
3657 preserve_temp_slots (to_rtx
);
3660 return want_value
? to_rtx
: NULL_RTX
;
3663 /* In case we are returning the contents of an object which overlaps
3664 the place the value is being stored, use a safe function when copying
3665 a value through a pointer into a structure value return block. */
3666 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3667 && current_function_returns_struct
3668 && !current_function_returns_pcc_struct
)
3673 size
= expr_size (from
);
3674 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3675 EXPAND_MEMORY_USE_DONT
);
3677 /* Copy the rights of the bitmap. */
3678 if (current_function_check_memory_usage
)
3679 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3680 XEXP (to_rtx
, 0), Pmode
,
3681 XEXP (from_rtx
, 0), Pmode
,
3682 convert_to_mode (TYPE_MODE (sizetype
),
3683 size
, TREE_UNSIGNED (sizetype
)),
3684 TYPE_MODE (sizetype
));
3686 #ifdef TARGET_MEM_FUNCTIONS
3687 emit_library_call (memcpy_libfunc
, 0,
3688 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3689 XEXP (from_rtx
, 0), Pmode
,
3690 convert_to_mode (TYPE_MODE (sizetype
),
3691 size
, TREE_UNSIGNED (sizetype
)),
3692 TYPE_MODE (sizetype
));
3694 emit_library_call (bcopy_libfunc
, 0,
3695 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3696 XEXP (to_rtx
, 0), Pmode
,
3697 convert_to_mode (TYPE_MODE (integer_type_node
),
3698 size
, TREE_UNSIGNED (integer_type_node
)),
3699 TYPE_MODE (integer_type_node
));
3702 preserve_temp_slots (to_rtx
);
3705 return want_value
? to_rtx
: NULL_RTX
;
3708 /* Compute FROM and store the value in the rtx we got. */
3711 result
= store_expr (from
, to_rtx
, want_value
);
3712 preserve_temp_slots (result
);
3715 return want_value
? result
: NULL_RTX
;
3718 /* Generate code for computing expression EXP,
3719 and storing the value into TARGET.
3720 TARGET may contain a QUEUED rtx.
3722 If WANT_VALUE is nonzero, return a copy of the value
3723 not in TARGET, so that we can be sure to use the proper
3724 value in a containing expression even if TARGET has something
3725 else stored in it. If possible, we copy the value through a pseudo
3726 and return that pseudo. Or, if the value is constant, we try to
3727 return the constant. In some cases, we return a pseudo
3728 copied *from* TARGET.
3730 If the mode is BLKmode then we may return TARGET itself.
3731 It turns out that in BLKmode it doesn't cause a problem.
3732 because C has no operators that could combine two different
3733 assignments into the same BLKmode object with different values
3734 with no sequence point. Will other languages need this to
3737 If WANT_VALUE is 0, we return NULL, to make sure
3738 to catch quickly any cases where the caller uses the value
3739 and fails to set WANT_VALUE. */
/* NOTE(review): this extract is line-exploded and lossy -- the embedded
   original line numbers jump (e.g. 3739 -> 3742, 4045 -> 4052), so some
   statements, declarations and braces of this function are not visible
   here.  Added comments describe only what the visible text shows.  */
3742 store_expr (exp
, target
, want_value
)
3744 register rtx target
;
3748 int dont_return_target
= 0;
/* COMPOUND_EXPR: evaluate operand 0 for side effects only, then store
   operand 1 into TARGET via a tail recursion.  */
3750 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3752 /* Perform first part of compound expression, then assign from second
3754 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3756 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3758 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3760 /* For conditional expression, get safe form of the target. Then
3761 test the condition, doing the appropriate assignment on either
3762 side. This avoids the creation of unnecessary temporaries.
3763 For non-BLKmode, it is more efficient not to do this. */
3765 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3768 target
= protect_from_queue (target
, 1);
3770 do_pending_stack_adjust ();
3772 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3773 start_cleanup_deferral ();
3774 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3775 end_cleanup_deferral ();
3777 emit_jump_insn (gen_jump (lab2
));
3780 start_cleanup_deferral ();
3781 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3782 end_cleanup_deferral ();
3787 return want_value
? target
: NULL_RTX
;
3789 else if (queued_subexp_p (target
))
3790 /* If target contains a postincrement, let's not risk
3791 using it as the place to generate the rhs. */
3793 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3795 /* Expand EXP into a new pseudo. */
3796 temp
= gen_reg_rtx (GET_MODE (target
));
3797 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3800 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3802 /* If target is volatile, ANSI requires accessing the value
3803 *from* the target, if it is accessed. So make that happen.
3804 In no case return the target itself. */
3805 if (! MEM_VOLATILE_P (target
) && want_value
)
3806 dont_return_target
= 1;
3808 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3809 && GET_MODE (target
) != BLKmode
)
3810 /* If target is in memory and caller wants value in a register instead,
3811 arrange that. Pass TARGET as target for expand_expr so that,
3812 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3813 We know expand_expr will not use the target in that case.
3814 Don't do this if TARGET is volatile because we are supposed
3815 to write it and then read it. */
3817 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3818 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3819 temp
= copy_to_reg (temp
);
3820 dont_return_target
= 1;
3822 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3823 /* If this is an scalar in a register that is stored in a wider mode
3824 than the declared mode, compute the result into its declared mode
3825 and then convert to the wider mode. Our value is the computed
3828 /* If we don't want a value, we can do the conversion inside EXP,
3829 which will often result in some optimizations. Do the conversion
3830 in two steps: first change the signedness, if needed, then
3831 the extend. But don't do this if the type of EXP is a subtype
3832 of something else since then the conversion might involve
3833 more than just converting modes. */
3834 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3835 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3837 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3838 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3841 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3845 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3846 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3850 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3852 /* If TEMP is a volatile MEM and we want a result value, make
3853 the access now so it gets done only once. Likewise if
3854 it contains TARGET. */
3855 if (GET_CODE (temp
) == MEM
&& want_value
3856 && (MEM_VOLATILE_P (temp
)
3857 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3858 temp
= copy_to_reg (temp
);
3860 /* If TEMP is a VOIDmode constant, use convert_modes to make
3861 sure that we properly convert it. */
3862 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3863 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3864 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3865 SUBREG_PROMOTED_UNSIGNED_P (target
));
3867 convert_move (SUBREG_REG (target
), temp
,
3868 SUBREG_PROMOTED_UNSIGNED_P (target
));
3870 /* If we promoted a constant, change the mode back down to match
3871 target. Otherwise, the caller might get confused by a result whose
3872 mode is larger than expected. */
3874 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
3875 && GET_MODE (temp
) != VOIDmode
)
3877 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
3878 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3879 SUBREG_PROMOTED_UNSIGNED_P (temp
)
3880 = SUBREG_PROMOTED_UNSIGNED_P (target
);
3883 return want_value
? temp
: NULL_RTX
;
/* Ordinary case: none of the special forms above matched.  Expand EXP,
   hinting TARGET and TARGET's mode, then decide below whether TEMP or
   TARGET is the value to hand back.  */
3887 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3888 /* Return TARGET if it's a specified hardware register.
3889 If TARGET is a volatile mem ref, either return TARGET
3890 or return a reg copied *from* TARGET; ANSI requires this.
3892 Otherwise, if TEMP is not TARGET, return TEMP
3893 if it is constant (for efficiency),
3894 or if we really want the correct value. */
3895 if (!(target
&& GET_CODE (target
) == REG
3896 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3897 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3898 && ! rtx_equal_p (temp
, target
)
3899 && (CONSTANT_P (temp
) || want_value
))
3900 dont_return_target
= 1;
3903 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3904 the same as that of TARGET, adjust the constant. This is needed, for
3905 example, in case it is a CONST_DOUBLE and we want only a word-sized
3907 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3908 && TREE_CODE (exp
) != ERROR_MARK
3909 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3910 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3911 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
/* -fcheck-memory-usage instrumentation for aggregate stores to memory:
   emit a Checker library call recording the write (or bitmap copy when
   the source is also in memory).  IN_CHECK_MEMORY_USAGE guards against
   recursive instrumentation while the call itself is expanded.  */
3913 if (current_function_check_memory_usage
3914 && GET_CODE (target
) == MEM
3915 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
3917 in_check_memory_usage
= 1;
3918 if (GET_CODE (temp
) == MEM
)
3919 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3920 XEXP (target
, 0), Pmode
,
3921 XEXP (temp
, 0), Pmode
,
3922 expr_size (exp
), TYPE_MODE (sizetype
));
3924 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3925 XEXP (target
, 0), Pmode
,
3926 expr_size (exp
), TYPE_MODE (sizetype
),
3927 GEN_INT (MEMORY_USE_WO
),
3928 TYPE_MODE (integer_type_node
));
3929 in_check_memory_usage
= 0;
3932 /* If value was not generated in the target, store it there.
3933 Convert the value to TARGET's type first if nec. */
3934 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3935 one or both of them are volatile memory refs, we have to distinguish
3937 - expand_expr has used TARGET. In this case, we must not generate
3938 another copy. This can be detected by TARGET being equal according
3940 - expand_expr has not used TARGET - that means that the source just
3941 happens to have the same RTX form. Since temp will have been created
3942 by expand_expr, it will compare unequal according to == .
3943 We must generate a copy in this case, to reach the correct number
3944 of volatile memory references. */
3946 if ((! rtx_equal_p (temp
, target
)
3947 || (temp
!= target
&& (side_effects_p (temp
)
3948 || side_effects_p (target
))))
3949 && TREE_CODE (exp
) != ERROR_MARK
)
3951 target
= protect_from_queue (target
, 1);
3952 if (GET_MODE (temp
) != GET_MODE (target
)
3953 && GET_MODE (temp
) != VOIDmode
)
3955 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3956 if (dont_return_target
)
3958 /* In this case, we will return TEMP,
3959 so make sure it has the proper mode.
3960 But don't forget to store the value into TARGET. */
3961 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3962 emit_move_insn (target
, temp
);
3965 convert_move (target
, temp
, unsignedp
);
3968 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3970 /* Handle copying a string constant into an array.
3971 The string constant may be shorter than the array.
3972 So copy just the string's actual length, and clear the rest. */
3976 /* Get the size of the data type of the string,
3977 which is actually the size of the target. */
3978 size
= expr_size (exp
);
3979 if (GET_CODE (size
) == CONST_INT
3980 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3981 emit_block_move (target
, temp
, size
, TYPE_ALIGN (TREE_TYPE (exp
)));
3984 /* Compute the size of the data to copy from the string. */
3986 = size_binop (MIN_EXPR
,
3987 make_tree (sizetype
, size
),
3988 size_int (TREE_STRING_LENGTH (exp
)));
3989 unsigned int align
= TYPE_ALIGN (TREE_TYPE (exp
));
3990 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3994 /* Copy that much. */
3995 emit_block_move (target
, temp
, copy_size_rtx
,
3996 TYPE_ALIGN (TREE_TYPE (exp
)));
3998 /* Figure out how much is left in TARGET that we have to clear.
3999 Do all calculations in ptr_mode. */
4001 addr
= XEXP (target
, 0);
4002 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
/* Constant copy size: advance ADDR past the copied bytes, shrink SIZE,
   and tighten ALIGN to the largest power of two dividing the copied
   length (the INTVAL & -INTVAL idiom isolates the lowest set bit).  */
4004 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4006 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4007 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4008 align
= MIN (align
, (BITS_PER_UNIT
4009 * (INTVAL (copy_size_rtx
)
4010 & - INTVAL (copy_size_rtx
))));
/* Variable copy size: compute ADDR + copy_size and SIZE - copy_size at
   run time, and skip the clearing entirely if the remainder is
   negative (string longer than the array).  */
4014 addr
= force_reg (ptr_mode
, addr
);
4015 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4016 copy_size_rtx
, NULL_RTX
, 0,
4019 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4020 copy_size_rtx
, NULL_RTX
, 0,
4023 align
= BITS_PER_UNIT
;
4024 label
= gen_label_rtx ();
4025 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4026 GET_MODE (size
), 0, 0, label
);
4028 align
= MIN (align
, expr_align (copy_size
));
4030 if (size
!= const0_rtx
)
4032 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4034 MEM_COPY_ATTRIBUTES (dest
, target
);
4036 /* Be sure we can write on ADDR. */
4037 in_check_memory_usage
= 1;
4038 if (current_function_check_memory_usage
)
4039 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
4041 size
, TYPE_MODE (sizetype
),
4042 GEN_INT (MEMORY_USE_WO
),
4043 TYPE_MODE (integer_type_node
));
4044 in_check_memory_usage
= 0;
4045 clear_storage (dest
, size
, align
);
4052 /* Handle calls that return values in multiple non-contiguous locations.
4053 The Irix 6 ABI has examples of this. */
4054 else if (GET_CODE (target
) == PARALLEL
)
4055 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)),
4056 TYPE_ALIGN (TREE_TYPE (exp
)));
4057 else if (GET_MODE (temp
) == BLKmode
)
4058 emit_block_move (target
, temp
, expr_size (exp
),
4059 TYPE_ALIGN (TREE_TYPE (exp
)));
4061 emit_move_insn (target
, temp
);
4064 /* If we don't want a value, return NULL_RTX. */
4068 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4069 ??? The latter test doesn't seem to make sense. */
4070 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4073 /* Return TARGET itself if it is a hard register. */
4074 else if (want_value
&& GET_MODE (target
) != BLKmode
4075 && ! (GET_CODE (target
) == REG
4076 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4077 return copy_to_reg (target
);
4083 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): the function header (original lines 4084-4090) and several
   of this switch's case labels are elided from the extract; annotations on
   label-less arms below are inferences -- confirm against the full file.  */
4091 switch (TREE_CODE (exp
))
/* A NON_LVALUE_EXPR wrapper is transparent: recurse on its operand.  */
4095 case NON_LVALUE_EXPR
:
4096 return is_zeros_p (TREE_OPERAND (exp
, 0));
/* (elided case label -- presumably INTEGER_CST; TODO confirm)  */
4099 return integer_zerop (exp
);
/* (elided label/keyword -- a complex constant is zero iff both its real
   and imaginary parts are)  */
4103 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
/* (elided case label -- presumably REAL_CST; compares against dconst0)  */
4106 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
/* Constructor arm: a SET_TYPE constructor is all-zero iff it has no
   elements; otherwise every element value must itself be all zeros.  */
4109 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4110 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4111 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4112 if (! is_zeros_p (TREE_VALUE (elt
)))
4122 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): some original lines of this function (parameter
   declaration, braces, and the loop's counter updates at ~4145-4148) are
   elided from this extract.  */
4125 mostly_zeros_p (exp
)
4128 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4130 int elts
= 0, zeros
= 0;
4131 tree elt
= CONSTRUCTOR_ELTS (exp
);
4132 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4134 /* If there are no ranges of true bits, it is all zero. */
4135 return elt
== NULL_TREE
;
4137 for (; elt
; elt
= TREE_CHAIN (elt
))
4139 /* We do not handle the case where the index is a RANGE_EXPR,
4140 so the statistic will be somewhat inaccurate.
4141 We do make a more accurate count in store_constructor itself,
4142 so since this function is only used for nested array elements,
4143 this should be close enough. */
4144 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* (elided lines ~4145-4148: the per-element counters ZEROS and ELTS are
   updated here; not visible in this extract)  */
4149 return 4 * zeros
>= 3 * elts
;
/* Non-constructor leaf: "mostly zero" degenerates to "entirely zero".  */
4152 return is_zeros_p (exp
);
4155 /* Helper function for store_constructor.
4156 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4157 TYPE is the type of the CONSTRUCTOR, not the element type.
4158 ALIGN and CLEARED are as for store_constructor.
4160 This provides a recursive shortcut back to store_constructor when it isn't
4161 necessary to go through store_field. This is so that we can pass through
4162 the cleared field to let store_constructor know that we may not have to
4163 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): this extract elides several original lines of this
   function (return type, remaining parameter declarations, and braces);
   the embedded line numbers jump 4163 -> 4166 and 4171 -> 4176.  */
4166 store_constructor_field (target
, bitsize
, bitpos
,
4167 mode
, exp
, type
, align
, cleared
)
4169 unsigned HOST_WIDE_INT bitsize
;
4170 HOST_WIDE_INT bitpos
;
4171 enum machine_mode mode
;
/* Fast path: a nested CONSTRUCTOR at a byte-aligned position (and either
   bitpos 0 or a MEM target) is stored by re-pointing TARGET at the
   sub-object and recursing into store_constructor, so CLEARED can
   propagate and avoid redundant clearing.  */
4176 if (TREE_CODE (exp
) == CONSTRUCTOR
4177 && bitpos
% BITS_PER_UNIT
== 0
4178 /* If we have a non-zero bitpos for a register target, then we just
4179 let store_field do the bitfield handling. This is unlikely to
4180 generate unnecessary clear instructions anyways. */
4181 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4185 = change_address (target
,
4186 GET_MODE (target
) == BLKmode
4188 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4189 ? BLKmode
: VOIDmode
,
4190 plus_constant (XEXP (target
, 0),
4191 bitpos
/ BITS_PER_UNIT
));
4192 store_constructor (exp
, target
, align
, cleared
, bitsize
/ BITS_PER_UNIT
);
/* General path: defer to store_field for bitfield extraction/insertion.  */
4195 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, align
,
4196 int_size_in_bytes (type
), 0);
4199 /* Store the value of constructor EXP into the rtx TARGET.
4200 TARGET is either a REG or a MEM.
4201 ALIGN is the maximum known alignment for TARGET.
4202 CLEARED is true if TARGET is known to have been zero'd.
4203 SIZE is the number of bytes of TARGET we are allowed to modify: this
4204 may not be the same as the size of EXP if we are assigning to a field
4205 which has been packed to exclude padding bits. */
4208 store_constructor (exp
, target
, align
, cleared
, size
)
4215 tree type
= TREE_TYPE (exp
);
4216 #ifdef WORD_REGISTER_OPERATIONS
4217 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4220 /* We know our target cannot conflict, since safe_from_p has been called. */
4222 /* Don't try copying piece by piece into a hard register
4223 since that is vulnerable to being clobbered by EXP.
4224 Instead, construct in a pseudo register and then copy it all. */
4225 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4227 rtx temp
= gen_reg_rtx (GET_MODE (target
));
4228 store_constructor (exp
, temp
, align
, cleared
, size
);
4229 emit_move_insn (target
, temp
);
4234 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4235 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4239 /* Inform later passes that the whole union value is dead. */
4240 if ((TREE_CODE (type
) == UNION_TYPE
4241 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4244 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4246 /* If the constructor is empty, clear the union. */
4247 if (! CONSTRUCTOR_ELTS (exp
) && ! cleared
)
4248 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4251 /* If we are building a static constructor into a register,
4252 set the initial value as zero so we can fold the value into
4253 a constant. But if more than one register is involved,
4254 this probably loses. */
4255 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4256 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4259 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4264 /* If the constructor has fewer fields than the structure
4265 or if we are initializing the structure to mostly zeros,
4266 clear the whole structure first. */
4268 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4269 != fields_length (type
))
4270 || mostly_zeros_p (exp
)))
4273 clear_storage (target
, GEN_INT (size
), align
);
4278 /* Inform later passes that the old value is dead. */
4279 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4281 /* Store each element of the constructor into
4282 the corresponding field of TARGET. */
4284 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4286 register tree field
= TREE_PURPOSE (elt
);
4287 #ifdef WORD_REGISTER_OPERATIONS
4288 tree value
= TREE_VALUE (elt
);
4290 register enum machine_mode mode
;
4291 HOST_WIDE_INT bitsize
;
4292 HOST_WIDE_INT bitpos
= 0;
4295 rtx to_rtx
= target
;
4297 /* Just ignore missing fields.
4298 We cleared the whole structure, above,
4299 if any fields are missing. */
4303 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4306 if (host_integerp (DECL_SIZE (field
), 1))
4307 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4311 unsignedp
= TREE_UNSIGNED (field
);
4312 mode
= DECL_MODE (field
);
4313 if (DECL_BIT_FIELD (field
))
4316 offset
= DECL_FIELD_OFFSET (field
);
4317 if (host_integerp (offset
, 0)
4318 && host_integerp (bit_position (field
), 0))
4320 bitpos
= int_bit_position (field
);
4324 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4330 if (contains_placeholder_p (offset
))
4331 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4332 offset
, make_tree (TREE_TYPE (exp
), target
));
4334 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4335 if (GET_CODE (to_rtx
) != MEM
)
4338 if (GET_MODE (offset_rtx
) != ptr_mode
)
4340 #ifdef POINTERS_EXTEND_UNSIGNED
4341 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4343 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4348 = change_address (to_rtx
, VOIDmode
,
4349 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4350 force_reg (ptr_mode
,
4352 align
= DECL_OFFSET_ALIGN (field
);
4355 if (TREE_READONLY (field
))
4357 if (GET_CODE (to_rtx
) == MEM
)
4358 to_rtx
= copy_rtx (to_rtx
);
4360 RTX_UNCHANGING_P (to_rtx
) = 1;
4363 #ifdef WORD_REGISTER_OPERATIONS
4364 /* If this initializes a field that is smaller than a word, at the
4365 start of a word, try to widen it to a full word.
4366 This special case allows us to output C++ member function
4367 initializations in a form that the optimizers can understand. */
4368 if (GET_CODE (target
) == REG
4369 && bitsize
< BITS_PER_WORD
4370 && bitpos
% BITS_PER_WORD
== 0
4371 && GET_MODE_CLASS (mode
) == MODE_INT
4372 && TREE_CODE (value
) == INTEGER_CST
4374 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4376 tree type
= TREE_TYPE (value
);
4377 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4379 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4380 value
= convert (type
, value
);
4382 if (BYTES_BIG_ENDIAN
)
4384 = fold (build (LSHIFT_EXPR
, type
, value
,
4385 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4386 bitsize
= BITS_PER_WORD
;
4390 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4391 TREE_VALUE (elt
), type
, align
, cleared
);
4394 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4399 tree domain
= TYPE_DOMAIN (type
);
4400 tree elttype
= TREE_TYPE (type
);
4401 int const_bounds_p
= (host_integerp (TYPE_MIN_VALUE (domain
), 0)
4402 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4403 HOST_WIDE_INT minelt
;
4404 HOST_WIDE_INT maxelt
;
4406 /* If we have constant bounds for the range of the type, get them. */
4409 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4410 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4413 /* If the constructor has fewer elements than the array,
4414 clear the whole array first. Similarly if this is
4415 static constructor of a non-BLKmode object. */
4416 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4420 HOST_WIDE_INT count
= 0, zero_count
= 0;
4421 need_to_clear
= ! const_bounds_p
;
4423 /* This loop is a more accurate version of the loop in
4424 mostly_zeros_p (it handles RANGE_EXPR in an index).
4425 It is also needed to check for missing elements. */
4426 for (elt
= CONSTRUCTOR_ELTS (exp
);
4427 elt
!= NULL_TREE
&& ! need_to_clear
;
4428 elt
= TREE_CHAIN (elt
))
4430 tree index
= TREE_PURPOSE (elt
);
4431 HOST_WIDE_INT this_node_count
;
4433 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4435 tree lo_index
= TREE_OPERAND (index
, 0);
4436 tree hi_index
= TREE_OPERAND (index
, 1);
4438 if (! host_integerp (lo_index
, 1)
4439 || ! host_integerp (hi_index
, 1))
4445 this_node_count
= (tree_low_cst (hi_index
, 1)
4446 - tree_low_cst (lo_index
, 1) + 1);
4449 this_node_count
= 1;
4451 count
+= this_node_count
;
4452 if (mostly_zeros_p (TREE_VALUE (elt
)))
4453 zero_count
+= this_node_count
;
4456 /* Clear the entire array first if there are any missing elements,
4457 or if the incidence of zero elements is >= 75%. */
4459 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4463 if (need_to_clear
&& size
> 0)
4466 clear_storage (target
, GEN_INT (size
), align
);
4470 /* Inform later passes that the old value is dead. */
4471 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4473 /* Store each element of the constructor into
4474 the corresponding element of TARGET, determined
4475 by counting the elements. */
4476 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4478 elt
= TREE_CHAIN (elt
), i
++)
4480 register enum machine_mode mode
;
4481 HOST_WIDE_INT bitsize
;
4482 HOST_WIDE_INT bitpos
;
4484 tree value
= TREE_VALUE (elt
);
4485 unsigned int align
= TYPE_ALIGN (TREE_TYPE (value
));
4486 tree index
= TREE_PURPOSE (elt
);
4487 rtx xtarget
= target
;
4489 if (cleared
&& is_zeros_p (value
))
4492 unsignedp
= TREE_UNSIGNED (elttype
);
4493 mode
= TYPE_MODE (elttype
);
4494 if (mode
== BLKmode
)
4495 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4496 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4499 bitsize
= GET_MODE_BITSIZE (mode
);
4501 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4503 tree lo_index
= TREE_OPERAND (index
, 0);
4504 tree hi_index
= TREE_OPERAND (index
, 1);
4505 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4506 struct nesting
*loop
;
4507 HOST_WIDE_INT lo
, hi
, count
;
4510 /* If the range is constant and "small", unroll the loop. */
4512 && host_integerp (lo_index
, 0)
4513 && host_integerp (hi_index
, 0)
4514 && (lo
= tree_low_cst (lo_index
, 0),
4515 hi
= tree_low_cst (hi_index
, 0),
4516 count
= hi
- lo
+ 1,
4517 (GET_CODE (target
) != MEM
4519 || (host_integerp (TYPE_SIZE (elttype
), 1)
4520 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4523 lo
-= minelt
; hi
-= minelt
;
4524 for (; lo
<= hi
; lo
++)
4526 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4527 store_constructor_field (target
, bitsize
, bitpos
, mode
,
4528 value
, type
, align
, cleared
);
4533 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4534 loop_top
= gen_label_rtx ();
4535 loop_end
= gen_label_rtx ();
4537 unsignedp
= TREE_UNSIGNED (domain
);
4539 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4541 DECL_RTL (index
) = index_r
4542 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4545 if (TREE_CODE (value
) == SAVE_EXPR
4546 && SAVE_EXPR_RTL (value
) == 0)
4548 /* Make sure value gets expanded once before the
4550 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4553 store_expr (lo_index
, index_r
, 0);
4554 loop
= expand_start_loop (0);
4556 /* Assign value to element index. */
4558 = convert (ssizetype
,
4559 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4560 index
, TYPE_MIN_VALUE (domain
))));
4561 position
= size_binop (MULT_EXPR
, position
,
4563 TYPE_SIZE_UNIT (elttype
)));
4565 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4566 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4567 xtarget
= change_address (target
, mode
, addr
);
4568 if (TREE_CODE (value
) == CONSTRUCTOR
)
4569 store_constructor (value
, xtarget
, align
, cleared
,
4570 bitsize
/ BITS_PER_UNIT
);
4572 store_expr (value
, xtarget
, 0);
4574 expand_exit_loop_if_false (loop
,
4575 build (LT_EXPR
, integer_type_node
,
4578 expand_increment (build (PREINCREMENT_EXPR
,
4580 index
, integer_one_node
), 0, 0);
4582 emit_label (loop_end
);
4585 else if ((index
!= 0 && ! host_integerp (index
, 0))
4586 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4592 index
= ssize_int (1);
4595 index
= convert (ssizetype
,
4596 fold (build (MINUS_EXPR
, index
,
4597 TYPE_MIN_VALUE (domain
))));
4599 position
= size_binop (MULT_EXPR
, index
,
4601 TYPE_SIZE_UNIT (elttype
)));
4602 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4603 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4604 xtarget
= change_address (target
, mode
, addr
);
4605 store_expr (value
, xtarget
, 0);
4610 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4611 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4613 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4615 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4616 type
, align
, cleared
);
4621 /* Set constructor assignments. */
4622 else if (TREE_CODE (type
) == SET_TYPE
)
4624 tree elt
= CONSTRUCTOR_ELTS (exp
);
4625 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4626 tree domain
= TYPE_DOMAIN (type
);
4627 tree domain_min
, domain_max
, bitlength
;
4629 /* The default implementation strategy is to extract the constant
4630 parts of the constructor, use that to initialize the target,
4631 and then "or" in whatever non-constant ranges we need in addition.
4633 If a large set is all zero or all ones, it is
4634 probably better to set it using memset (if available) or bzero.
4635 Also, if a large set has just a single range, it may also be
4636 better to first clear all the first clear the set (using
4637 bzero/memset), and set the bits we want. */
4639 /* Check for all zeros. */
4640 if (elt
== NULL_TREE
&& size
> 0)
4643 clear_storage (target
, GEN_INT (size
), TYPE_ALIGN (type
));
4647 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4648 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4649 bitlength
= size_binop (PLUS_EXPR
,
4650 size_diffop (domain_max
, domain_min
),
4653 nbits
= tree_low_cst (bitlength
, 1);
4655 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4656 are "complicated" (more than one range), initialize (the
4657 constant parts) by copying from a constant. */
4658 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4659 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4661 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4662 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4663 char *bit_buffer
= (char *) alloca (nbits
);
4664 HOST_WIDE_INT word
= 0;
4665 unsigned int bit_pos
= 0;
4666 unsigned int ibit
= 0;
4667 unsigned int offset
= 0; /* In bytes from beginning of set. */
4669 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4672 if (bit_buffer
[ibit
])
4674 if (BYTES_BIG_ENDIAN
)
4675 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4677 word
|= 1 << bit_pos
;
4681 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4683 if (word
!= 0 || ! cleared
)
4685 rtx datum
= GEN_INT (word
);
4688 /* The assumption here is that it is safe to use
4689 XEXP if the set is multi-word, but not if
4690 it's single-word. */
4691 if (GET_CODE (target
) == MEM
)
4693 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4694 to_rtx
= change_address (target
, mode
, to_rtx
);
4696 else if (offset
== 0)
4700 emit_move_insn (to_rtx
, datum
);
4707 offset
+= set_word_size
/ BITS_PER_UNIT
;
4712 /* Don't bother clearing storage if the set is all ones. */
4713 if (TREE_CHAIN (elt
) != NULL_TREE
4714 || (TREE_PURPOSE (elt
) == NULL_TREE
4716 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4717 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4718 || (tree_low_cst (TREE_VALUE (elt
), 0)
4719 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4720 != (HOST_WIDE_INT
) nbits
))))
4721 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4723 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4725 /* Start of range of element or NULL. */
4726 tree startbit
= TREE_PURPOSE (elt
);
4727 /* End of range of element, or element value. */
4728 tree endbit
= TREE_VALUE (elt
);
4729 #ifdef TARGET_MEM_FUNCTIONS
4730 HOST_WIDE_INT startb
, endb
;
4732 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4734 bitlength_rtx
= expand_expr (bitlength
,
4735 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4737 /* Handle non-range tuple element like [ expr ]. */
4738 if (startbit
== NULL_TREE
)
4740 startbit
= save_expr (endbit
);
4744 startbit
= convert (sizetype
, startbit
);
4745 endbit
= convert (sizetype
, endbit
);
4746 if (! integer_zerop (domain_min
))
4748 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4749 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4751 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4752 EXPAND_CONST_ADDRESS
);
4753 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4754 EXPAND_CONST_ADDRESS
);
4758 targetx
= assign_stack_temp (GET_MODE (target
),
4759 GET_MODE_SIZE (GET_MODE (target
)),
4761 emit_move_insn (targetx
, target
);
4764 else if (GET_CODE (target
) == MEM
)
4769 #ifdef TARGET_MEM_FUNCTIONS
4770 /* Optimization: If startbit and endbit are
4771 constants divisible by BITS_PER_UNIT,
4772 call memset instead. */
4773 if (TREE_CODE (startbit
) == INTEGER_CST
4774 && TREE_CODE (endbit
) == INTEGER_CST
4775 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4776 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4778 emit_library_call (memset_libfunc
, 0,
4780 plus_constant (XEXP (targetx
, 0),
4781 startb
/ BITS_PER_UNIT
),
4783 constm1_rtx
, TYPE_MODE (integer_type_node
),
4784 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4785 TYPE_MODE (sizetype
));
4789 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4790 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
4791 bitlength_rtx
, TYPE_MODE (sizetype
),
4792 startbit_rtx
, TYPE_MODE (sizetype
),
4793 endbit_rtx
, TYPE_MODE (sizetype
));
4796 emit_move_insn (target
, targetx
);
4804 /* Store the value of EXP (an expression tree)
4805 into a subfield of TARGET which has mode MODE and occupies
4806 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4807 If MODE is VOIDmode, it means that we are storing into a bit-field.
4809 If VALUE_MODE is VOIDmode, return nothing in particular.
4810 UNSIGNEDP is not used in this case.
4812 Otherwise, return an rtx for the value stored. This rtx
4813 has mode VALUE_MODE if that is convenient to do.
4814 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4816 ALIGN is the alignment that TARGET is known to have.
4817 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4819 ALIAS_SET is the alias set for the destination. This value will
4820 (in general) be different from that for TARGET, since TARGET is a
4821 reference to the containing structure. */
4824 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4825 unsignedp
, align
, total_size
, alias_set
)
4827 HOST_WIDE_INT bitsize
;
4828 HOST_WIDE_INT bitpos
;
4829 enum machine_mode mode
;
4831 enum machine_mode value_mode
;
4834 HOST_WIDE_INT total_size
;
4837 HOST_WIDE_INT width_mask
= 0;
4839 if (TREE_CODE (exp
) == ERROR_MARK
)
4842 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4843 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4845 /* If we are storing into an unaligned field of an aligned union that is
4846 in a register, we may have the mode of TARGET being an integer mode but
4847 MODE == BLKmode. In that case, get an aligned object whose size and
4848 alignment are the same as TARGET and store TARGET into it (we can avoid
4849 the store if the field being stored is the entire width of TARGET). Then
4850 call ourselves recursively to store the field into a BLKmode version of
4851 that object. Finally, load from the object into TARGET. This is not
4852 very efficient in general, but should only be slightly more expensive
4853 than the otherwise-required unaligned accesses. Perhaps this can be
4854 cleaned up later. */
4857 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4859 rtx object
= assign_stack_temp (GET_MODE (target
),
4860 GET_MODE_SIZE (GET_MODE (target
)), 0);
4861 rtx blk_object
= copy_rtx (object
);
4863 MEM_SET_IN_STRUCT_P (object
, 1);
4864 MEM_SET_IN_STRUCT_P (blk_object
, 1);
4865 PUT_MODE (blk_object
, BLKmode
);
4867 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4868 emit_move_insn (object
, target
);
4870 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4871 align
, total_size
, alias_set
);
4873 /* Even though we aren't returning target, we need to
4874 give it the updated value. */
4875 emit_move_insn (target
, object
);
4880 if (GET_CODE (target
) == CONCAT
)
4882 /* We're storing into a struct containing a single __complex. */
4886 return store_expr (exp
, target
, 0);
4889 /* If the structure is in a register or if the component
4890 is a bit field, we cannot use addressing to access it.
4891 Use bit-field techniques or SUBREG to store in it. */
4893 if (mode
== VOIDmode
4894 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
4895 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
4896 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
4897 || GET_CODE (target
) == REG
4898 || GET_CODE (target
) == SUBREG
4899 /* If the field isn't aligned enough to store as an ordinary memref,
4900 store it as a bit field. */
4901 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4902 && (align
< GET_MODE_ALIGNMENT (mode
)
4903 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
4904 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
4905 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
4906 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
4907 /* If the RHS and field are a constant size and the size of the
4908 RHS isn't the same size as the bitfield, we must use bitfield
4911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
4912 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
4914 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4916 /* If BITSIZE is narrower than the size of the type of EXP
4917 we will be narrowing TEMP. Normally, what's wanted are the
4918 low-order bits. However, if EXP's type is a record and this is
4919 big-endian machine, we want the upper BITSIZE bits. */
4920 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4921 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4922 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4923 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4924 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4928 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4930 if (mode
!= VOIDmode
&& mode
!= BLKmode
4931 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4932 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4934 /* If the modes of TARGET and TEMP are both BLKmode, both
4935 must be in memory and BITPOS must be aligned on a byte
4936 boundary. If so, we simply do a block copy. */
4937 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4939 unsigned int exp_align
= expr_align (exp
);
4941 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4942 || bitpos
% BITS_PER_UNIT
!= 0)
4945 target
= change_address (target
, VOIDmode
,
4946 plus_constant (XEXP (target
, 0),
4947 bitpos
/ BITS_PER_UNIT
));
4949 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4950 align
= MIN (exp_align
, align
);
4952 /* Find an alignment that is consistent with the bit position. */
4953 while ((bitpos
% align
) != 0)
4956 emit_block_move (target
, temp
,
4957 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4961 return value_mode
== VOIDmode
? const0_rtx
: target
;
4964 /* Store the value in the bitfield. */
4965 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4966 if (value_mode
!= VOIDmode
)
4968 /* The caller wants an rtx for the value. */
4969 /* If possible, avoid refetching from the bitfield itself. */
4971 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4974 enum machine_mode tmode
;
4977 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4978 tmode
= GET_MODE (temp
);
4979 if (tmode
== VOIDmode
)
4981 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4982 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4983 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4985 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4986 NULL_RTX
, value_mode
, 0, align
,
4993 rtx addr
= XEXP (target
, 0);
4996 /* If a value is wanted, it must be the lhs;
4997 so make the address stable for multiple use. */
4999 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5000 && ! CONSTANT_ADDRESS_P (addr
)
5001 /* A frame-pointer reference is already stable. */
5002 && ! (GET_CODE (addr
) == PLUS
5003 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5004 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5005 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5006 addr
= copy_to_reg (addr
);
5008 /* Now build a reference to just the desired component. */
5010 to_rtx
= copy_rtx (change_address (target
, mode
,
5011 plus_constant (addr
,
5013 / BITS_PER_UNIT
))));
5014 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5015 MEM_ALIAS_SET (to_rtx
) = alias_set
;
5017 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5021 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5022 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5023 ARRAY_REFs and find the ultimate containing object, which we return.
5025 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5026 bit position, and *PUNSIGNEDP to the signedness of the field.
5027 If the position of the field is variable, we store a tree
5028 giving the variable offset (in units) in *POFFSET.
5029 This offset is in addition to the bit position.
5030 If the position is not variable, we store 0 in *POFFSET.
5031 We set *PALIGNMENT to the alignment of the address that will be
5032 computed. This is the alignment of the thing we return if *POFFSET
5033 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5035 If any of the extraction expressions is volatile,
5036 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5038 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5039 is a mode that can be used to access the field. In that case, *PBITSIZE
5042 If the field describes a variable-sized object, *PMODE is set to
5043 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5044 this case, but the address of the object can be found. */
5047 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5048 punsignedp
, pvolatilep
, palignment
)
5050 HOST_WIDE_INT
*pbitsize
;
5051 HOST_WIDE_INT
*pbitpos
;
5053 enum machine_mode
*pmode
;
5056 unsigned int *palignment
;
5059 enum machine_mode mode
= VOIDmode
;
5060 tree offset
= size_zero_node
;
5061 tree bit_offset
= bitsize_zero_node
;
5062 unsigned int alignment
= BIGGEST_ALIGNMENT
;
5065 /* First get the mode, signedness, and size. We do this from just the
5066 outermost expression. */
5067 if (TREE_CODE (exp
) == COMPONENT_REF
)
5069 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5070 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5071 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5073 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5075 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5077 size_tree
= TREE_OPERAND (exp
, 1);
5078 *punsignedp
= TREE_UNSIGNED (exp
);
5082 mode
= TYPE_MODE (TREE_TYPE (exp
));
5083 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5085 if (mode
== BLKmode
)
5086 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5088 *pbitsize
= GET_MODE_BITSIZE (mode
);
5093 if (! host_integerp (size_tree
, 1))
5094 mode
= BLKmode
, *pbitsize
= -1;
5096 *pbitsize
= tree_low_cst (size_tree
, 1);
5099 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5100 and find the ultimate containing object. */
5103 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5104 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5105 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5107 tree field
= TREE_OPERAND (exp
, 1);
5108 tree this_offset
= DECL_FIELD_OFFSET (field
);
5110 /* If this field hasn't been filled in yet, don't go
5111 past it. This should only happen when folding expressions
5112 made during type construction. */
5113 if (this_offset
== 0)
5115 else if (! TREE_CONSTANT (this_offset
)
5116 && contains_placeholder_p (this_offset
))
5117 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5119 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5120 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5121 DECL_FIELD_BIT_OFFSET (field
));
5123 if (! host_integerp (offset
, 0))
5124 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5127 else if (TREE_CODE (exp
) == ARRAY_REF
)
5129 tree index
= TREE_OPERAND (exp
, 1);
5130 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5131 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5132 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (exp
));
5134 /* We assume all arrays have sizes that are a multiple of a byte.
5135 First subtract the lower bound, if any, in the type of the
5136 index, then convert to sizetype and multiply by the size of the
5138 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5139 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5142 /* If the index has a self-referential type, pass it to a
5143 WITH_RECORD_EXPR; if the component size is self-referential, pass
5144 our component to one. */
5145 if (! TREE_CONSTANT (index
)
5146 && contains_placeholder_p (index
))
5147 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5148 if (! TREE_CONSTANT (unit_size
)
5149 && contains_placeholder_p (unit_size
))
5150 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
,
5151 TREE_OPERAND (exp
, 0));
5153 offset
= size_binop (PLUS_EXPR
, offset
,
5154 size_binop (MULT_EXPR
,
5155 convert (sizetype
, index
),
5159 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5160 && ! ((TREE_CODE (exp
) == NOP_EXPR
5161 || TREE_CODE (exp
) == CONVERT_EXPR
)
5162 && (TYPE_MODE (TREE_TYPE (exp
))
5163 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5166 /* If any reference in the chain is volatile, the effect is volatile. */
5167 if (TREE_THIS_VOLATILE (exp
))
5170 /* If the offset is non-constant already, then we can't assume any
5171 alignment more than the alignment here. */
5172 if (! TREE_CONSTANT (offset
))
5173 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5175 exp
= TREE_OPERAND (exp
, 0);
5179 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5180 else if (TREE_TYPE (exp
) != 0)
5181 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5183 /* If OFFSET is constant, see if we can return the whole thing as a
5184 constant bit position. Otherwise, split it up. */
5185 if (host_integerp (offset
, 0)
5186 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5188 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5189 && host_integerp (tem
, 0))
5190 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5192 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5195 *palignment
= alignment
;
5199 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5201 static enum memory_use_mode
5202 get_memory_usage_from_modifier (modifier
)
5203 enum expand_modifier modifier
;
5209 return MEMORY_USE_RO
;
5211 case EXPAND_MEMORY_USE_WO
:
5212 return MEMORY_USE_WO
;
5214 case EXPAND_MEMORY_USE_RW
:
5215 return MEMORY_USE_RW
;
5217 case EXPAND_MEMORY_USE_DONT
:
5218 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5219 MEMORY_USE_DONT, because they are modifiers to a call of
5220 expand_expr in the ADDR_EXPR case of expand_expr. */
5221 case EXPAND_CONST_ADDRESS
:
5222 case EXPAND_INITIALIZER
:
5223 return MEMORY_USE_DONT
;
5224 case EXPAND_MEMORY_USE_BAD
:
5230 /* Given an rtx VALUE that may contain additions and multiplications,
5231 return an equivalent value that just refers to a register or memory.
5232 This is done by generating instructions to perform the arithmetic
5233 and returning a pseudo-register containing the value.
5235 The returned value may be a REG, SUBREG, MEM or constant. */
5238 force_operand (value
, target
)
5241 register optab binoptab
= 0;
5242 /* Use a temporary to force order of execution of calls to
5246 /* Use subtarget as the target for operand 0 of a binary operation. */
5247 register rtx subtarget
= get_subtarget (target
);
5249 /* Check for a PIC address load. */
5251 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5252 && XEXP (value
, 0) == pic_offset_table_rtx
5253 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5254 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5255 || GET_CODE (XEXP (value
, 1)) == CONST
))
5258 subtarget
= gen_reg_rtx (GET_MODE (value
));
5259 emit_move_insn (subtarget
, value
);
5263 if (GET_CODE (value
) == PLUS
)
5264 binoptab
= add_optab
;
5265 else if (GET_CODE (value
) == MINUS
)
5266 binoptab
= sub_optab
;
5267 else if (GET_CODE (value
) == MULT
)
5269 op2
= XEXP (value
, 1);
5270 if (!CONSTANT_P (op2
)
5271 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5273 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5274 return expand_mult (GET_MODE (value
), tmp
,
5275 force_operand (op2
, NULL_RTX
),
5281 op2
= XEXP (value
, 1);
5282 if (!CONSTANT_P (op2
)
5283 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5285 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5287 binoptab
= add_optab
;
5288 op2
= negate_rtx (GET_MODE (value
), op2
);
5291 /* Check for an addition with OP2 a constant integer and our first
5292 operand a PLUS of a virtual register and something else. In that
5293 case, we want to emit the sum of the virtual register and the
5294 constant first and then add the other value. This allows virtual
5295 register instantiation to simply modify the constant rather than
5296 creating another one around this addition. */
5297 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5298 && GET_CODE (XEXP (value
, 0)) == PLUS
5299 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5300 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5301 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5303 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5304 XEXP (XEXP (value
, 0), 0), op2
,
5305 subtarget
, 0, OPTAB_LIB_WIDEN
);
5306 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5307 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5308 target
, 0, OPTAB_LIB_WIDEN
);
5311 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5312 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5313 force_operand (op2
, NULL_RTX
),
5314 target
, 0, OPTAB_LIB_WIDEN
);
5315 /* We give UNSIGNEDP = 0 to expand_binop
5316 because the only operations we are expanding here are signed ones. */
5321 /* Subroutine of expand_expr:
5322 save the non-copied parts (LIST) of an expr (LHS), and return a list
5323 which can restore these values to their previous values,
5324 should something modify their storage. */
5327 save_noncopied_parts (lhs
, list
)
5334 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5335 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5336 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5339 tree part
= TREE_VALUE (tail
);
5340 tree part_type
= TREE_TYPE (part
);
5341 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5342 rtx target
= assign_temp (part_type
, 0, 1, 1);
5343 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
5344 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
5345 parts
= tree_cons (to_be_saved
,
5346 build (RTL_EXPR
, part_type
, NULL_TREE
,
5349 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
5354 /* Subroutine of expand_expr:
5355 record the non-copied parts (LIST) of an expr (LHS), and return a list
5356 which specifies the initial values of these parts. */
5359 init_noncopied_parts (lhs
, list
)
5366 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5367 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5368 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5369 else if (TREE_PURPOSE (tail
))
5371 tree part
= TREE_VALUE (tail
);
5372 tree part_type
= TREE_TYPE (part
);
5373 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5374 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
5379 /* Subroutine of expand_expr: return nonzero iff there is no way that
5380 EXP can reference X, which is being modified. TOP_P is nonzero if this
5381 call is going to be used to determine whether we need a temporary
5382 for EXP, as opposed to a recursive call to this function.
5384 It is always safe for this routine to return zero since it merely
5385 searches for optimization opportunities. */
5388 safe_from_p (x
, exp
, top_p
)
5395 static int save_expr_count
;
5396 static int save_expr_size
= 0;
5397 static tree
*save_expr_rewritten
;
5398 static tree save_expr_trees
[256];
5401 /* If EXP has varying size, we MUST use a target since we currently
5402 have no way of allocating temporaries of variable size
5403 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5404 So we assume here that something at a higher level has prevented a
5405 clash. This is somewhat bogus, but the best we can do. Only
5406 do this when X is BLKmode and when we are at the top level. */
5407 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5408 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5409 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5410 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5411 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5413 && GET_MODE (x
) == BLKmode
))
5416 if (top_p
&& save_expr_size
== 0)
5420 save_expr_count
= 0;
5421 save_expr_size
= ARRAY_SIZE (save_expr_trees
);
5422 save_expr_rewritten
= &save_expr_trees
[0];
5424 rtn
= safe_from_p (x
, exp
, 1);
5426 for (i
= 0; i
< save_expr_count
; ++i
)
5428 if (TREE_CODE (save_expr_trees
[i
]) != ERROR_MARK
)
5430 TREE_SET_CODE (save_expr_trees
[i
], SAVE_EXPR
);
5438 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5439 find the underlying pseudo. */
5440 if (GET_CODE (x
) == SUBREG
)
5443 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5447 /* If X is a location in the outgoing argument area, it is always safe. */
5448 if (GET_CODE (x
) == MEM
5449 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5450 || (GET_CODE (XEXP (x
, 0)) == PLUS
5451 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
5454 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5457 exp_rtl
= DECL_RTL (exp
);
5464 if (TREE_CODE (exp
) == TREE_LIST
)
5465 return ((TREE_VALUE (exp
) == 0
5466 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5467 && (TREE_CHAIN (exp
) == 0
5468 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5469 else if (TREE_CODE (exp
) == ERROR_MARK
)
5470 return 1; /* An already-visited SAVE_EXPR? */
5475 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5479 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5480 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5484 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5485 the expression. If it is set, we conflict iff we are that rtx or
5486 both are in memory. Otherwise, we check all operands of the
5487 expression recursively. */
5489 switch (TREE_CODE (exp
))
5492 return (staticp (TREE_OPERAND (exp
, 0))
5493 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5494 || TREE_STATIC (exp
));
5497 if (GET_CODE (x
) == MEM
)
5502 exp_rtl
= CALL_EXPR_RTL (exp
);
5505 /* Assume that the call will clobber all hard registers and
5507 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5508 || GET_CODE (x
) == MEM
)
5515 /* If a sequence exists, we would have to scan every instruction
5516 in the sequence to see if it was safe. This is probably not
5518 if (RTL_EXPR_SEQUENCE (exp
))
5521 exp_rtl
= RTL_EXPR_RTL (exp
);
5524 case WITH_CLEANUP_EXPR
:
5525 exp_rtl
= RTL_EXPR_RTL (exp
);
5528 case CLEANUP_POINT_EXPR
:
5529 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5532 exp_rtl
= SAVE_EXPR_RTL (exp
);
5536 /* This SAVE_EXPR might appear many times in the top-level
5537 safe_from_p() expression, and if it has a complex
5538 subexpression, examining it multiple times could result
5539 in a combinatorial explosion. E.g. on an Alpha
5540 running at least 200MHz, a Fortran test case compiled with
5541 optimization took about 28 minutes to compile -- even though
5542 it was only a few lines long, and the complicated line causing
5543 so much time to be spent in the earlier version of safe_from_p()
5544 had only 293 or so unique nodes.
5546 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5547 where it is so we can turn it back in the top-level safe_from_p()
5550 /* For now, don't bother re-sizing the array. */
5551 if (save_expr_count
>= save_expr_size
)
5553 save_expr_rewritten
[save_expr_count
++] = exp
;
5555 nops
= TREE_CODE_LENGTH (SAVE_EXPR
);
5556 for (i
= 0; i
< nops
; i
++)
5558 tree operand
= TREE_OPERAND (exp
, i
);
5559 if (operand
== NULL_TREE
)
5561 TREE_SET_CODE (exp
, ERROR_MARK
);
5562 if (!safe_from_p (x
, operand
, 0))
5564 TREE_SET_CODE (exp
, SAVE_EXPR
);
5566 TREE_SET_CODE (exp
, ERROR_MARK
);
5570 /* The only operand we look at is operand 1. The rest aren't
5571 part of the expression. */
5572 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5574 case METHOD_CALL_EXPR
:
5575 /* This takes a rtx argument, but shouldn't appear here. */
5582 /* If we have an rtx, we do not need to scan our operands. */
5586 nops
= TREE_CODE_LENGTH (TREE_CODE (exp
));
5587 for (i
= 0; i
< nops
; i
++)
5588 if (TREE_OPERAND (exp
, i
) != 0
5589 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5593 /* If we have an rtl, find any enclosed object. Then see if we conflict
5597 if (GET_CODE (exp_rtl
) == SUBREG
)
5599 exp_rtl
= SUBREG_REG (exp_rtl
);
5600 if (GET_CODE (exp_rtl
) == REG
5601 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5605 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5606 are memory and EXP is not readonly. */
5607 return ! (rtx_equal_p (x
, exp_rtl
)
5608 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5609 && ! TREE_READONLY (exp
)));
5612 /* If we reach here, it is safe. */
5616 /* Subroutine of expand_expr: return nonzero iff EXP is an
5617 expression whose type is statically determinable. */
5623 if (TREE_CODE (exp
) == PARM_DECL
5624 || TREE_CODE (exp
) == VAR_DECL
5625 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5626 || TREE_CODE (exp
) == COMPONENT_REF
5627 || TREE_CODE (exp
) == ARRAY_REF
)
5632 /* Subroutine of expand_expr: return rtx if EXP is a
5633 variable or parameter; else return 0. */
5640 switch (TREE_CODE (exp
))
5644 return DECL_RTL (exp
);
5650 #ifdef MAX_INTEGER_COMPUTATION_MODE
5652 check_max_integer_computation_mode (exp
)
5655 enum tree_code code
;
5656 enum machine_mode mode
;
5658 /* Strip any NOPs that don't change the mode. */
5660 code
= TREE_CODE (exp
);
5662 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5663 if (code
== NOP_EXPR
5664 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5667 /* First check the type of the overall operation. We need only look at
5668 unary, binary and relational operations. */
5669 if (TREE_CODE_CLASS (code
) == '1'
5670 || TREE_CODE_CLASS (code
) == '2'
5671 || TREE_CODE_CLASS (code
) == '<')
5673 mode
= TYPE_MODE (TREE_TYPE (exp
));
5674 if (GET_MODE_CLASS (mode
) == MODE_INT
5675 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5676 fatal ("unsupported wide integer operation");
5679 /* Check operand of a unary op. */
5680 if (TREE_CODE_CLASS (code
) == '1')
5682 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5683 if (GET_MODE_CLASS (mode
) == MODE_INT
5684 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5685 fatal ("unsupported wide integer operation");
5688 /* Check operands of a binary/comparison op. */
5689 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5691 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5692 if (GET_MODE_CLASS (mode
) == MODE_INT
5693 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5694 fatal ("unsupported wide integer operation");
5696 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5697 if (GET_MODE_CLASS (mode
) == MODE_INT
5698 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5699 fatal ("unsupported wide integer operation");
5704 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5705 has any readonly fields. If any of the fields have types that
5706 contain readonly fields, return true as well. */
5709 readonly_fields_p (type
)
5714 for (field
= TYPE_FIELDS (type
); field
!= 0; field
= TREE_CHAIN (field
))
5715 if (TREE_CODE (field
) == FIELD_DECL
5716 && (TREE_READONLY (field
)
5717 || (TREE_CODE (TREE_TYPE (field
)) == RECORD_TYPE
5718 && readonly_fields_p (TREE_TYPE (field
)))))
5724 /* expand_expr: generate code for computing expression EXP.
5725 An rtx for the computed value is returned. The value is never null.
5726 In the case of a void EXP, const0_rtx is returned.
5728 The value may be stored in TARGET if TARGET is nonzero.
5729 TARGET is just a suggestion; callers must assume that
5730 the rtx returned may not be the same as TARGET.
5732 If TARGET is CONST0_RTX, it means that the value will be ignored.
5734 If TMODE is not VOIDmode, it suggests generating the
5735 result in mode TMODE. But this is done only when convenient.
5736 Otherwise, TMODE is ignored and the value generated in its natural mode.
5737 TMODE is just a suggestion; callers must assume that
5738 the rtx returned may not have mode TMODE.
5740 Note that TARGET may have neither TMODE nor MODE. In that case, it
5741 probably will not be used.
5743 If MODIFIER is EXPAND_SUM then when EXP is an addition
5744 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5745 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5746 products as above, or REG or MEM, or constant.
5747 Ordinarily in such cases we would output mul or add instructions
5748 and then return a pseudo reg containing the sum.
5750 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5751 it also marks a label as absolutely required (it can't be dead).
5752 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5753 This is used for outputting expressions used in initializers.
5755 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5756 with a constant address even if that address is not normally legitimate.
5757 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5760 expand_expr (exp
, target
, tmode
, modifier
)
5763 enum machine_mode tmode
;
5764 enum expand_modifier modifier
;
5766 register rtx op0
, op1
, temp
;
5767 tree type
= TREE_TYPE (exp
);
5768 int unsignedp
= TREE_UNSIGNED (type
);
5769 register enum machine_mode mode
;
5770 register enum tree_code code
= TREE_CODE (exp
);
5772 rtx subtarget
, original_target
;
5775 /* Used by check-memory-usage to make modifier read only. */
5776 enum expand_modifier ro_modifier
;
5778 /* Handle ERROR_MARK before anybody tries to access its type. */
5779 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
5781 op0
= CONST0_RTX (tmode
);
5787 mode
= TYPE_MODE (type
);
5788 /* Use subtarget as the target for operand 0 of a binary operation. */
5789 subtarget
= get_subtarget (target
);
5790 original_target
= target
;
5791 ignore
= (target
== const0_rtx
5792 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5793 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5794 || code
== COND_EXPR
)
5795 && TREE_CODE (type
) == VOID_TYPE
));
5797 /* Make a read-only version of the modifier. */
5798 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5799 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5800 ro_modifier
= modifier
;
5802 ro_modifier
= EXPAND_NORMAL
;
5804 /* If we are going to ignore this result, we need only do something
5805 if there is a side-effect somewhere in the expression. If there
5806 is, short-circuit the most common cases here. Note that we must
5807 not call expand_expr with anything but const0_rtx in case this
5808 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5812 if (! TREE_SIDE_EFFECTS (exp
))
5815 /* Ensure we reference a volatile object even if value is ignored, but
5816 don't do this if all we are doing is taking its address. */
5817 if (TREE_THIS_VOLATILE (exp
)
5818 && TREE_CODE (exp
) != FUNCTION_DECL
5819 && mode
!= VOIDmode
&& mode
!= BLKmode
5820 && modifier
!= EXPAND_CONST_ADDRESS
)
5822 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
5823 if (GET_CODE (temp
) == MEM
)
5824 temp
= copy_to_reg (temp
);
5828 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
5829 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
5830 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5831 VOIDmode
, ro_modifier
);
5832 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
5833 || code
== ARRAY_REF
)
5835 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5836 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5839 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5840 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
5841 /* If the second operand has no side effects, just evaluate
5843 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5844 VOIDmode
, ro_modifier
);
5845 else if (code
== BIT_FIELD_REF
)
5847 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5848 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5849 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
5856 #ifdef MAX_INTEGER_COMPUTATION_MODE
5857 /* Only check stuff here if the mode we want is different from the mode
5858 of the expression; if it's the same, check_max_integer_computation_mode
5859 will handle it. Do we really need to check this stuff at all? */
5862 && GET_MODE (target
) != mode
5863 && TREE_CODE (exp
) != INTEGER_CST
5864 && TREE_CODE (exp
) != PARM_DECL
5865 && TREE_CODE (exp
) != ARRAY_REF
5866 && TREE_CODE (exp
) != COMPONENT_REF
5867 && TREE_CODE (exp
) != BIT_FIELD_REF
5868 && TREE_CODE (exp
) != INDIRECT_REF
5869 && TREE_CODE (exp
) != CALL_EXPR
5870 && TREE_CODE (exp
) != VAR_DECL
5871 && TREE_CODE (exp
) != RTL_EXPR
)
5873 enum machine_mode mode
= GET_MODE (target
);
5875 if (GET_MODE_CLASS (mode
) == MODE_INT
5876 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5877 fatal ("unsupported wide integer operation");
5881 && TREE_CODE (exp
) != INTEGER_CST
5882 && TREE_CODE (exp
) != PARM_DECL
5883 && TREE_CODE (exp
) != ARRAY_REF
5884 && TREE_CODE (exp
) != COMPONENT_REF
5885 && TREE_CODE (exp
) != BIT_FIELD_REF
5886 && TREE_CODE (exp
) != INDIRECT_REF
5887 && TREE_CODE (exp
) != VAR_DECL
5888 && TREE_CODE (exp
) != CALL_EXPR
5889 && TREE_CODE (exp
) != RTL_EXPR
5890 && GET_MODE_CLASS (tmode
) == MODE_INT
5891 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
5892 fatal ("unsupported wide integer operation");
5894 check_max_integer_computation_mode (exp
);
5897 /* If will do cse, generate all results into pseudo registers
5898 since 1) that allows cse to find more things
5899 and 2) otherwise cse could produce an insn the machine
5902 if (! cse_not_expected
&& mode
!= BLKmode
&& target
5903 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5910 tree function
= decl_function_context (exp
);
5911 /* Handle using a label in a containing function. */
5912 if (function
!= current_function_decl
5913 && function
!= inline_function_decl
&& function
!= 0)
5915 struct function
*p
= find_function_data (function
);
5916 /* Allocate in the memory associated with the function
5917 that the label is in. */
5918 push_obstacks (p
->function_obstack
,
5919 p
->function_maybepermanent_obstack
);
5921 p
->expr
->x_forced_labels
5922 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
5923 p
->expr
->x_forced_labels
);
5928 if (modifier
== EXPAND_INITIALIZER
)
5929 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
5934 temp
= gen_rtx_MEM (FUNCTION_MODE
,
5935 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
5936 if (function
!= current_function_decl
5937 && function
!= inline_function_decl
&& function
!= 0)
5938 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
5943 if (DECL_RTL (exp
) == 0)
5945 error_with_decl (exp
, "prior parameter's size depends on `%s'");
5946 return CONST0_RTX (mode
);
5949 /* ... fall through ... */
5952 /* If a static var's type was incomplete when the decl was written,
5953 but the type is complete now, lay out the decl now. */
5954 if (DECL_SIZE (exp
) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5955 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
5957 push_obstacks_nochange ();
5958 end_temporary_allocation ();
5959 layout_decl (exp
, 0);
5960 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
5964 /* Although static-storage variables start off initialized, according to
5965 ANSI C, a memcpy could overwrite them with uninitialized values. So
5966 we check them too. This also lets us check for read-only variables
5967 accessed via a non-const declaration, in case it won't be detected
5968 any other way (e.g., in an embedded system or OS kernel without
5971 Aggregates are not checked here; they're handled elsewhere. */
5972 if (cfun
&& current_function_check_memory_usage
5974 && GET_CODE (DECL_RTL (exp
)) == MEM
5975 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
5977 enum memory_use_mode memory_usage
;
5978 memory_usage
= get_memory_usage_from_modifier (modifier
);
5980 in_check_memory_usage
= 1;
5981 if (memory_usage
!= MEMORY_USE_DONT
)
5982 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
5983 XEXP (DECL_RTL (exp
), 0), Pmode
,
5984 GEN_INT (int_size_in_bytes (type
)),
5985 TYPE_MODE (sizetype
),
5986 GEN_INT (memory_usage
),
5987 TYPE_MODE (integer_type_node
));
5988 in_check_memory_usage
= 0;
5991 /* ... fall through ... */
5995 if (DECL_RTL (exp
) == 0)
5998 /* Ensure variable marked as used even if it doesn't go through
5999 a parser. If it hasn't be used yet, write out an external
6001 if (! TREE_USED (exp
))
6003 assemble_external (exp
);
6004 TREE_USED (exp
) = 1;
6007 /* Show we haven't gotten RTL for this yet. */
6010 /* Handle variables inherited from containing functions. */
6011 context
= decl_function_context (exp
);
6013 /* We treat inline_function_decl as an alias for the current function
6014 because that is the inline function whose vars, types, etc.
6015 are being merged into the current function.
6016 See expand_inline_function. */
6018 if (context
!= 0 && context
!= current_function_decl
6019 && context
!= inline_function_decl
6020 /* If var is static, we don't need a static chain to access it. */
6021 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6022 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6026 /* Mark as non-local and addressable. */
6027 DECL_NONLOCAL (exp
) = 1;
6028 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6030 mark_addressable (exp
);
6031 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6033 addr
= XEXP (DECL_RTL (exp
), 0);
6034 if (GET_CODE (addr
) == MEM
)
6035 addr
= change_address (addr
, Pmode
,
6036 fix_lexical_addr (XEXP (addr
, 0), exp
));
6038 addr
= fix_lexical_addr (addr
, exp
);
6040 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
6043 /* This is the case of an array whose size is to be determined
6044 from its initializer, while the initializer is still being parsed.
6047 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6048 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6049 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
6050 XEXP (DECL_RTL (exp
), 0));
6052 /* If DECL_RTL is memory, we are in the normal case and either
6053 the address is not valid or it is not a register and -fforce-addr
6054 is specified, get the address into a register. */
6056 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6057 && modifier
!= EXPAND_CONST_ADDRESS
6058 && modifier
!= EXPAND_SUM
6059 && modifier
!= EXPAND_INITIALIZER
6060 && (! memory_address_p (DECL_MODE (exp
),
6061 XEXP (DECL_RTL (exp
), 0))
6063 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6064 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
6065 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6067 /* If we got something, return it. But first, set the alignment
6068 the address is a register. */
6071 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6072 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6077 /* If the mode of DECL_RTL does not match that of the decl, it
6078 must be a promoted value. We return a SUBREG of the wanted mode,
6079 but mark it so that we know that it was already extended. */
6081 if (GET_CODE (DECL_RTL (exp
)) == REG
6082 && GET_MODE (DECL_RTL (exp
)) != mode
)
6084 /* Get the signedness used for this variable. Ensure we get the
6085 same mode we got when the variable was declared. */
6086 if (GET_MODE (DECL_RTL (exp
))
6087 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
6090 temp
= gen_rtx_SUBREG (mode
, DECL_RTL (exp
), 0);
6091 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6092 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6096 return DECL_RTL (exp
);
6099 return immed_double_const (TREE_INT_CST_LOW (exp
),
6100 TREE_INT_CST_HIGH (exp
), mode
);
6103 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
6104 EXPAND_MEMORY_USE_BAD
);
6107 /* If optimized, generate immediate CONST_DOUBLE
6108 which will be turned into memory by reload if necessary.
6110 We used to force a register so that loop.c could see it. But
6111 this does not allow gen_* patterns to perform optimizations with
6112 the constants. It also produces two insns in cases like "x = 1.0;".
6113 On most machines, floating-point constants are not permitted in
6114 many insns, so we'd end up copying it to a register in any case.
6116 Now, we do the copying in expand_binop, if appropriate. */
6117 return immed_real_const (exp
);
6121 if (! TREE_CST_RTL (exp
))
6122 output_constant_def (exp
);
6124 /* TREE_CST_RTL probably contains a constant address.
6125 On RISC machines where a constant address isn't valid,
6126 make some insns to get that address into a register. */
6127 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6128 && modifier
!= EXPAND_CONST_ADDRESS
6129 && modifier
!= EXPAND_INITIALIZER
6130 && modifier
!= EXPAND_SUM
6131 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6133 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6134 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
6135 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6136 return TREE_CST_RTL (exp
);
6138 case EXPR_WITH_FILE_LOCATION
:
6141 const char *saved_input_filename
= input_filename
;
6142 int saved_lineno
= lineno
;
6143 input_filename
= EXPR_WFL_FILENAME (exp
);
6144 lineno
= EXPR_WFL_LINENO (exp
);
6145 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6146 emit_line_note (input_filename
, lineno
);
6147 /* Possibly avoid switching back and forth here.  */
6148 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6149 input_filename
= saved_input_filename
;
6150 lineno
= saved_lineno
;
6155 context
= decl_function_context (exp
);
6157 /* If this SAVE_EXPR was at global context, assume we are an
6158 initialization function and move it into our context. */
6160 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6162 /* We treat inline_function_decl as an alias for the current function
6163 because that is the inline function whose vars, types, etc.
6164 are being merged into the current function.
6165 See expand_inline_function. */
6166 if (context
== current_function_decl
|| context
== inline_function_decl
)
6169 /* If this is non-local, handle it. */
6172 /* The following call just exists to abort if the context is
6173 not of a containing function. */
6174 find_function_data (context
);
6176 temp
= SAVE_EXPR_RTL (exp
);
6177 if (temp
&& GET_CODE (temp
) == REG
)
6179 put_var_into_stack (exp
);
6180 temp
= SAVE_EXPR_RTL (exp
);
6182 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6184 return change_address (temp
, mode
,
6185 fix_lexical_addr (XEXP (temp
, 0), exp
));
6187 if (SAVE_EXPR_RTL (exp
) == 0)
6189 if (mode
== VOIDmode
)
6192 temp
= assign_temp (type
, 3, 0, 0);
6194 SAVE_EXPR_RTL (exp
) = temp
;
6195 if (!optimize
&& GET_CODE (temp
) == REG
)
6196 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6199 /* If the mode of TEMP does not match that of the expression, it
6200 must be a promoted value. We pass store_expr a SUBREG of the
6201 wanted mode but mark it so that we know that it was already
6202 extended. Note that `unsignedp' was modified above in
6205 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6207 temp
= gen_rtx_SUBREG (mode
, SAVE_EXPR_RTL (exp
), 0);
6208 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6209 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6212 if (temp
== const0_rtx
)
6213 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6214 EXPAND_MEMORY_USE_BAD
);
6216 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
6218 TREE_USED (exp
) = 1;
6221 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6222 must be a promoted value. We return a SUBREG of the wanted mode,
6223 but mark it so that we know that it was already extended. */
6225 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6226 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6228 /* Compute the signedness and make the proper SUBREG. */
6229 promote_mode (type
, mode
, &unsignedp
, 0);
6230 temp
= gen_rtx_SUBREG (mode
, SAVE_EXPR_RTL (exp
), 0);
6231 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6232 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
6236 return SAVE_EXPR_RTL (exp
);
6241 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6242 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
6246 case PLACEHOLDER_EXPR
:
6248 tree placeholder_expr
;
6250 /* If there is an object on the head of the placeholder list,
6251 see if some object in it of type TYPE or a pointer to it. For
6252 further information, see tree.def. */
6253 for (placeholder_expr
= placeholder_list
;
6254 placeholder_expr
!= 0;
6255 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6257 tree need_type
= TYPE_MAIN_VARIANT (type
);
6259 tree old_list
= placeholder_list
;
6262 /* Find the outermost reference that is of the type we want.
6263 If none, see if any object has a type that is a pointer to
6264 the type we want. */
6265 for (elt
= TREE_PURPOSE (placeholder_expr
);
6266 elt
!= 0 && object
== 0;
6268 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6269 || TREE_CODE (elt
) == COND_EXPR
)
6270 ? TREE_OPERAND (elt
, 1)
6271 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6272 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6273 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6274 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6275 ? TREE_OPERAND (elt
, 0) : 0))
6276 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6279 for (elt
= TREE_PURPOSE (placeholder_expr
);
6280 elt
!= 0 && object
== 0;
6282 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6283 || TREE_CODE (elt
) == COND_EXPR
)
6284 ? TREE_OPERAND (elt
, 1)
6285 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6286 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6287 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6288 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6289 ? TREE_OPERAND (elt
, 0) : 0))
6290 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6291 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6293 object
= build1 (INDIRECT_REF
, need_type
, elt
);
6297 /* Expand this object skipping the list entries before
6298 it was found in case it is also a PLACEHOLDER_EXPR.
6299 In that case, we want to translate it using subsequent
6301 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6302 temp
= expand_expr (object
, original_target
, tmode
,
6304 placeholder_list
= old_list
;
6310 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6313 case WITH_RECORD_EXPR
:
6314 /* Put the object on the placeholder list, expand our first operand,
6315 and pop the list. */
6316 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6318 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6319 tmode
, ro_modifier
);
6320 placeholder_list
= TREE_CHAIN (placeholder_list
);
6324 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6325 expand_goto (TREE_OPERAND (exp
, 0));
6327 expand_computed_goto (TREE_OPERAND (exp
, 0));
6331 expand_exit_loop_if_false (NULL_PTR
,
6332 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6335 case LABELED_BLOCK_EXPR
:
6336 if (LABELED_BLOCK_BODY (exp
))
6337 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6338 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6341 case EXIT_BLOCK_EXPR
:
6342 if (EXIT_BLOCK_RETURN (exp
))
6343 sorry ("returned value in block_exit_expr");
6344 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6349 expand_start_loop (1);
6350 expand_expr_stmt (TREE_OPERAND (exp
, 0));
6358 tree vars
= TREE_OPERAND (exp
, 0);
6359 int vars_need_expansion
= 0;
6361 /* Need to open a binding contour here because
6362 if there are any cleanups they must be contained here. */
6363 expand_start_bindings (2);
6365 /* Mark the corresponding BLOCK for output in its proper place. */
6366 if (TREE_OPERAND (exp
, 2) != 0
6367 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6368 insert_block (TREE_OPERAND (exp
, 2));
6370 /* If VARS have not yet been expanded, expand them now. */
6373 if (DECL_RTL (vars
) == 0)
6375 vars_need_expansion
= 1;
6378 expand_decl_init (vars
);
6379 vars
= TREE_CHAIN (vars
);
6382 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
6384 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6390 if (RTL_EXPR_SEQUENCE (exp
))
6392 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6394 emit_insns (RTL_EXPR_SEQUENCE (exp
));
6395 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6397 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6398 free_temps_for_rtl_expr (exp
);
6399 return RTL_EXPR_RTL (exp
);
6402 /* If we don't need the result, just ensure we evaluate any
6407 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6408 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
,
6409 EXPAND_MEMORY_USE_BAD
);
6413 /* All elts simple constants => refer to a constant in memory. But
6414 if this is a non-BLKmode mode, let it store a field at a time
6415 since that should make a CONST_INT or CONST_DOUBLE when we
6416 fold. Likewise, if we have a target we can use, it is best to
6417 store directly into the target unless the type is large enough
6418 that memcpy will be used. If we are making an initializer and
6419 all operands are constant, put it in memory as well. */
6420 else if ((TREE_STATIC (exp
)
6421 && ((mode
== BLKmode
6422 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6423 || TREE_ADDRESSABLE (exp
)
6424 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6425 && (! MOVE_BY_PIECES_P
6426 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6428 && ! mostly_zeros_p (exp
))))
6429 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
6431 rtx constructor
= output_constant_def (exp
);
6433 if (modifier
!= EXPAND_CONST_ADDRESS
6434 && modifier
!= EXPAND_INITIALIZER
6435 && modifier
!= EXPAND_SUM
6436 && (! memory_address_p (GET_MODE (constructor
),
6437 XEXP (constructor
, 0))
6439 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
6440 constructor
= change_address (constructor
, VOIDmode
,
6441 XEXP (constructor
, 0));
6447 /* Handle calls that pass values in multiple non-contiguous
6448 locations. The Irix 6 ABI has examples of this. */
6449 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6450 || GET_CODE (target
) == PARALLEL
)
6452 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
6453 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6455 target
= assign_temp (type
, 0, 1, 1);
6458 if (TREE_READONLY (exp
))
6460 if (GET_CODE (target
) == MEM
)
6461 target
= copy_rtx (target
);
6463 RTX_UNCHANGING_P (target
) = 1;
6466 store_constructor (exp
, target
, TYPE_ALIGN (TREE_TYPE (exp
)), 0,
6467 int_size_in_bytes (TREE_TYPE (exp
)));
6473 tree exp1
= TREE_OPERAND (exp
, 0);
6475 tree string
= string_constant (exp1
, &index
);
6477 /* Try to optimize reads from const strings. */
6479 && TREE_CODE (string
) == STRING_CST
6480 && TREE_CODE (index
) == INTEGER_CST
6481 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6482 && GET_MODE_CLASS (mode
) == MODE_INT
6483 && GET_MODE_SIZE (mode
) == 1
6484 && modifier
!= EXPAND_MEMORY_USE_WO
)
6486 GEN_INT (TREE_STRING_POINTER (string
)[TREE_INT_CST_LOW (index
)]);
6488 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6489 op0
= memory_address (mode
, op0
);
6491 if (cfun
&& current_function_check_memory_usage
6492 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
6494 enum memory_use_mode memory_usage
;
6495 memory_usage
= get_memory_usage_from_modifier (modifier
);
6497 if (memory_usage
!= MEMORY_USE_DONT
)
6499 in_check_memory_usage
= 1;
6500 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6502 GEN_INT (int_size_in_bytes (type
)),
6503 TYPE_MODE (sizetype
),
6504 GEN_INT (memory_usage
),
6505 TYPE_MODE (integer_type_node
));
6506 in_check_memory_usage
= 0;
6510 temp
= gen_rtx_MEM (mode
, op0
);
6511 set_mem_attributes (temp
, exp
, 0);
6513 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6514 here, because, in C and C++, the fact that a location is accessed
6515 through a pointer to const does not mean that the value there can
6516 never change. Languages where it can never change should
6517 also set TREE_STATIC. */
6518 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
6520 /* If we are writing to this object and its type is a record with
6521 readonly fields, we must mark it as readonly so it will
6522 conflict with readonly references to those fields. */
6523 if (modifier
== EXPAND_MEMORY_USE_WO
6524 && TREE_CODE (type
) == RECORD_TYPE
&& readonly_fields_p (type
))
6525 RTX_UNCHANGING_P (temp
) = 1;
6531 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6535 tree array
= TREE_OPERAND (exp
, 0);
6536 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6537 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6538 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6541 /* Optimize the special-case of a zero lower bound.
6543 We convert the low_bound to sizetype to avoid some problems
6544 with constant folding. (E.g. suppose the lower bound is 1,
6545 and its mode is QI. Without the conversion, (ARRAY
6546 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6547 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6549 if (! integer_zerop (low_bound
))
6550 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6552 /* Fold an expression like: "foo"[2].
6553 This is not done in fold so it won't happen inside &.
6554 Don't fold if this is for wide characters since it's too
6555 difficult to do correctly and this is a very rare case. */
6557 if (TREE_CODE (array
) == STRING_CST
6558 && TREE_CODE (index
) == INTEGER_CST
6559 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6560 && GET_MODE_CLASS (mode
) == MODE_INT
6561 && GET_MODE_SIZE (mode
) == 1)
6563 GEN_INT (TREE_STRING_POINTER (array
)[TREE_INT_CST_LOW (index
)]);
6565 /* If this is a constant index into a constant array,
6566 just get the value from the array. Handle both the cases when
6567 we have an explicit constructor and when our operand is a variable
6568 that was declared const. */
6570 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
6571 && TREE_CODE (index
) == INTEGER_CST
6572 && 0 > compare_tree_int (index
,
6573 list_length (CONSTRUCTOR_ELTS
6574 (TREE_OPERAND (exp
, 0)))))
6578 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6579 i
= TREE_INT_CST_LOW (index
);
6580 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6584 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6585 tmode
, ro_modifier
);
6588 else if (optimize
>= 1
6589 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6590 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6591 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
6593 if (TREE_CODE (index
) == INTEGER_CST
)
6595 tree init
= DECL_INITIAL (array
);
6597 if (TREE_CODE (init
) == CONSTRUCTOR
)
6601 for (elem
= CONSTRUCTOR_ELTS (init
);
6603 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6604 elem
= TREE_CHAIN (elem
))
6608 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6609 tmode
, ro_modifier
);
6611 else if (TREE_CODE (init
) == STRING_CST
6612 && 0 > compare_tree_int (index
,
6613 TREE_STRING_LENGTH (init
)))
6615 (TREE_STRING_POINTER
6616 (init
)[TREE_INT_CST_LOW (index
)]));
6624 /* If the operand is a CONSTRUCTOR, we can just extract the
6625 appropriate field if it is present. Don't do this if we have
6626 already written the data since we want to refer to that copy
6627 and varasm.c assumes that's what we'll do. */
6628 if (code
!= ARRAY_REF
6629 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6630 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6634 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6635 elt
= TREE_CHAIN (elt
))
6636 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6637 /* We can normally use the value of the field in the
6638 CONSTRUCTOR. However, if this is a bitfield in
6639 an integral mode that we can fit in a HOST_WIDE_INT,
6640 we must mask only the number of bits in the bitfield,
6641 since this is done implicitly by the constructor. If
6642 the bitfield does not meet either of those conditions,
6643 we can't do this optimization. */
6644 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6645 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6647 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6648 <= HOST_BITS_PER_WIDE_INT
))))
6650 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6651 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6653 HOST_WIDE_INT bitsize
6654 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6656 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6658 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6659 op0
= expand_and (op0
, op1
, target
);
6663 enum machine_mode imode
6664 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6666 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6669 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6671 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6681 enum machine_mode mode1
;
6682 HOST_WIDE_INT bitsize
, bitpos
;
6685 unsigned int alignment
;
6686 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6687 &mode1
, &unsignedp
, &volatilep
,
6690 /* If we got back the original object, something is wrong. Perhaps
6691 we are evaluating an expression too early. In any event, don't
6692 infinitely recurse. */
6696 /* If TEM's type is a union of variable size, pass TARGET to the inner
6697 computation, since it will need a temporary and TARGET is known
6698 to have to do. This occurs in unchecked conversion in Ada. */
6700 op0
= expand_expr (tem
,
6701 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6702 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6704 ? target
: NULL_RTX
),
6706 (modifier
== EXPAND_INITIALIZER
6707 || modifier
== EXPAND_CONST_ADDRESS
)
6708 ? modifier
: EXPAND_NORMAL
);
6710 /* If this is a constant, put it into a register if it is a
6711 legitimate constant and OFFSET is 0 and memory if it isn't. */
6712 if (CONSTANT_P (op0
))
6714 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6715 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6717 op0
= force_reg (mode
, op0
);
6719 op0
= validize_mem (force_const_mem (mode
, op0
));
6724 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
6726 /* If this object is in memory, put it into a register.
6727 This case can't occur in C, but can in Ada if we have
6728 unchecked conversion of an expression from a scalar type to
6729 an array or record type. */
6730 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6731 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6733 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
6735 mark_temp_addr_taken (memloc
);
6736 emit_move_insn (memloc
, op0
);
6740 if (GET_CODE (op0
) != MEM
)
6743 if (GET_MODE (offset_rtx
) != ptr_mode
)
6745 #ifdef POINTERS_EXTEND_UNSIGNED
6746 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
6748 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6752 /* A constant address in OP0 can have VOIDmode, we must not try
6753 to call force_reg for that case. Avoid that case. */
6754 if (GET_CODE (op0
) == MEM
6755 && GET_MODE (op0
) == BLKmode
6756 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6758 && (bitpos
% bitsize
) == 0
6759 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6760 && alignment
== GET_MODE_ALIGNMENT (mode1
))
6762 rtx temp
= change_address (op0
, mode1
,
6763 plus_constant (XEXP (op0
, 0),
6766 if (GET_CODE (XEXP (temp
, 0)) == REG
)
6769 op0
= change_address (op0
, mode1
,
6770 force_reg (GET_MODE (XEXP (temp
, 0)),
6775 op0
= change_address (op0
, VOIDmode
,
6776 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
6777 force_reg (ptr_mode
,
6781 /* Don't forget about volatility even if this is a bitfield. */
6782 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6784 op0
= copy_rtx (op0
);
6785 MEM_VOLATILE_P (op0
) = 1;
6788 /* Check the access. */
6789 if (cfun
!= 0 && current_function_check_memory_usage
6790 && GET_CODE (op0
) == MEM
)
6792 enum memory_use_mode memory_usage
;
6793 memory_usage
= get_memory_usage_from_modifier (modifier
);
6795 if (memory_usage
!= MEMORY_USE_DONT
)
6800 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6801 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6803 /* Check the access right of the pointer. */
6804 in_check_memory_usage
= 1;
6805 if (size
> BITS_PER_UNIT
)
6806 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6808 GEN_INT (size
/ BITS_PER_UNIT
),
6809 TYPE_MODE (sizetype
),
6810 GEN_INT (memory_usage
),
6811 TYPE_MODE (integer_type_node
));
6812 in_check_memory_usage
= 0;
6816 /* In cases where an aligned union has an unaligned object
6817 as a field, we might be extracting a BLKmode value from
6818 an integer-mode (e.g., SImode) object. Handle this case
6819 by doing the extract into an object as wide as the field
6820 (which we know to be the width of a basic mode), then
6821 storing into memory, and changing the mode to BLKmode.
6822 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6823 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6824 if (mode1
== VOIDmode
6825 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6826 || (modifier
!= EXPAND_CONST_ADDRESS
6827 && modifier
!= EXPAND_INITIALIZER
6828 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6829 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6830 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6831 /* If the field isn't aligned enough to fetch as a memref,
6832 fetch it as a bit field. */
6833 || (mode1
!= BLKmode
6834 && SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
6835 && ((TYPE_ALIGN (TREE_TYPE (tem
))
6836 < GET_MODE_ALIGNMENT (mode
))
6837 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))
6838 /* If the type and the field are a constant size and the
6839 size of the type isn't the same size as the bitfield,
6840 we must use bitfield operations. */
6842 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
6844 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
6846 || (modifier
!= EXPAND_CONST_ADDRESS
6847 && modifier
!= EXPAND_INITIALIZER
6849 && SLOW_UNALIGNED_ACCESS (mode
, alignment
)
6850 && (TYPE_ALIGN (type
) > alignment
6851 || bitpos
% TYPE_ALIGN (type
) != 0)))
6853 enum machine_mode ext_mode
= mode
;
6855 if (ext_mode
== BLKmode
6856 && ! (target
!= 0 && GET_CODE (op0
) == MEM
6857 && GET_CODE (target
) == MEM
6858 && bitpos
% BITS_PER_UNIT
== 0))
6859 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6861 if (ext_mode
== BLKmode
)
6863 /* In this case, BITPOS must start at a byte boundary and
6864 TARGET, if specified, must be a MEM. */
6865 if (GET_CODE (op0
) != MEM
6866 || (target
!= 0 && GET_CODE (target
) != MEM
)
6867 || bitpos
% BITS_PER_UNIT
!= 0)
6870 op0
= change_address (op0
, VOIDmode
,
6871 plus_constant (XEXP (op0
, 0),
6872 bitpos
/ BITS_PER_UNIT
));
6874 target
= assign_temp (type
, 0, 1, 1);
6876 emit_block_move (target
, op0
,
6877 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6884 op0
= validize_mem (op0
);
6886 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
6887 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6889 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
6890 unsignedp
, target
, ext_mode
, ext_mode
,
6892 int_size_in_bytes (TREE_TYPE (tem
)));
6894 /* If the result is a record type and BITSIZE is narrower than
6895 the mode of OP0, an integral mode, and this is a big endian
6896 machine, we must put the field into the high-order bits. */
6897 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6898 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6899 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
6900 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6901 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6905 if (mode
== BLKmode
)
6907 rtx
new = assign_stack_temp (ext_mode
,
6908 bitsize
/ BITS_PER_UNIT
, 0);
6910 emit_move_insn (new, op0
);
6911 op0
= copy_rtx (new);
6912 PUT_MODE (op0
, BLKmode
);
6913 MEM_SET_IN_STRUCT_P (op0
, 1);
6919 /* If the result is BLKmode, use that to access the object
6921 if (mode
== BLKmode
)
6924 /* Get a reference to just this component. */
6925 if (modifier
== EXPAND_CONST_ADDRESS
6926 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6927 op0
= gen_rtx_MEM (mode1
, plus_constant (XEXP (op0
, 0),
6928 (bitpos
/ BITS_PER_UNIT
)));
6930 op0
= change_address (op0
, mode1
,
6931 plus_constant (XEXP (op0
, 0),
6932 (bitpos
/ BITS_PER_UNIT
)));
6934 set_mem_attributes (op0
, exp
, 0);
6935 if (GET_CODE (XEXP (op0
, 0)) == REG
)
6936 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6938 MEM_VOLATILE_P (op0
) |= volatilep
;
6939 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
6940 || modifier
== EXPAND_CONST_ADDRESS
6941 || modifier
== EXPAND_INITIALIZER
)
6943 else if (target
== 0)
6944 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6946 convert_move (target
, op0
, unsignedp
);
6950 /* Intended for a reference to a buffer of a file-object in Pascal.
6951 But it's not certain that a special tree code will really be
6952 necessary for these. INDIRECT_REF might work for them. */
6958 /* Pascal set IN expression.
6961 rlo = set_low - (set_low%bits_per_word);
6962 the_word = set [ (index - rlo)/bits_per_word ];
6963 bit_index = index % bits_per_word;
6964 bitmask = 1 << bit_index;
6965 return !!(the_word & bitmask); */
6967 tree set
= TREE_OPERAND (exp
, 0);
6968 tree index
= TREE_OPERAND (exp
, 1);
6969 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
6970 tree set_type
= TREE_TYPE (set
);
6971 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
6972 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
6973 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
6974 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
6975 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
6976 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
6977 rtx setaddr
= XEXP (setval
, 0);
6978 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
6980 rtx diff
, quo
, rem
, addr
, bit
, result
;
6982 preexpand_calls (exp
);
6984 /* If domain is empty, answer is no. Likewise if index is constant
6985 and out of bounds. */
6986 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
6987 && TREE_CODE (set_low_bound
) == INTEGER_CST
6988 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
6989 || (TREE_CODE (index
) == INTEGER_CST
6990 && TREE_CODE (set_low_bound
) == INTEGER_CST
6991 && tree_int_cst_lt (index
, set_low_bound
))
6992 || (TREE_CODE (set_high_bound
) == INTEGER_CST
6993 && TREE_CODE (index
) == INTEGER_CST
6994 && tree_int_cst_lt (set_high_bound
, index
))))
6998 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7000 /* If we get here, we have to generate the code for both cases
7001 (in range and out of range). */
7003 op0
= gen_label_rtx ();
7004 op1
= gen_label_rtx ();
7006 if (! (GET_CODE (index_val
) == CONST_INT
7007 && GET_CODE (lo_r
) == CONST_INT
))
7009 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7010 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7013 if (! (GET_CODE (index_val
) == CONST_INT
7014 && GET_CODE (hi_r
) == CONST_INT
))
7016 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7017 GET_MODE (index_val
), iunsignedp
, 0, op1
);
7020 /* Calculate the element number of bit zero in the first word
7022 if (GET_CODE (lo_r
) == CONST_INT
)
7023 rlow
= GEN_INT (INTVAL (lo_r
)
7024 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7026 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7027 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7028 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7030 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7031 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7033 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7034 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7035 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7036 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7038 addr
= memory_address (byte_mode
,
7039 expand_binop (index_mode
, add_optab
, diff
,
7040 setaddr
, NULL_RTX
, iunsignedp
,
7043 /* Extract the bit we want to examine. */
7044 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7045 gen_rtx_MEM (byte_mode
, addr
),
7046 make_tree (TREE_TYPE (index
), rem
),
7048 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7049 GET_MODE (target
) == byte_mode
? target
: 0,
7050 1, OPTAB_LIB_WIDEN
);
7052 if (result
!= target
)
7053 convert_move (target
, result
, 1);
7055 /* Output the code to handle the out-of-range case. */
7058 emit_move_insn (target
, const0_rtx
);
7063 case WITH_CLEANUP_EXPR
:
7064 if (RTL_EXPR_RTL (exp
) == 0)
7067 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7068 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
7070 /* That's it for this cleanup. */
7071 TREE_OPERAND (exp
, 2) = 0;
7073 return RTL_EXPR_RTL (exp
);
7075 case CLEANUP_POINT_EXPR
:
7077 /* Start a new binding layer that will keep track of all cleanup
7078 actions to be performed. */
7079 expand_start_bindings (2);
7081 target_temp_slot_level
= temp_slot_level
;
7083 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
7084 /* If we're going to use this value, load it up now. */
7086 op0
= force_not_mem (op0
);
7087 preserve_temp_slots (op0
);
7088 expand_end_bindings (NULL_TREE
, 0, 0);
7093 /* Check for a built-in function. */
7094 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7095 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7097 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7098 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7100 /* If this call was expanded already by preexpand_calls,
7101 just return the result we got. */
7102 if (CALL_EXPR_RTL (exp
) != 0)
7103 return CALL_EXPR_RTL (exp
);
7105 return expand_call (exp
, target
, ignore
);
7107 case NON_LVALUE_EXPR
:
7110 case REFERENCE_EXPR
:
7111 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7114 if (TREE_CODE (type
) == UNION_TYPE
)
7116 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7118 /* If both input and output are BLKmode, this conversion
7119 isn't actually doing anything unless we need to make the
7120 alignment stricter. */
7121 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7122 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7123 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7124 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7129 if (mode
!= BLKmode
)
7130 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7132 target
= assign_temp (type
, 0, 1, 1);
7135 if (GET_CODE (target
) == MEM
)
7136 /* Store data into beginning of memory target. */
7137 store_expr (TREE_OPERAND (exp
, 0),
7138 change_address (target
, TYPE_MODE (valtype
), 0), 0);
7140 else if (GET_CODE (target
) == REG
)
7141 /* Store this field into a union of the proper type. */
7142 store_field (target
,
7143 MIN ((int_size_in_bytes (TREE_TYPE
7144 (TREE_OPERAND (exp
, 0)))
7146 GET_MODE_BITSIZE (mode
)),
7147 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7148 VOIDmode
, 0, BITS_PER_UNIT
,
7149 int_size_in_bytes (type
), 0);
7153 /* Return the entire union. */
7157 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7159 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7162 /* If the signedness of the conversion differs and OP0 is
7163 a promoted SUBREG, clear that indication since we now
7164 have to do the proper extension. */
7165 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7166 && GET_CODE (op0
) == SUBREG
)
7167 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7172 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7173 if (GET_MODE (op0
) == mode
)
7176 /* If OP0 is a constant, just convert it into the proper mode. */
7177 if (CONSTANT_P (op0
))
7179 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7180 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7182 if (modifier
== EXPAND_INITIALIZER
)
7183 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7187 convert_to_mode (mode
, op0
,
7188 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7190 convert_move (target
, op0
,
7191 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7195 /* We come here from MINUS_EXPR when the second operand is a
7198 this_optab
= add_optab
;
7200 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7201 something else, make sure we add the register to the constant and
7202 then to the other thing. This case can occur during strength
7203 reduction and doing it this way will produce better code if the
7204 frame pointer or argument pointer is eliminated.
7206 fold-const.c will ensure that the constant is always in the inner
7207 PLUS_EXPR, so the only case we need to do anything about is if
7208 sp, ap, or fp is our second argument, in which case we must swap
7209 the innermost first argument and our second argument. */
7211 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7212 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7213 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7214 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7215 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7216 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7218 tree t
= TREE_OPERAND (exp
, 1);
7220 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7221 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7224 /* If the result is to be ptr_mode and we are adding an integer to
7225 something, we might be forming a constant. So try to use
7226 plus_constant. If it produces a sum and we can't accept it,
7227 use force_operand. This allows P = &ARR[const] to generate
7228 efficient code on machines where a SYMBOL_REF is not a valid
7231 If this is an EXPAND_SUM call, always return the sum. */
7232 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7233 || mode
== ptr_mode
)
7235 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7236 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7237 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7241 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7243 /* Use immed_double_const to ensure that the constant is
7244 truncated according to the mode of OP1, then sign extended
7245 to a HOST_WIDE_INT. Using the constant directly can result
7246 in non-canonical RTL in a 64x32 cross compile. */
7248 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7250 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7251 op1
= plus_constant (op1
, INTVAL (constant_part
));
7252 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7253 op1
= force_operand (op1
, target
);
7257 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7258 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7259 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7263 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7265 if (! CONSTANT_P (op0
))
7267 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7268 VOIDmode
, modifier
);
7269 /* Don't go to both_summands if modifier
7270 says it's not right to return a PLUS. */
7271 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7275 /* Use immed_double_const to ensure that the constant is
7276 truncated according to the mode of OP1, then sign extended
7277 to a HOST_WIDE_INT. Using the constant directly can result
7278 in non-canonical RTL in a 64x32 cross compile. */
7280 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7282 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7283 op0
= plus_constant (op0
, INTVAL (constant_part
));
7284 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7285 op0
= force_operand (op0
, target
);
7290 /* No sense saving up arithmetic to be done
7291 if it's all in the wrong mode to form part of an address.
7292 And force_operand won't know whether to sign-extend or
7294 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7295 || mode
!= ptr_mode
)
7298 preexpand_calls (exp
);
7299 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7302 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7303 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7306 /* Make sure any term that's a sum with a constant comes last. */
7307 if (GET_CODE (op0
) == PLUS
7308 && CONSTANT_P (XEXP (op0
, 1)))
7314 /* If adding to a sum including a constant,
7315 associate it to put the constant outside. */
7316 if (GET_CODE (op1
) == PLUS
7317 && CONSTANT_P (XEXP (op1
, 1)))
7319 rtx constant_term
= const0_rtx
;
7321 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7324 /* Ensure that MULT comes first if there is one. */
7325 else if (GET_CODE (op0
) == MULT
)
7326 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7328 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7330 /* Let's also eliminate constants from op0 if possible. */
7331 op0
= eliminate_constant_term (op0
, &constant_term
);
7333 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7334 their sum should be a constant. Form it into OP1, since the
7335 result we want will then be OP0 + OP1. */
7337 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7342 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7345 /* Put a constant term last and put a multiplication first. */
7346 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7347 temp
= op1
, op1
= op0
, op0
= temp
;
7349 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7350 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
7353 /* For initializers, we are allowed to return a MINUS of two
7354 symbolic constants. Here we handle all cases when both operands
7356 /* Handle difference of two symbolic constants,
7357 for the sake of an initializer. */
7358 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7359 && really_constant_p (TREE_OPERAND (exp
, 0))
7360 && really_constant_p (TREE_OPERAND (exp
, 1)))
7362 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7363 VOIDmode
, ro_modifier
);
7364 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7365 VOIDmode
, ro_modifier
);
7367 /* If the last operand is a CONST_INT, use plus_constant of
7368 the negated constant. Else make the MINUS. */
7369 if (GET_CODE (op1
) == CONST_INT
)
7370 return plus_constant (op0
, - INTVAL (op1
));
7372 return gen_rtx_MINUS (mode
, op0
, op1
);
7374 /* Convert A - const to A + (-const). */
7375 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7377 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7378 TREE_OPERAND (exp
, 1)));
7380 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7381 /* If we can't negate the constant in TYPE, leave it alone and
7382 expand_binop will negate it for us. We used to try to do it
7383 here in the signed version of TYPE, but that doesn't work
7384 on POINTER_TYPEs. */;
7387 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7391 this_optab
= sub_optab
;
7395 preexpand_calls (exp
);
7396 /* If first operand is constant, swap them.
7397 Thus the following special case checks need only
7398 check the second operand. */
7399 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7401 register tree t1
= TREE_OPERAND (exp
, 0);
7402 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7403 TREE_OPERAND (exp
, 1) = t1
;
7406 /* Attempt to return something suitable for generating an
7407 indexed address, for machines that support that. */
7409 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7410 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7411 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7413 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7416 /* Apply distributive law if OP0 is x+c. */
7417 if (GET_CODE (op0
) == PLUS
7418 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7423 (mode
, XEXP (op0
, 0),
7424 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7425 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7426 * INTVAL (XEXP (op0
, 1))));
7428 if (GET_CODE (op0
) != REG
)
7429 op0
= force_operand (op0
, NULL_RTX
);
7430 if (GET_CODE (op0
) != REG
)
7431 op0
= copy_to_mode_reg (mode
, op0
);
7434 gen_rtx_MULT (mode
, op0
,
7435 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7438 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7441 /* Check for multiplying things that have been extended
7442 from a narrower type. If this machine supports multiplying
7443 in that narrower type with a result in the desired type,
7444 do it that way, and avoid the explicit type-conversion. */
7445 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7446 && TREE_CODE (type
) == INTEGER_TYPE
7447 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7448 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7449 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7450 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7451 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7452 /* Don't use a widening multiply if a shift will do. */
7453 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7454 > HOST_BITS_PER_WIDE_INT
)
7455 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7457 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7458 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7460 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7461 /* If both operands are extended, they must either both
7462 be zero-extended or both be sign-extended. */
7463 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7465 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7467 enum machine_mode innermode
7468 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7469 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7470 ? smul_widen_optab
: umul_widen_optab
);
7471 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7472 ? umul_widen_optab
: smul_widen_optab
);
7473 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7475 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7477 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7478 NULL_RTX
, VOIDmode
, 0);
7479 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7480 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7483 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7484 NULL_RTX
, VOIDmode
, 0);
7487 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7488 && innermode
== word_mode
)
7491 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7492 NULL_RTX
, VOIDmode
, 0);
7493 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7494 op1
= convert_modes (innermode
, mode
,
7495 expand_expr (TREE_OPERAND (exp
, 1),
7496 NULL_RTX
, VOIDmode
, 0),
7499 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7500 NULL_RTX
, VOIDmode
, 0);
7501 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7502 unsignedp
, OPTAB_LIB_WIDEN
);
7503 htem
= expand_mult_highpart_adjust (innermode
,
7504 gen_highpart (innermode
, temp
),
7506 gen_highpart (innermode
, temp
),
7508 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7513 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7514 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7515 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7517 case TRUNC_DIV_EXPR
:
7518 case FLOOR_DIV_EXPR
:
7520 case ROUND_DIV_EXPR
:
7521 case EXACT_DIV_EXPR
:
7522 preexpand_calls (exp
);
7523 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7525 /* Possible optimization: compute the dividend with EXPAND_SUM
7526 then if the divisor is constant can optimize the case
7527 where some terms of the dividend have coeffs divisible by it. */
7528 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7529 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7530 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7533 this_optab
= flodiv_optab
;
7536 case TRUNC_MOD_EXPR
:
7537 case FLOOR_MOD_EXPR
:
7539 case ROUND_MOD_EXPR
:
7540 preexpand_calls (exp
);
7541 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7543 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7544 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7545 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7547 case FIX_ROUND_EXPR
:
7548 case FIX_FLOOR_EXPR
:
7550 abort (); /* Not used for C. */
7552 case FIX_TRUNC_EXPR
:
7553 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7555 target
= gen_reg_rtx (mode
);
7556 expand_fix (target
, op0
, unsignedp
);
7560 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7562 target
= gen_reg_rtx (mode
);
7563 /* expand_float can't figure out what to do if FROM has VOIDmode.
7564 So give it the correct mode. With -O, cse will optimize this. */
7565 if (GET_MODE (op0
) == VOIDmode
)
7566 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7568 expand_float (target
, op0
,
7569 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7573 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7574 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
7580 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7582 /* Handle complex values specially. */
7583 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
7584 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
7585 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
7587 /* Unsigned abs is simply the operand. Testing here means we don't
7588 risk generating incorrect code below. */
7589 if (TREE_UNSIGNED (type
))
7592 return expand_abs (mode
, op0
, target
,
7593 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
7597 target
= original_target
;
7598 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7599 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7600 || GET_MODE (target
) != mode
7601 || (GET_CODE (target
) == REG
7602 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7603 target
= gen_reg_rtx (mode
);
7604 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7605 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7607 /* First try to do it with a special MIN or MAX instruction.
7608 If that does not win, use a conditional jump to select the proper
7610 this_optab
= (TREE_UNSIGNED (type
)
7611 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7612 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7614 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7619 /* At this point, a MEM target is no longer useful; we will get better
7622 if (GET_CODE (target
) == MEM
)
7623 target
= gen_reg_rtx (mode
);
7626 emit_move_insn (target
, op0
);
7628 op0
= gen_label_rtx ();
7630 /* If this mode is an integer too wide to compare properly,
7631 compare word by word. Rely on cse to optimize constant cases. */
7632 if (GET_MODE_CLASS (mode
) == MODE_INT
7633 && ! can_compare_p (GE
, mode
, ccp_jump
))
7635 if (code
== MAX_EXPR
)
7636 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7637 target
, op1
, NULL_RTX
, op0
);
7639 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7640 op1
, target
, NULL_RTX
, op0
);
7644 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7645 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7646 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7649 emit_move_insn (target
, op1
);
7654 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7655 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7661 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7662 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
7667 /* ??? Can optimize bitwise operations with one arg constant.
7668 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7669 and (a bitwise1 b) bitwise2 b (etc)
7670 but that is probably not worth while. */
7672 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7673 boolean values when we want in all cases to compute both of them. In
7674 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7675 as actual zero-or-1 values and then bitwise anding. In cases where
7676 there cannot be any side effects, better code would be made by
7677 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7678 how to recognize those cases. */
7680 case TRUTH_AND_EXPR
:
7682 this_optab
= and_optab
;
7687 this_optab
= ior_optab
;
7690 case TRUTH_XOR_EXPR
:
7692 this_optab
= xor_optab
;
7699 preexpand_calls (exp
);
7700 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7702 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7703 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7706 /* Could determine the answer when only additive constants differ. Also,
7707 the addition of one can be handled by changing the condition. */
7714 case UNORDERED_EXPR
:
7721 preexpand_calls (exp
);
7722 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
7726 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7727 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7729 && GET_CODE (original_target
) == REG
7730 && (GET_MODE (original_target
)
7731 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7733 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7736 if (temp
!= original_target
)
7737 temp
= copy_to_reg (temp
);
7739 op1
= gen_label_rtx ();
7740 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7741 GET_MODE (temp
), unsignedp
, 0, op1
);
7742 emit_move_insn (temp
, const1_rtx
);
7747 /* If no set-flag instruction, must generate a conditional
7748 store into a temporary variable. Drop through
7749 and handle this like && and ||. */
7751 case TRUTH_ANDIF_EXPR
:
7752 case TRUTH_ORIF_EXPR
:
7754 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
7755 /* Make sure we don't have a hard reg (such as function's return
7756 value) live across basic blocks, if not optimizing. */
7757 || (!optimize
&& GET_CODE (target
) == REG
7758 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7759 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7762 emit_clr_insn (target
);
7764 op1
= gen_label_rtx ();
7765 jumpifnot (exp
, op1
);
7768 emit_0_to_1_insn (target
);
7771 return ignore
? const0_rtx
: target
;
7773 case TRUTH_NOT_EXPR
:
7774 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7775 /* The parser is careful to generate TRUTH_NOT_EXPR
7776 only with operands that are always zero or one. */
7777 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
7778 target
, 1, OPTAB_LIB_WIDEN
);
7784 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7786 return expand_expr (TREE_OPERAND (exp
, 1),
7787 (ignore
? const0_rtx
: target
),
7791 /* If we would have a "singleton" (see below) were it not for a
7792 conversion in each arm, bring that conversion back out. */
7793 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7794 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7795 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7796 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7798 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7799 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7801 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7802 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7803 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7804 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7805 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7806 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7807 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7808 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7809 return expand_expr (build1 (NOP_EXPR
, type
,
7810 build (COND_EXPR
, TREE_TYPE (true),
7811 TREE_OPERAND (exp
, 0),
7813 target
, tmode
, modifier
);
7817 /* Note that COND_EXPRs whose type is a structure or union
7818 are required to be constructed to contain assignments of
7819 a temporary variable, so that we can evaluate them here
7820 for side effect only. If type is void, we must do likewise. */
7822 /* If an arm of the branch requires a cleanup,
7823 only that cleanup is performed. */
7826 tree binary_op
= 0, unary_op
= 0;
7828 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7829 convert it to our mode, if necessary. */
7830 if (integer_onep (TREE_OPERAND (exp
, 1))
7831 && integer_zerop (TREE_OPERAND (exp
, 2))
7832 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7836 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7841 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
7842 if (GET_MODE (op0
) == mode
)
7846 target
= gen_reg_rtx (mode
);
7847 convert_move (target
, op0
, unsignedp
);
7851 /* Check for X ? A + B : A. If we have this, we can copy A to the
7852 output and conditionally add B. Similarly for unary operations.
7853 Don't do this if X has side-effects because those side effects
7854 might affect A or B and the "?" operation is a sequence point in
7855 ANSI. (operand_equal_p tests for side effects.) */
7857 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
7858 && operand_equal_p (TREE_OPERAND (exp
, 2),
7859 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7860 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
7861 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
7862 && operand_equal_p (TREE_OPERAND (exp
, 1),
7863 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7864 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
7865 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
7866 && operand_equal_p (TREE_OPERAND (exp
, 2),
7867 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7868 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
7869 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
7870 && operand_equal_p (TREE_OPERAND (exp
, 1),
7871 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7872 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
7874 /* If we are not to produce a result, we have no target. Otherwise,
7875 if a target was specified use it; it will not be used as an
7876 intermediate target unless it is safe. If no target, use a
7881 else if (original_target
7882 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
7883 || (singleton
&& GET_CODE (original_target
) == REG
7884 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
7885 && original_target
== var_rtx (singleton
)))
7886 && GET_MODE (original_target
) == mode
7887 #ifdef HAVE_conditional_move
7888 && (! can_conditionally_move_p (mode
)
7889 || GET_CODE (original_target
) == REG
7890 || TREE_ADDRESSABLE (type
))
7892 && ! (GET_CODE (original_target
) == MEM
7893 && MEM_VOLATILE_P (original_target
)))
7894 temp
= original_target
;
7895 else if (TREE_ADDRESSABLE (type
))
7898 temp
= assign_temp (type
, 0, 0, 1);
7900 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7901 do the test of X as a store-flag operation, do this as
7902 A + ((X != 0) << log C). Similarly for other simple binary
7903 operators. Only do for C == 1 if BRANCH_COST is low. */
7904 if (temp
&& singleton
&& binary_op
7905 && (TREE_CODE (binary_op
) == PLUS_EXPR
7906 || TREE_CODE (binary_op
) == MINUS_EXPR
7907 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
7908 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
7909 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
7910 : integer_onep (TREE_OPERAND (binary_op
, 1)))
7911 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7914 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
7915 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
7916 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
7919 /* If we had X ? A : A + 1, do this as A + (X == 0).
7921 We have to invert the truth value here and then put it
7922 back later if do_store_flag fails. We cannot simply copy
7923 TREE_OPERAND (exp, 0) to another variable and modify that
7924 because invert_truthvalue can modify the tree pointed to
7926 if (singleton
== TREE_OPERAND (exp
, 1))
7927 TREE_OPERAND (exp
, 0)
7928 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7930 result
= do_store_flag (TREE_OPERAND (exp
, 0),
7931 (safe_from_p (temp
, singleton
, 1)
7933 mode
, BRANCH_COST
<= 1);
7935 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
7936 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
7937 build_int_2 (tree_log2
7941 (safe_from_p (temp
, singleton
, 1)
7942 ? temp
: NULL_RTX
), 0);
7946 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
7947 return expand_binop (mode
, boptab
, op1
, result
, temp
,
7948 unsignedp
, OPTAB_LIB_WIDEN
);
7950 else if (singleton
== TREE_OPERAND (exp
, 1))
7951 TREE_OPERAND (exp
, 0)
7952 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7955 do_pending_stack_adjust ();
7957 op0
= gen_label_rtx ();
7959 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
7963 /* If the target conflicts with the other operand of the
7964 binary op, we can't use it. Also, we can't use the target
7965 if it is a hard register, because evaluating the condition
7966 might clobber it. */
7968 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
7969 || (GET_CODE (temp
) == REG
7970 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
7971 temp
= gen_reg_rtx (mode
);
7972 store_expr (singleton
, temp
, 0);
7975 expand_expr (singleton
,
7976 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7977 if (singleton
== TREE_OPERAND (exp
, 1))
7978 jumpif (TREE_OPERAND (exp
, 0), op0
);
7980 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7982 start_cleanup_deferral ();
7983 if (binary_op
&& temp
== 0)
7984 /* Just touch the other operand. */
7985 expand_expr (TREE_OPERAND (binary_op
, 1),
7986 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7988 store_expr (build (TREE_CODE (binary_op
), type
,
7989 make_tree (type
, temp
),
7990 TREE_OPERAND (binary_op
, 1)),
7993 store_expr (build1 (TREE_CODE (unary_op
), type
,
7994 make_tree (type
, temp
)),
7998 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7999 comparison operator. If we have one of these cases, set the
8000 output to A, branch on A (cse will merge these two references),
8001 then set the output to FOO. */
8003 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8004 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8005 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8006 TREE_OPERAND (exp
, 1), 0)
8007 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8008 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8009 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8011 if (GET_CODE (temp
) == REG
8012 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8013 temp
= gen_reg_rtx (mode
);
8014 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8015 jumpif (TREE_OPERAND (exp
, 0), op0
);
8017 start_cleanup_deferral ();
8018 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8022 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8023 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8024 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8025 TREE_OPERAND (exp
, 2), 0)
8026 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8027 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8028 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8030 if (GET_CODE (temp
) == REG
8031 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8032 temp
= gen_reg_rtx (mode
);
8033 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8034 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8036 start_cleanup_deferral ();
8037 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8042 op1
= gen_label_rtx ();
8043 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8045 start_cleanup_deferral ();
8047 /* One branch of the cond can be void, if it never returns. For
8048 example A ? throw : E */
8050 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8051 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8053 expand_expr (TREE_OPERAND (exp
, 1),
8054 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8055 end_cleanup_deferral ();
8057 emit_jump_insn (gen_jump (op1
));
8060 start_cleanup_deferral ();
8062 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8063 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8065 expand_expr (TREE_OPERAND (exp
, 2),
8066 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8069 end_cleanup_deferral ();
8080 /* Something needs to be initialized, but we didn't know
8081 where that thing was when building the tree. For example,
8082 it could be the return value of a function, or a parameter
8083 to a function which lays down in the stack, or a temporary
8084 variable which must be passed by reference.
8086 We guarantee that the expression will either be constructed
8087 or copied into our original target. */
8089 tree slot
= TREE_OPERAND (exp
, 0);
8090 tree cleanups
= NULL_TREE
;
8093 if (TREE_CODE (slot
) != VAR_DECL
)
8097 target
= original_target
;
8099 /* Set this here so that if we get a target that refers to a
8100 register variable that's already been used, put_reg_into_stack
8101 knows that it should fix up those uses. */
8102 TREE_USED (slot
) = 1;
8106 if (DECL_RTL (slot
) != 0)
8108 target
= DECL_RTL (slot
);
8109 /* If we have already expanded the slot, so don't do
8111 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8116 target
= assign_temp (type
, 2, 0, 1);
8117 /* All temp slots at this level must not conflict. */
8118 preserve_temp_slots (target
);
8119 DECL_RTL (slot
) = target
;
8120 if (TREE_ADDRESSABLE (slot
))
8121 put_var_into_stack (slot
);
8123 /* Since SLOT is not known to the called function
8124 to belong to its stack frame, we must build an explicit
8125 cleanup. This case occurs when we must build up a reference
8126 to pass the reference as an argument. In this case,
8127 it is very likely that such a reference need not be
8130 if (TREE_OPERAND (exp
, 2) == 0)
8131 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8132 cleanups
= TREE_OPERAND (exp
, 2);
8137 /* This case does occur, when expanding a parameter which
8138 needs to be constructed on the stack. The target
8139 is the actual stack address that we want to initialize.
8140 The function we call will perform the cleanup in this case. */
8142 /* If we have already assigned it space, use that space,
8143 not target that we were passed in, as our target
8144 parameter is only a hint. */
8145 if (DECL_RTL (slot
) != 0)
8147 target
= DECL_RTL (slot
);
8148 /* If we have already expanded the slot, so don't do
8150 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8155 DECL_RTL (slot
) = target
;
8156 /* If we must have an addressable slot, then make sure that
8157 the RTL that we just stored in slot is OK. */
8158 if (TREE_ADDRESSABLE (slot
))
8159 put_var_into_stack (slot
);
8163 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8164 /* Mark it as expanded. */
8165 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8167 store_expr (exp1
, target
, 0);
8169 expand_decl_cleanup (NULL_TREE
, cleanups
);
8176 tree lhs
= TREE_OPERAND (exp
, 0);
8177 tree rhs
= TREE_OPERAND (exp
, 1);
8178 tree noncopied_parts
= 0;
8179 tree lhs_type
= TREE_TYPE (lhs
);
8181 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8182 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
8183 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
8184 TYPE_NONCOPIED_PARTS (lhs_type
));
8185 while (noncopied_parts
!= 0)
8187 expand_assignment (TREE_VALUE (noncopied_parts
),
8188 TREE_PURPOSE (noncopied_parts
), 0, 0);
8189 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8196 /* If lhs is complex, expand calls in rhs before computing it.
8197 That's so we don't compute a pointer and save it over a call.
8198 If lhs is simple, compute it first so we can give it as a
8199 target if the rhs is just a call. This avoids an extra temp and copy
8200 and that prevents a partial-subsumption which makes bad code.
8201 Actually we could treat component_ref's of vars like vars. */
8203 tree lhs
= TREE_OPERAND (exp
, 0);
8204 tree rhs
= TREE_OPERAND (exp
, 1);
8205 tree noncopied_parts
= 0;
8206 tree lhs_type
= TREE_TYPE (lhs
);
8210 if (TREE_CODE (lhs
) != VAR_DECL
8211 && TREE_CODE (lhs
) != RESULT_DECL
8212 && TREE_CODE (lhs
) != PARM_DECL
8213 && ! (TREE_CODE (lhs
) == INDIRECT_REF
8214 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs
, 0)))))
8215 preexpand_calls (exp
);
8217 /* Check for |= or &= of a bitfield of size one into another bitfield
8218 of size 1. In this case, (unless we need the result of the
8219 assignment) we can do this more efficiently with a
8220 test followed by an assignment, if necessary.
8222 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8223 things change so we do, this code should be enhanced to
8226 && TREE_CODE (lhs
) == COMPONENT_REF
8227 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8228 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8229 && TREE_OPERAND (rhs
, 0) == lhs
8230 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8231 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8232 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8234 rtx label
= gen_label_rtx ();
8236 do_jump (TREE_OPERAND (rhs
, 1),
8237 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8238 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8239 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8240 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8242 : integer_zero_node
)),
8244 do_pending_stack_adjust ();
8249 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8250 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8251 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8252 TYPE_NONCOPIED_PARTS (lhs_type
));
8254 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8255 while (noncopied_parts
!= 0)
8257 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8258 TREE_VALUE (noncopied_parts
), 0, 0);
8259 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8265 if (!TREE_OPERAND (exp
, 0))
8266 expand_null_return ();
8268 expand_return (TREE_OPERAND (exp
, 0));
8271 case PREINCREMENT_EXPR
:
8272 case PREDECREMENT_EXPR
:
8273 return expand_increment (exp
, 0, ignore
);
8275 case POSTINCREMENT_EXPR
:
8276 case POSTDECREMENT_EXPR
:
8277 /* Faster to treat as pre-increment if result is not used. */
8278 return expand_increment (exp
, ! ignore
, ignore
);
8281 /* If nonzero, TEMP will be set to the address of something that might
8282 be a MEM corresponding to a stack slot. */
8285 /* Are we taking the address of a nested function? */
8286 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8287 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8288 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8289 && ! TREE_STATIC (exp
))
8291 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8292 op0
= force_operand (op0
, target
);
8294 /* If we are taking the address of something erroneous, just
8296 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8300 /* We make sure to pass const0_rtx down if we came in with
8301 ignore set, to avoid doing the cleanups twice for something. */
8302 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8303 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8304 (modifier
== EXPAND_INITIALIZER
8305 ? modifier
: EXPAND_CONST_ADDRESS
));
8307 /* If we are going to ignore the result, OP0 will have been set
8308 to const0_rtx, so just return it. Don't get confused and
8309 think we are taking the address of the constant. */
8313 op0
= protect_from_queue (op0
, 0);
8315 /* We would like the object in memory. If it is a constant, we can
8316 have it be statically allocated into memory. For a non-constant,
8317 we need to allocate some memory and store the value into it. */
8319 if (CONSTANT_P (op0
))
8320 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8322 else if (GET_CODE (op0
) == MEM
)
8324 mark_temp_addr_taken (op0
);
8325 temp
= XEXP (op0
, 0);
8328 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8329 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8331 /* If this object is in a register, it must be not
8333 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8334 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8336 mark_temp_addr_taken (memloc
);
8337 emit_move_insn (memloc
, op0
);
8341 if (GET_CODE (op0
) != MEM
)
8344 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8346 temp
= XEXP (op0
, 0);
8347 #ifdef POINTERS_EXTEND_UNSIGNED
8348 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8349 && mode
== ptr_mode
)
8350 temp
= convert_memory_address (ptr_mode
, temp
);
8355 op0
= force_operand (XEXP (op0
, 0), target
);
8358 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8359 op0
= force_reg (Pmode
, op0
);
8361 if (GET_CODE (op0
) == REG
8362 && ! REG_USERVAR_P (op0
))
8363 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8365 /* If we might have had a temp slot, add an equivalent address
8368 update_temp_slot_address (temp
, op0
);
8370 #ifdef POINTERS_EXTEND_UNSIGNED
8371 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8372 && mode
== ptr_mode
)
8373 op0
= convert_memory_address (ptr_mode
, op0
);
8378 case ENTRY_VALUE_EXPR
:
8381 /* COMPLEX type for Extended Pascal & Fortran */
8384 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8387 /* Get the rtx code of the operands. */
8388 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8389 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8392 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8396 /* Move the real (op0) and imaginary (op1) parts to their location. */
8397 emit_move_insn (gen_realpart (mode
, target
), op0
);
8398 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8400 insns
= get_insns ();
8403 /* Complex construction should appear as a single unit. */
8404 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8405 each with a separate pseudo as destination.
8406 It's not correct for flow to treat them as a unit. */
8407 if (GET_CODE (target
) != CONCAT
)
8408 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8416 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8417 return gen_realpart (mode
, op0
);
8420 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8421 return gen_imagpart (mode
, op0
);
8425 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8429 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8432 target
= gen_reg_rtx (mode
);
8436 /* Store the realpart and the negated imagpart to target. */
8437 emit_move_insn (gen_realpart (partmode
, target
),
8438 gen_realpart (partmode
, op0
));
8440 imag_t
= gen_imagpart (partmode
, target
);
8441 temp
= expand_unop (partmode
, neg_optab
,
8442 gen_imagpart (partmode
, op0
), imag_t
, 0);
8444 emit_move_insn (imag_t
, temp
);
8446 insns
= get_insns ();
8449 /* Conjugate should appear as a single unit
8450 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8451 each with a separate pseudo as destination.
8452 It's not correct for flow to treat them as a unit. */
8453 if (GET_CODE (target
) != CONCAT
)
8454 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8461 case TRY_CATCH_EXPR
:
8463 tree handler
= TREE_OPERAND (exp
, 1);
8465 expand_eh_region_start ();
8467 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8469 expand_eh_region_end (handler
);
8474 case TRY_FINALLY_EXPR
:
8476 tree try_block
= TREE_OPERAND (exp
, 0);
8477 tree finally_block
= TREE_OPERAND (exp
, 1);
8478 rtx finally_label
= gen_label_rtx ();
8479 rtx done_label
= gen_label_rtx ();
8480 rtx return_link
= gen_reg_rtx (Pmode
);
8481 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8482 (tree
) finally_label
, (tree
) return_link
);
8483 TREE_SIDE_EFFECTS (cleanup
) = 1;
8485 /* Start a new binding layer that will keep track of all cleanup
8486 actions to be performed. */
8487 expand_start_bindings (2);
8489 target_temp_slot_level
= temp_slot_level
;
8491 expand_decl_cleanup (NULL_TREE
, cleanup
);
8492 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8494 preserve_temp_slots (op0
);
8495 expand_end_bindings (NULL_TREE
, 0, 0);
8496 emit_jump (done_label
);
8497 emit_label (finally_label
);
8498 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8499 emit_indirect_jump (return_link
);
8500 emit_label (done_label
);
8504 case GOTO_SUBROUTINE_EXPR
:
8506 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8507 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8508 rtx return_address
= gen_label_rtx ();
8509 emit_move_insn (return_link
,
8510 gen_rtx_LABEL_REF (Pmode
, return_address
));
8512 emit_label (return_address
);
8518 rtx dcc
= get_dynamic_cleanup_chain ();
8519 emit_move_insn (dcc
, validize_mem (gen_rtx_MEM (Pmode
, dcc
)));
8525 rtx dhc
= get_dynamic_handler_chain ();
8526 emit_move_insn (dhc
, validize_mem (gen_rtx_MEM (Pmode
, dhc
)));
8531 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8534 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8537 /* Here to do an ordinary binary operator, generating an instruction
8538 from the optab already placed in `this_optab'. */
8540 preexpand_calls (exp
);
8541 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8543 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8544 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8546 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8547 unsignedp
, OPTAB_LIB_WIDEN
);
8553 /* Similar to expand_expr, except that we don't specify a target, target
8554 mode, or modifier and we return the alignment of the inner type. This is
8555 used in cases where it is not necessary to align the result to the
8556 alignment of its type as long as we know the alignment of the result, for
8557 example for comparisons of BLKmode values. */
8560 expand_expr_unaligned (exp
, palign
)
8562 unsigned int *palign
;
8565 tree type
= TREE_TYPE (exp
);
8566 register enum machine_mode mode
= TYPE_MODE (type
);
8568 /* Default the alignment we return to that of the type. */
8569 *palign
= TYPE_ALIGN (type
);
8571 /* The only cases in which we do anything special is if the resulting mode
8573 if (mode
!= BLKmode
)
8574 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8576 switch (TREE_CODE (exp
))
8580 case NON_LVALUE_EXPR
:
8581 /* Conversions between BLKmode values don't change the underlying
8582 alignment or value. */
8583 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8584 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
8588 /* Much of the code for this case is copied directly from expand_expr.
8589 We need to duplicate it here because we will do something different
8590 in the fall-through case, so we need to handle the same exceptions
8593 tree array
= TREE_OPERAND (exp
, 0);
8594 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8595 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8596 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8599 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8602 /* Optimize the special-case of a zero lower bound.
8604 We convert the low_bound to sizetype to avoid some problems
8605 with constant folding. (E.g. suppose the lower bound is 1,
8606 and its mode is QI. Without the conversion, (ARRAY
8607 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8608 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8610 if (! integer_zerop (low_bound
))
8611 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8613 /* If this is a constant index into a constant array,
8614 just get the value from the array. Handle both the cases when
8615 we have an explicit constructor and when our operand is a variable
8616 that was declared const. */
8618 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8619 && 0 > compare_tree_int (index
,
8620 list_length (CONSTRUCTOR_ELTS
8621 (TREE_OPERAND (exp
, 0)))))
8625 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8626 i
= TREE_INT_CST_LOW (index
);
8627 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8631 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8634 else if (optimize
>= 1
8635 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8636 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8637 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8639 if (TREE_CODE (index
) == INTEGER_CST
)
8641 tree init
= DECL_INITIAL (array
);
8643 if (TREE_CODE (init
) == CONSTRUCTOR
)
8647 for (elem
= CONSTRUCTOR_ELTS (init
);
8648 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
8649 elem
= TREE_CHAIN (elem
))
8653 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8663 /* If the operand is a CONSTRUCTOR, we can just extract the
8664 appropriate field if it is present. Don't do this if we have
8665 already written the data since we want to refer to that copy
8666 and varasm.c assumes that's what we'll do. */
8667 if (TREE_CODE (exp
) != ARRAY_REF
8668 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8669 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8673 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8674 elt
= TREE_CHAIN (elt
))
8675 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8676 /* Note that unlike the case in expand_expr, we know this is
8677 BLKmode and hence not an integer. */
8678 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8682 enum machine_mode mode1
;
8683 HOST_WIDE_INT bitsize
, bitpos
;
8686 unsigned int alignment
;
8688 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8689 &mode1
, &unsignedp
, &volatilep
,
8692 /* If we got back the original object, something is wrong. Perhaps
8693 we are evaluating an expression too early. In any event, don't
8694 infinitely recurse. */
8698 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8700 /* If this is a constant, put it into a register if it is a
8701 legitimate constant and OFFSET is 0 and memory if it isn't. */
8702 if (CONSTANT_P (op0
))
8704 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8706 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8708 op0
= force_reg (inner_mode
, op0
);
8710 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8715 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8717 /* If this object is in a register, put it into memory.
8718 This case can't occur in C, but can in Ada if we have
8719 unchecked conversion of an expression from a scalar type to
8720 an array or record type. */
8721 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8722 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8724 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
8726 mark_temp_addr_taken (memloc
);
8727 emit_move_insn (memloc
, op0
);
8731 if (GET_CODE (op0
) != MEM
)
8734 if (GET_MODE (offset_rtx
) != ptr_mode
)
8736 #ifdef POINTERS_EXTEND_UNSIGNED
8737 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
8739 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
8743 op0
= change_address (op0
, VOIDmode
,
8744 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
8745 force_reg (ptr_mode
,
8749 /* Don't forget about volatility even if this is a bitfield. */
8750 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
8752 op0
= copy_rtx (op0
);
8753 MEM_VOLATILE_P (op0
) = 1;
8756 /* Check the access. */
8757 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
8762 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
8763 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
8765 /* Check the access right of the pointer. */
8766 in_check_memory_usage
= 1;
8767 if (size
> BITS_PER_UNIT
)
8768 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
8769 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
8770 TYPE_MODE (sizetype
),
8771 GEN_INT (MEMORY_USE_RO
),
8772 TYPE_MODE (integer_type_node
));
8773 in_check_memory_usage
= 0;
8776 /* In cases where an aligned union has an unaligned object
8777 as a field, we might be extracting a BLKmode value from
8778 an integer-mode (e.g., SImode) object. Handle this case
8779 by doing the extract into an object as wide as the field
8780 (which we know to be the width of a basic mode), then
8781 storing into memory, and changing the mode to BLKmode.
8782 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8783 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8784 if (mode1
== VOIDmode
8785 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8786 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
8787 && (TYPE_ALIGN (type
) > alignment
8788 || bitpos
% TYPE_ALIGN (type
) != 0)))
8790 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
8792 if (ext_mode
== BLKmode
)
8794 /* In this case, BITPOS must start at a byte boundary. */
8795 if (GET_CODE (op0
) != MEM
8796 || bitpos
% BITS_PER_UNIT
!= 0)
8799 op0
= change_address (op0
, VOIDmode
,
8800 plus_constant (XEXP (op0
, 0),
8801 bitpos
/ BITS_PER_UNIT
));
8805 rtx
new = assign_stack_temp (ext_mode
,
8806 bitsize
/ BITS_PER_UNIT
, 0);
8808 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
8809 unsignedp
, NULL_RTX
, ext_mode
,
8810 ext_mode
, alignment
,
8811 int_size_in_bytes (TREE_TYPE (tem
)));
8813 /* If the result is a record type and BITSIZE is narrower than
8814 the mode of OP0, an integral mode, and this is a big endian
8815 machine, we must put the field into the high-order bits. */
8816 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
8817 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
8818 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
8819 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
8820 size_int (GET_MODE_BITSIZE
8825 emit_move_insn (new, op0
);
8826 op0
= copy_rtx (new);
8827 PUT_MODE (op0
, BLKmode
);
8831 /* Get a reference to just this component. */
8832 op0
= change_address (op0
, mode1
,
8833 plus_constant (XEXP (op0
, 0),
8834 (bitpos
/ BITS_PER_UNIT
)));
8836 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
8838 /* Adjust the alignment in case the bit position is not
8839 a multiple of the alignment of the inner object. */
8840 while (bitpos
% alignment
!= 0)
8843 if (GET_CODE (XEXP (op0
, 0)) == REG
)
8844 mark_reg_pointer (XEXP (op0
, 0), alignment
);
8846 MEM_IN_STRUCT_P (op0
) = 1;
8847 MEM_VOLATILE_P (op0
) |= volatilep
;
8849 *palign
= alignment
;
8858 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8861 /* Return the tree node if a ARG corresponds to a string constant or zero
8862 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8863 in bytes within the string that ARG is accessing. The type of the
8864 offset will be `sizetype'. */
8867 string_constant (arg
, ptr_offset
)
8873 if (TREE_CODE (arg
) == ADDR_EXPR
8874 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8876 *ptr_offset
= size_zero_node
;
8877 return TREE_OPERAND (arg
, 0);
8879 else if (TREE_CODE (arg
) == PLUS_EXPR
)
8881 tree arg0
= TREE_OPERAND (arg
, 0);
8882 tree arg1
= TREE_OPERAND (arg
, 1);
8887 if (TREE_CODE (arg0
) == ADDR_EXPR
8888 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
8890 *ptr_offset
= convert (sizetype
, arg1
);
8891 return TREE_OPERAND (arg0
, 0);
8893 else if (TREE_CODE (arg1
) == ADDR_EXPR
8894 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
8896 *ptr_offset
= convert (sizetype
, arg0
);
8897 return TREE_OPERAND (arg1
, 0);
8904 /* Expand code for a post- or pre- increment or decrement
8905 and return the RTX for the result.
8906 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8909 expand_increment (exp
, post
, ignore
)
8913 register rtx op0
, op1
;
8914 register rtx temp
, value
;
8915 register tree incremented
= TREE_OPERAND (exp
, 0);
8916 optab this_optab
= add_optab
;
8918 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8919 int op0_is_copy
= 0;
8920 int single_insn
= 0;
8921 /* 1 means we can't store into OP0 directly,
8922 because it is a subreg narrower than a word,
8923 and we don't dare clobber the rest of the word. */
8926 /* Stabilize any component ref that might need to be
8927 evaluated more than once below. */
8929 || TREE_CODE (incremented
) == BIT_FIELD_REF
8930 || (TREE_CODE (incremented
) == COMPONENT_REF
8931 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8932 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8933 incremented
= stabilize_reference (incremented
);
8934 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8935 ones into save exprs so that they don't accidentally get evaluated
8936 more than once by the code below. */
8937 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8938 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8939 incremented
= save_expr (incremented
);
8941 /* Compute the operands as RTX.
8942 Note whether OP0 is the actual lvalue or a copy of it:
8943 I believe it is a copy iff it is a register or subreg
8944 and insns were generated in computing it. */
8946 temp
= get_last_insn ();
8947 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
8949 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8950 in place but instead must do sign- or zero-extension during assignment,
8951 so we copy it into a new register and let the code below use it as
8954 Note that we can safely modify this SUBREG since it is know not to be
8955 shared (it was made by the expand_expr call above). */
8957 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
8960 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
8964 else if (GET_CODE (op0
) == SUBREG
8965 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
8967 /* We cannot increment this SUBREG in place. If we are
8968 post-incrementing, get a copy of the old value. Otherwise,
8969 just mark that we cannot increment in place. */
8971 op0
= copy_to_reg (op0
);
8976 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
8977 && temp
!= get_last_insn ());
8978 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8979 EXPAND_MEMORY_USE_BAD
);
8981 /* Decide whether incrementing or decrementing. */
8982 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
8983 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8984 this_optab
= sub_optab
;
8986 /* Convert decrement by a constant into a negative increment. */
8987 if (this_optab
== sub_optab
8988 && GET_CODE (op1
) == CONST_INT
)
8990 op1
= GEN_INT (-INTVAL (op1
));
8991 this_optab
= add_optab
;
8994 /* For a preincrement, see if we can do this with a single instruction. */
8997 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8998 if (icode
!= (int) CODE_FOR_nothing
8999 /* Make sure that OP0 is valid for operands 0 and 1
9000 of the insn we want to queue. */
9001 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9002 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9003 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9007 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9008 then we cannot just increment OP0. We must therefore contrive to
9009 increment the original value. Then, for postincrement, we can return
9010 OP0 since it is a copy of the old value. For preincrement, expand here
9011 unless we can do it with a single insn.
9013 Likewise if storing directly into OP0 would clobber high bits
9014 we need to preserve (bad_subreg). */
9015 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9017 /* This is the easiest way to increment the value wherever it is.
9018 Problems with multiple evaluation of INCREMENTED are prevented
9019 because either (1) it is a component_ref or preincrement,
9020 in which case it was stabilized above, or (2) it is an array_ref
9021 with constant index in an array in a register, which is
9022 safe to reevaluate. */
9023 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9024 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9025 ? MINUS_EXPR
: PLUS_EXPR
),
9028 TREE_OPERAND (exp
, 1));
9030 while (TREE_CODE (incremented
) == NOP_EXPR
9031 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9033 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9034 incremented
= TREE_OPERAND (incremented
, 0);
9037 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9038 return post
? op0
: temp
;
9043 /* We have a true reference to the value in OP0.
9044 If there is an insn to add or subtract in this mode, queue it.
9045 Queueing the increment insn avoids the register shuffling
9046 that often results if we must increment now and first save
9047 the old value for subsequent use. */
9049 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9050 op0
= stabilize (op0
);
9053 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9054 if (icode
!= (int) CODE_FOR_nothing
9055 /* Make sure that OP0 is valid for operands 0 and 1
9056 of the insn we want to queue. */
9057 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9058 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9060 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9061 op1
= force_reg (mode
, op1
);
9063 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9065 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9067 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9068 ? force_reg (Pmode
, XEXP (op0
, 0))
9069 : copy_to_reg (XEXP (op0
, 0)));
9072 op0
= change_address (op0
, VOIDmode
, addr
);
9073 temp
= force_reg (GET_MODE (op0
), op0
);
9074 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9075 op1
= force_reg (mode
, op1
);
9077 /* The increment queue is LIFO, thus we have to `queue'
9078 the instructions in reverse order. */
9079 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9080 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9085 /* Preincrement, or we can't increment with one simple insn. */
9087 /* Save a copy of the value before inc or dec, to return it later. */
9088 temp
= value
= copy_to_reg (op0
);
9090 /* Arrange to return the incremented value. */
9091 /* Copy the rtx because expand_binop will protect from the queue,
9092 and the results of that would be invalid for us to return
9093 if our caller does emit_queue before using our result. */
9094 temp
= copy_rtx (value
= op0
);
9096 /* Increment however we can. */
9097 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9098 current_function_check_memory_usage
? NULL_RTX
: op0
,
9099 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9100 /* Make sure the value is stored into OP0. */
9102 emit_move_insn (op0
, op1
);
9107 /* Expand all function calls contained within EXP, innermost ones first.
9108 But don't look within expressions that have sequence points.
9109 For each CALL_EXPR, record the rtx for its value
9110 in the CALL_EXPR_RTL field. */
9113 preexpand_calls (exp
)
9116 register int nops
, i
;
9117 int class = TREE_CODE_CLASS (TREE_CODE (exp
));
9119 if (! do_preexpand_calls
)
9122 /* Only expressions and references can contain calls. */
9124 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9127 switch (TREE_CODE (exp
))
9130 /* Do nothing if already expanded. */
9131 if (CALL_EXPR_RTL (exp
) != 0
9132 /* Do nothing if the call returns a variable-sized object. */
9133 || (TREE_CODE (TREE_TYPE (exp
)) != VOID_TYPE
9134 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
9135 /* Do nothing to built-in functions. */
9136 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9137 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9139 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9142 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9147 case TRUTH_ANDIF_EXPR
:
9148 case TRUTH_ORIF_EXPR
:
9149 /* If we find one of these, then we can be sure
9150 the adjust will be done for it (since it makes jumps).
9151 Do it now, so that if this is inside an argument
9152 of a function, we don't get the stack adjustment
9153 after some other args have already been pushed. */
9154 do_pending_stack_adjust ();
9159 case WITH_CLEANUP_EXPR
:
9160 case CLEANUP_POINT_EXPR
:
9161 case TRY_CATCH_EXPR
:
9165 if (SAVE_EXPR_RTL (exp
) != 0)
9172 nops
= TREE_CODE_LENGTH (TREE_CODE (exp
));
9173 for (i
= 0; i
< nops
; i
++)
9174 if (TREE_OPERAND (exp
, i
) != 0)
9176 if (TREE_CODE (exp
) == TARGET_EXPR
&& i
== 2)
9177 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9178 It doesn't happen before the call is made. */
9182 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9183 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9184 preexpand_calls (TREE_OPERAND (exp
, i
));
9189 /* At the start of a function, record that we have no previously-pushed
9190 arguments waiting to be popped. */
9193 init_pending_stack_adjust ()
9195 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

/* NOTE(review): the guard condition was reconstructed from fragmented
   text -- confirm the leading `optimize > 0' test against upstream.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      /* Discard the adjustment; keep the bookkeeping delta in sync.  */
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}
9220 /* Pop any previously-pushed arguments that have not been popped yet. */
9223 do_pending_stack_adjust ()
9225 if (inhibit_defer_pop
== 0)
9227 if (pending_stack_adjust
!= 0)
9228 adjust_stack (GEN_INT (pending_stack_adjust
));
9229 pending_stack_adjust
= 0;
9233 /* Expand conditional expressions. */
9235 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9236 LABEL is an rtx of code CODE_LABEL, in this function and all the
9240 jumpifnot (exp
, label
)
9244 do_jump (exp
, label
, NULL_RTX
);
9247 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9254 do_jump (exp
, NULL_RTX
, label
);
9257 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9258 the result is zero, or IF_TRUE_LABEL if the result is one.
9259 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9260 meaning fall through in that case.
9262 do_jump always does any pending stack adjust except when it does not
9263 actually perform a jump. An example where there is no jump
9264 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9266 This function is responsible for optimizing cases such as
9267 &&, || and comparison operators in EXP. */
9270 do_jump (exp
, if_false_label
, if_true_label
)
9272 rtx if_false_label
, if_true_label
;
9274 register enum tree_code code
= TREE_CODE (exp
);
9275 /* Some cases need to create a label to jump to
9276 in order to properly fall through.
9277 These cases set DROP_THROUGH_LABEL nonzero. */
9278 rtx drop_through_label
= 0;
9282 enum machine_mode mode
;
9284 #ifdef MAX_INTEGER_COMPUTATION_MODE
9285 check_max_integer_computation_mode (exp
);
9296 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9302 /* This is not true with #pragma weak */
9304 /* The address of something can never be zero. */
9306 emit_jump (if_true_label
);
9311 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9312 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9313 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9316 /* If we are narrowing the operand, we have to do the compare in the
9318 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9319 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9321 case NON_LVALUE_EXPR
:
9322 case REFERENCE_EXPR
:
9327 /* These cannot change zero->non-zero or vice versa. */
9328 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9331 case WITH_RECORD_EXPR
:
9332 /* Put the object on the placeholder list, recurse through our first
9333 operand, and pop the list. */
9334 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9336 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9337 placeholder_list
= TREE_CHAIN (placeholder_list
);
9341 /* This is never less insns than evaluating the PLUS_EXPR followed by
9342 a test and can be longer if the test is eliminated. */
9344 /* Reduce to minus. */
9345 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9346 TREE_OPERAND (exp
, 0),
9347 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9348 TREE_OPERAND (exp
, 1))));
9349 /* Process as MINUS. */
9353 /* Non-zero iff operands of minus differ. */
9354 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9355 TREE_OPERAND (exp
, 0),
9356 TREE_OPERAND (exp
, 1)),
9357 NE
, NE
, if_false_label
, if_true_label
);
9361 /* If we are AND'ing with a small constant, do this comparison in the
9362 smallest type that fits. If the machine doesn't have comparisons
9363 that small, it will be converted back to the wider comparison.
9364 This helps if we are testing the sign bit of a narrower object.
9365 combine can't do this for us because it can't know whether a
9366 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9368 if (! SLOW_BYTE_ACCESS
9369 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9370 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9371 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9372 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9373 && (type
= type_for_mode (mode
, 1)) != 0
9374 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9375 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9376 != CODE_FOR_nothing
))
9378 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9383 case TRUTH_NOT_EXPR
:
9384 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9387 case TRUTH_ANDIF_EXPR
:
9388 if (if_false_label
== 0)
9389 if_false_label
= drop_through_label
= gen_label_rtx ();
9390 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9391 start_cleanup_deferral ();
9392 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9393 end_cleanup_deferral ();
9396 case TRUTH_ORIF_EXPR
:
9397 if (if_true_label
== 0)
9398 if_true_label
= drop_through_label
= gen_label_rtx ();
9399 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9400 start_cleanup_deferral ();
9401 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9402 end_cleanup_deferral ();
9407 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9408 preserve_temp_slots (NULL_RTX
);
9412 do_pending_stack_adjust ();
9413 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9420 HOST_WIDE_INT bitsize
, bitpos
;
9422 enum machine_mode mode
;
9426 unsigned int alignment
;
9428 /* Get description of this reference. We don't actually care
9429 about the underlying object here. */
9430 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9431 &unsignedp
, &volatilep
, &alignment
);
9433 type
= type_for_size (bitsize
, unsignedp
);
9434 if (! SLOW_BYTE_ACCESS
9435 && type
!= 0 && bitsize
>= 0
9436 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9437 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9438 != CODE_FOR_nothing
))
9440 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9447 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9448 if (integer_onep (TREE_OPERAND (exp
, 1))
9449 && integer_zerop (TREE_OPERAND (exp
, 2)))
9450 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9452 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9453 && integer_onep (TREE_OPERAND (exp
, 2)))
9454 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9458 register rtx label1
= gen_label_rtx ();
9459 drop_through_label
= gen_label_rtx ();
9461 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9463 start_cleanup_deferral ();
9464 /* Now the THEN-expression. */
9465 do_jump (TREE_OPERAND (exp
, 1),
9466 if_false_label
? if_false_label
: drop_through_label
,
9467 if_true_label
? if_true_label
: drop_through_label
);
9468 /* In case the do_jump just above never jumps. */
9469 do_pending_stack_adjust ();
9470 emit_label (label1
);
9472 /* Now the ELSE-expression. */
9473 do_jump (TREE_OPERAND (exp
, 2),
9474 if_false_label
? if_false_label
: drop_through_label
,
9475 if_true_label
? if_true_label
: drop_through_label
);
9476 end_cleanup_deferral ();
9482 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9484 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9485 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9487 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9488 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9491 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9492 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9493 fold (build1 (REALPART_EXPR
,
9494 TREE_TYPE (inner_type
),
9496 fold (build1 (REALPART_EXPR
,
9497 TREE_TYPE (inner_type
),
9499 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9500 fold (build1 (IMAGPART_EXPR
,
9501 TREE_TYPE (inner_type
),
9503 fold (build1 (IMAGPART_EXPR
,
9504 TREE_TYPE (inner_type
),
9506 if_false_label
, if_true_label
);
9509 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9510 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9512 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9513 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9514 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9516 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9522 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9524 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9525 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9527 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9528 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9531 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9532 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9533 fold (build1 (REALPART_EXPR
,
9534 TREE_TYPE (inner_type
),
9536 fold (build1 (REALPART_EXPR
,
9537 TREE_TYPE (inner_type
),
9539 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9540 fold (build1 (IMAGPART_EXPR
,
9541 TREE_TYPE (inner_type
),
9543 fold (build1 (IMAGPART_EXPR
,
9544 TREE_TYPE (inner_type
),
9546 if_false_label
, if_true_label
);
9549 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9550 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9552 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9553 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9554 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9556 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9561 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9562 if (GET_MODE_CLASS (mode
) == MODE_INT
9563 && ! can_compare_p (LT
, mode
, ccp_jump
))
9564 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9566 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9570 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9571 if (GET_MODE_CLASS (mode
) == MODE_INT
9572 && ! can_compare_p (LE
, mode
, ccp_jump
))
9573 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9575 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9579 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9580 if (GET_MODE_CLASS (mode
) == MODE_INT
9581 && ! can_compare_p (GT
, mode
, ccp_jump
))
9582 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9584 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9588 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9589 if (GET_MODE_CLASS (mode
) == MODE_INT
9590 && ! can_compare_p (GE
, mode
, ccp_jump
))
9591 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9593 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9596 case UNORDERED_EXPR
:
9599 enum rtx_code cmp
, rcmp
;
9602 if (code
== UNORDERED_EXPR
)
9603 cmp
= UNORDERED
, rcmp
= ORDERED
;
9605 cmp
= ORDERED
, rcmp
= UNORDERED
;
9606 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9609 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9610 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9611 /* If the target doesn't provide either UNORDERED or ORDERED
9612 comparisons, canonicalize on UNORDERED for the library. */
9613 || rcmp
== UNORDERED
))
9617 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9619 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9624 enum rtx_code rcode1
;
9625 enum tree_code tcode2
;
9649 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9650 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9651 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9655 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9656 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9659 /* If the target doesn't support combined unordered
9660 compares, decompose into UNORDERED + comparison. */
9661 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9662 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9663 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9664 do_jump (exp
, if_false_label
, if_true_label
);
9671 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9673 /* This is not needed any more and causes poor code since it causes
9674 comparisons and tests from non-SI objects to have different code
9676 /* Copy to register to avoid generating bad insns by cse
9677 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9678 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9679 temp
= copy_to_reg (temp
);
9681 do_pending_stack_adjust ();
9682 /* Do any postincrements in the expression that was tested. */
9685 if (GET_CODE (temp
) == CONST_INT
|| GET_CODE (temp
) == LABEL_REF
)
9687 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9691 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9692 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9693 /* Note swapping the labels gives us not-equal. */
9694 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9695 else if (GET_MODE (temp
) != VOIDmode
)
9696 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9697 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9698 GET_MODE (temp
), NULL_RTX
, 0,
9699 if_false_label
, if_true_label
);
9704 if (drop_through_label
)
9706 /* If do_jump produces code that might be jumped around,
9707 do any stack adjusts from that code, before the place
9708 where control merges in. */
9709 do_pending_stack_adjust ();
9710 emit_label (drop_through_label
);
9714 /* Given a comparison expression EXP for values too wide to be compared
9715 with one insn, test the comparison and jump to the appropriate label.
9716 The code of EXP is ignored; we always test GT if SWAP is 0,
9717 and LT if SWAP is 1. */
9720 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
9723 rtx if_false_label
, if_true_label
;
9725 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
9726 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
9727 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9728 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9730 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
9733 /* Compare OP0 with OP1, word at a time, in mode MODE.
9734 UNSIGNEDP says to do unsigned comparison.
9735 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9738 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
9739 enum machine_mode mode
;
9742 rtx if_false_label
, if_true_label
;
9744 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9745 rtx drop_through_label
= 0;
9748 if (! if_true_label
|| ! if_false_label
)
9749 drop_through_label
= gen_label_rtx ();
9750 if (! if_true_label
)
9751 if_true_label
= drop_through_label
;
9752 if (! if_false_label
)
9753 if_false_label
= drop_through_label
;
9755 /* Compare a word at a time, high order first. */
9756 for (i
= 0; i
< nwords
; i
++)
9758 rtx op0_word
, op1_word
;
9760 if (WORDS_BIG_ENDIAN
)
9762 op0_word
= operand_subword_force (op0
, i
, mode
);
9763 op1_word
= operand_subword_force (op1
, i
, mode
);
9767 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
9768 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
9771 /* All but high-order word must be compared as unsigned. */
9772 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
9773 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
, 0,
9774 NULL_RTX
, if_true_label
);
9776 /* Consider lower words only if these are equal. */
9777 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
9778 NULL_RTX
, 0, NULL_RTX
, if_false_label
);
9782 emit_jump (if_false_label
);
9783 if (drop_through_label
)
9784 emit_label (drop_through_label
);
9787 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9788 with one insn, test the comparison and jump to the appropriate label. */
9791 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
9793 rtx if_false_label
, if_true_label
;
9795 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
9796 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9797 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9798 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9800 rtx drop_through_label
= 0;
9802 if (! if_false_label
)
9803 drop_through_label
= if_false_label
= gen_label_rtx ();
9805 for (i
= 0; i
< nwords
; i
++)
9806 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
9807 operand_subword_force (op1
, i
, mode
),
9808 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9809 word_mode
, NULL_RTX
, 0, if_false_label
,
9813 emit_jump (if_true_label
);
9814 if (drop_through_label
)
9815 emit_label (drop_through_label
);
9818 /* Jump according to whether OP0 is 0.
9819 We assume that OP0 has an integer mode that is too wide
9820 for the available compare insns. */
9823 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
9825 rtx if_false_label
, if_true_label
;
9827 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
9830 rtx drop_through_label
= 0;
9832 /* The fastest way of doing this comparison on almost any machine is to
9833 "or" all the words and compare the result. If all have to be loaded
9834 from memory and this is a very wide item, it's possible this may
9835 be slower, but that's highly unlikely. */
9837 part
= gen_reg_rtx (word_mode
);
9838 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
9839 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
9840 part
= expand_binop (word_mode
, ior_optab
, part
,
9841 operand_subword_force (op0
, i
, GET_MODE (op0
)),
9842 part
, 1, OPTAB_WIDEN
);
9846 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
9847 NULL_RTX
, 0, if_false_label
, if_true_label
);
9852 /* If we couldn't do the "or" simply, do this with a series of compares. */
9853 if (! if_false_label
)
9854 drop_through_label
= if_false_label
= gen_label_rtx ();
9856 for (i
= 0; i
< nwords
; i
++)
9857 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
9858 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0,
9859 if_false_label
, NULL_RTX
);
9862 emit_jump (if_true_label
);
9864 if (drop_through_label
)
9865 emit_label (drop_through_label
);
9868 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9869 (including code to compute the values to be compared)
9870 and set (CC0) according to the result.
9871 The decision as to signed or unsigned comparison must be made by the caller.
9873 We force a stack adjustment unless there are currently
9874 things pushed on the stack that aren't yet used.
9876 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9879 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9880 size of MODE should be used. */
9883 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
9884 register rtx op0
, op1
;
9887 enum machine_mode mode
;
9893 /* If one operand is constant, make it the second one. Only do this
9894 if the other operand is not constant as well. */
9896 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9897 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9902 code
= swap_condition (code
);
9907 op0
= force_not_mem (op0
);
9908 op1
= force_not_mem (op1
);
9911 do_pending_stack_adjust ();
9913 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9914 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9918 /* There's no need to do this now that combine.c can eliminate lots of
9919 sign extensions. This can be less efficient in certain cases on other
9922 /* If this is a signed equality comparison, we can do it as an
9923 unsigned comparison since zero-extension is cheaper than sign
9924 extension and comparisons with zero are done as unsigned. This is
9925 the case even on machines that can do fast sign extension, since
9926 zero-extension is easier to combine with other operations than
9927 sign-extension is. If we are comparing against a constant, we must
9928 convert it to what it would look like unsigned. */
9929 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9930 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9932 if (GET_CODE (op1
) == CONST_INT
9933 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9934 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9939 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
9941 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9944 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9945 The decision as to signed or unsigned comparison must be made by the caller.
9947 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9950 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9951 size of MODE should be used. */
/* Emit RTL that compares OP0 against OP1 with comparison CODE and jumps to
   IF_FALSE_LABEL and/or IF_TRUE_LABEL.  Either label may be zero, in which
   case control falls through on that outcome.
   NOTE(review): this extracted text appears to be missing some physical
   lines of the original file (braces, a few statements); the comments
   below describe only what is visible here.  */
9954 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
, size
, align
,
9955 if_false_label
, if_true_label
)
/* Old-style (K&R) parameter declarations follow.  */
9956 register rtx op0
, op1
;
9959 enum machine_mode mode
;
9962 rtx if_false_label
, if_true_label
;
/* Set when we must invent IF_TRUE_LABEL ourselves; see below.  */
9965 int dummy_true_label
= 0;
9967 /* Reverse the comparison if that is safe and we want to jump if it is
/* Reversal is only done for non-float modes -- presumably because of
   NaN semantics; TODO confirm against the full original text.  */
9969 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
9971 if_true_label
= if_false_label
;
/* Jumping to the false label on the reversed condition is equivalent.  */
9973 code
= reverse_condition (code
);
9976 /* If one operand is constant, make it the second one. Only do this
9977 if the other operand is not constant as well. */
9979 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9980 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
/* Swapping the operands requires swapping the condition code too.  */
9985 code
= swap_condition (code
);
/* Comparison insns generally cannot accept two memory operands, so make
   sure neither operand is a bare MEM.  */
9990 op0
= force_not_mem (op0
);
9991 op1
= force_not_mem (op1
);
9994 do_pending_stack_adjust ();
/* If both operands are compile-time integer constants, try to fold the
   comparison now and emit an unconditional jump instead.  */
9996 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9997 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9999 if (tem
== const_true_rtx
)
/* Comparison is known true: jump straight to the true label.  */
10002 emit_jump (if_true_label
);
/* Otherwise it folded to false: jump to the false label if present.  */
10006 if (if_false_label
)
10007 emit_jump (if_false_label
);
10013 /* There's no need to do this now that combine.c can eliminate lots of
10014 sign extensions. This can be less efficient in certain cases on other
10017 /* If this is a signed equality comparison, we can do it as an
10018 unsigned comparison since zero-extension is cheaper than sign
10019 extension and comparisons with zero are done as unsigned. This is
10020 the case even on machines that can do fast sign extension, since
10021 zero-extension is easier to combine with other operations than
10022 sign-extension is. If we are comparing against a constant, we must
10023 convert it to what it would look like unsigned. */
10024 if ((code
== EQ
|| code
== NE
) && ! unsignedp
10025 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
10027 if (GET_CODE (op1
) == CONST_INT
10028 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
/* Mask the constant down to OP0's mode width, i.e. reinterpret it as an
   unsigned value of that width.  */
10029 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* The cmp-and-jump expander needs a label to jump to on "true"; if the
   caller gave none, create a dummy one and emit it right afterwards so
   the "true" case simply falls through.  */
10034 if (! if_true_label
)
10036 dummy_true_label
= 1;
10037 if_true_label
= gen_label_rtx ();
10040 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
, align
,
10043 if (if_false_label
)
10044 emit_jump (if_false_label
);
10045 if (dummy_true_label
)
10046 emit_label (if_true_label
);
10049 /* Generate code for a comparison expression EXP (including code to compute
10050 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10051 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10052 generated code will drop through.
10053 SIGNED_CODE should be the rtx operation for this comparison for
10054 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10056 We force a stack adjustment unless there are currently
10057 things pushed on the stack that aren't yet used. */
/* Tree-level front end for do_compare_rtx_and_jump: expands both operands
   of EXP to rtx, picks the signed or unsigned rtx comparison code from the
   operand type, and delegates the compare-and-branch.
   NOTE(review): some physical lines (braces, a few statements) appear to
   be missing from this extracted text.  */
10060 do_compare_and_jump (exp
, signed_code
, unsigned_code
, if_false_label
,
/* Old-style (K&R) parameter declarations follow.  */
10063 enum rtx_code signed_code
, unsigned_code
;
10064 rtx if_false_label
, if_true_label
;
/* Alignments (in bits, presumably -- TODO confirm) of the two operands as
   reported by expand_expr_unaligned.  */
10066 unsigned int align0
, align1
;
10067 register rtx op0
, op1
;
10068 register tree type
;
10069 register enum machine_mode mode
;
10071 enum rtx_code code
;
10073 /* Don't crash if the comparison was erroneous. */
10074 op0
= expand_expr_unaligned (TREE_OPERAND (exp
, 0), &align0
);
10075 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
10078 op1
= expand_expr_unaligned (TREE_OPERAND (exp
, 1), &align1
);
/* The comparison's signedness and machine mode come from the type of the
   first operand.  */
10079 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10080 mode
= TYPE_MODE (type
);
10081 unsignedp
= TREE_UNSIGNED (type
);
10082 code
= unsignedp
? unsigned_code
: signed_code
;
10084 #ifdef HAVE_canonicalize_funcptr_for_compare
10085 /* If function pointers need to be "canonicalized" before they can
10086 be reliably compared, then canonicalize them. */
10087 if (HAVE_canonicalize_funcptr_for_compare
10088 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10089 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
/* Canonicalize operand 0 into a fresh pseudo register.  */
10092 rtx new_op0
= gen_reg_rtx (mode
);
10094 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
/* Same treatment for operand 1 if it is also a function pointer.  */
10098 if (HAVE_canonicalize_funcptr_for_compare
10099 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10100 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10103 rtx new_op1
= gen_reg_rtx (mode
);
10105 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
10110 /* Do any postincrements in the expression that was tested. */
/* SIZE is only meaningful for BLKmode (visible in the ?: below); the
   alignment passed down is the more conservative of the two operands'.  */
10113 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
10115 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10116 MIN (align0
, align1
),
10117 if_false_label
, if_true_label
);
10120 /* Generate code to calculate EXP using a store-flag instruction
10121 and return an rtx for the result. EXP is either a comparison
10122 or a TRUTH_NOT_EXPR whose operand is a comparison.
10124 If TARGET is nonzero, store the result there if convenient.
10126 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10129 Return zero if there is no suitable set-flag instruction
10130 available on this machine.
10132 Once expand_expr has been called on the arguments of the comparison,
10133 we are committed to doing the store flag, since it is not safe to
10134 re-evaluate the expression. We emit the store-flag insn by calling
10135 emit_store_flag, but only expand the arguments if we have a reason
10136 to believe that emit_store_flag will be successful. If we think that
10137 it will, but it isn't, we have to simulate the store-flag with a
10138 set/jump/set sequence. */
/* NOTE(review): this extracted text is missing a number of physical lines
   of the original (case labels, braces, returns).  Comments below describe
   only what is visible here.  */
10141 do_store_flag (exp
, target
, mode
, only_cheap
)
/* Old-style (K&R) parameter declarations follow.  */
10144 enum machine_mode mode
;
10147 enum rtx_code code
;
10148 tree arg0
, arg1
, type
;
/* Mode of the values being compared (as opposed to MODE, the mode of the
   0/1 result).  */
10150 enum machine_mode operand_mode
;
10154 enum insn_code icode
;
10155 rtx subtarget
= target
;
10158 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10159 result at the end. We can't simply invert the test since it would
10160 have already been inverted if it were valid. This case occurs for
10161 some floating-point comparisons. */
10163 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10164 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
/* From here on EXP is the comparison itself; pick apart its operands.  */
10166 arg0
= TREE_OPERAND (exp
, 0);
10167 arg1
= TREE_OPERAND (exp
, 1);
10168 type
= TREE_TYPE (arg0
);
10169 operand_mode
= TYPE_MODE (type
);
10170 unsignedp
= TREE_UNSIGNED (type
);
10172 /* We won't bother with BLKmode store-flag operations because it would mean
10173 passing a lot of information to emit_store_flag. */
10174 if (operand_mode
== BLKmode
)
10177 /* We won't bother with store-flag operations involving function pointers
10178 when function pointers must be canonicalized before comparisons. */
10179 #ifdef HAVE_canonicalize_funcptr_for_compare
10180 if (HAVE_canonicalize_funcptr_for_compare
10181 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10182 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10184 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10185 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10186 == FUNCTION_TYPE
))))
10193 /* Get the rtx comparison code to use. We know that EXP is a comparison
10194 operation of some type. Some comparisons against 1 and -1 can be
10195 converted to comparisons with zero. Do so here so that the tests
10196 below will be aware that we have a comparison with zero. These
10197 tests will not catch constants in the first operand, but constants
10198 are rarely passed as the first operand. */
/* Map the tree comparison code to an rtx code, normalizing x<1 -> x<=0,
   x<=-1 -> x<0, x>-1 -> x>=0, x>=1 -> x>0 where signedness allows.
   NOTE(review): the individual case labels of this switch are among the
   lines missing from this extraction.  */
10200 switch (TREE_CODE (exp
))
/* Presumably LT_EXPR: x < 1 becomes x <= 0.  */
10209 if (integer_onep (arg1
))
10210 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10212 code
= unsignedp
? LTU
: LT
;
/* Presumably LE_EXPR: signed x <= -1 becomes x < 0.  */
10215 if (! unsignedp
&& integer_all_onesp (arg1
))
10216 arg1
= integer_zero_node
, code
= LT
;
10218 code
= unsignedp
? LEU
: LE
;
/* Presumably GT_EXPR: signed x > -1 becomes x >= 0.  */
10221 if (! unsignedp
&& integer_all_onesp (arg1
))
10222 arg1
= integer_zero_node
, code
= GE
;
10224 code
= unsignedp
? GTU
: GT
;
/* Presumably GE_EXPR: x >= 1 becomes x > 0.  */
10227 if (integer_onep (arg1
))
10228 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10230 code
= unsignedp
? GEU
: GE
;
10233 case UNORDERED_EXPR
:
10259 /* Put a constant second. */
10260 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
/* Classic three-statement swap of ARG0/ARG1 through TEM...  */
10262 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
/* ...which requires swapping the condition code as well.  */
10263 code
= swap_condition (code
);
10266 /* If this is an equality or inequality test of a single bit, we can
10267 do this by shifting the bit being tested to the low-order bit and
10268 masking the result with the constant 1. If the condition was EQ,
10269 we xor it with 1. This does not require an scc insn and is faster
10270 than an scc insn even if we have it. */
10272 if ((code
== NE
|| code
== EQ
)
10273 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
10274 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
/* INNER is the value whose bit BITNUM is tested; BITNUM is the log2 of
   the power-of-2 mask.  */
10276 tree inner
= TREE_OPERAND (arg0
, 0);
10277 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
10280 /* If INNER is a right shift of a constant and it plus BITNUM does
10281 not overflow, adjust BITNUM and INNER. */
10283 if (TREE_CODE (inner
) == RSHIFT_EXPR
10284 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
10285 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
10286 && bitnum
< TYPE_PRECISION (type
)
10287 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
10288 bitnum
- TYPE_PRECISION (type
)))
/* Fold the shift count into BITNUM and test the unshifted value.  */
10290 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
10291 inner
= TREE_OPERAND (inner
, 0);
10294 /* If we are going to be able to omit the AND below, we must do our
10295 operations as unsigned. If we must use the AND, we have a choice.
10296 Normally unsigned is faster, but for some machines signed is. */
/* The AND can be omitted only when the tested bit is the sign bit
   (BITNUM == precision - 1), since the shift then leaves just 0 or 1.  */
10297 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
10298 #ifdef LOAD_EXTEND_OP
/* Otherwise prefer whichever signedness matches how this machine
   extends loads, so the extension can be combined away.  */
10299 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
/* SUBTARGET is only usable if it is a register of the right mode that
   INNER's expansion cannot clobber.  */
10305 if (! get_subtarget (subtarget
)
10306 || GET_MODE (subtarget
) != operand_mode
10307 || ! safe_from_p (subtarget
, inner
, 1))
10310 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
/* Shift the tested bit down to bit 0.  */
10313 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
10314 size_int (bitnum
), subtarget
, ops_unsignedp
);
10316 if (GET_MODE (op0
) != mode
)
10317 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
/* For EQ (bit clear => 1) we must flip the low bit with XOR; INVERT
   toggles which of EQ/NE needs the flip.  */
10319 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
10320 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
10321 ops_unsignedp
, OPTAB_LIB_WIDEN
);
10323 /* Put the AND last so it can combine with more things. */
10324 if (bitnum
!= TYPE_PRECISION (type
) - 1)
10325 op0
= expand_and (op0
, const1_rtx
, subtarget
);
10330 /* Now see if we are likely to be able to do this. Return if not. */
10331 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
/* Look up the machine's scc (set-on-condition) pattern for CODE.  */
10334 icode
= setcc_gen_code
[(int) code
];
10335 if (icode
== CODE_FOR_nothing
10336 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
10338 /* We can only do this if it is one of the special cases that
10339 can be handled without an scc insn. */
/* x < 0 (sign-bit extract) is always cheap; x >= 0 only when cost
   does not matter.  */
10340 if ((code
== LT
&& integer_zerop (arg1
))
10341 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
/* Equality with zero can also be emulated via abs or ffs when the
   machine provides one of those for this mode.  */
10343 else if (BRANCH_COST
>= 0
10344 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
10345 && TREE_CODE (type
) != REAL_TYPE
10346 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
10347 != CODE_FOR_nothing
)
10348 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
10349 != CODE_FOR_nothing
)))
/* Committed now: expand the operands (see the header comment about not
   being able to re-evaluate EXP after this point).  */
10355 preexpand_calls (exp
);
10356 if (! get_subtarget (target
)
10357 || GET_MODE (subtarget
) != operand_mode
10358 || ! safe_from_p (subtarget
, arg1
, 1))
10361 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
10362 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
10365 target
= gen_reg_rtx (mode
);
10367 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10368 because, if the emit_store_flag does anything it will succeed and
10369 OP0 and OP1 will not be used subsequently. */
10371 result
= emit_store_flag (target
, code
,
10372 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
10373 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
10374 operand_mode
, unsignedp
, 1);
/* Invert a successful 0/1 result by XORing with 1 when INVERT is set
   (the guarding condition is among the missing lines).  */
10379 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
10380 result
, 0, OPTAB_LIB_WIDEN
);
10384 /* If this failed, we have to do this with set/compare/jump/set code. */
/* TARGET must be a register not mentioned in either operand, since it is
   written before the comparison is made.  */
10385 if (GET_CODE (target
) != REG
10386 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
10387 target
= gen_reg_rtx (GET_MODE (target
));
/* Assume the comparison holds: store the "true" value first...  */
10389 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
10390 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
10391 operand_mode
, NULL_RTX
, 0);
/* compare_from_rtx folded to a constant: the answer is known now.  */
10392 if (GET_CODE (result
) == CONST_INT
)
10393 return (((result
== const0_rtx
&& ! invert
)
10394 || (result
!= const0_rtx
&& invert
))
10395 ? const0_rtx
: const1_rtx
);
/* ...then branch over a store of the "false" value when the condition
   holds; bcc_gen_fctn maps an rtx code to its branch generator.  */
10397 label
= gen_label_rtx ();
10398 if (bcc_gen_fctn
[(int) code
] == 0)
10401 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
10402 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
10403 emit_label (label
);
10408 /* Generate a tablejump instruction (used for switch statements). */
10410 #ifdef HAVE_tablejump
10412 /* INDEX is the value being switched on, with the lowest value
10413 in the table already subtracted.
10414 MODE is its expected mode (needed if INDEX is constant).
10415 RANGE is the length of the jump table.
10416 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10418 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10419 index value is out of range. */
/* NOTE(review): a few physical lines (braces, an emit_barrier, the
   default-label argument of the cmp-and-jump) appear to be missing from
   this extracted text.  */
10422 do_tablejump (index
, mode
, range
, table_label
, default_label
)
/* Old-style (K&R) parameter declarations follow.  */
10423 rtx index
, range
, table_label
, default_label
;
10424 enum machine_mode mode
;
/* TEMP receives the loaded table entry; VECTOR is the MEM that addresses
   the table slot.  */
10426 register rtx temp
, vector
;
10428 /* Do an unsigned comparison (in the proper mode) between the index
10429 expression and the value which represents the length of the range.
10430 Since we just finished subtracting the lower bound of the range
10431 from the index expression, this comparison allows us to simultaneously
10432 check that the original index expression value is both greater than
10433 or equal to the minimum value of the range and less than or equal to
10434 the maximum value of the range. */
10436 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
10439 /* If index is in range, it must fit in Pmode.
10440 Convert to Pmode so we can index with it. */
10442 index
= convert_to_mode (Pmode
, index
, 1);
10444 /* Don't let a MEM slip thru, because then INDEX that comes
10445 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10446 and break_out_memory_refs will go to work on it and mess it up. */
10447 #ifdef PIC_CASE_VECTOR_ADDRESS
10448 if (flag_pic
&& GET_CODE (index
) != REG
)
10449 index
= copy_to_mode_reg (Pmode
, index
);
10452 /* If flag_force_addr were to affect this address
10453 it could interfere with the tricky assumptions made
10454 about addresses that contain label-refs,
10455 which may be valid only very near the tablejump itself. */
10456 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10457 GET_MODE_SIZE, because this indicates how large insns are. The other
10458 uses should all be Pmode, because they are addresses. This code
10459 could fail if addresses and insns are not the same size. */
/* Compute &table[index]: table label plus index scaled by the size of one
   table entry.  */
10460 index
= gen_rtx_PLUS (Pmode
,
10461 gen_rtx_MULT (Pmode
, index
,
10462 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
10463 gen_rtx_LABEL_REF (Pmode
, table_label
));
10464 #ifdef PIC_CASE_VECTOR_ADDRESS
/* PIC targets may need to rewrite the table-entry address.  */
10466 index
= PIC_CASE_VECTOR_ADDRESS (index
);
/* memory_address_noforce, not memory_address: see the flag_force_addr
   comment above.  */
10469 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
10470 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
10471 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
/* The jump table is read-only, so mark the MEM unchanging.  */
10472 RTX_UNCHANGING_P (vector
) = 1;
10473 convert_move (temp
, vector
, 0);
/* Jump indirectly through the loaded table entry.  */
10475 emit_jump_insn (gen_tablejump (temp
, table_label
));
10477 /* If we are generating PIC code or if the table is PC-relative, the
10478 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10479 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
10483 #endif /* HAVE_tablejump */