/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
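/* For instance, assuming ALIGN is a power of two (which the bit mask
   above requires): CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16.  */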
\f

/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
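
/* The ENTRIES array is garbage-collected and grown on demand;
   get_func_hard_reg_initial_val below allocates it lazily and extends
   it in increments of five entries.  */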

static void subst_constants (rtx *, rtx, struct inline_remap *, int);
static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);
static void mark_stores (rtx, rtx, void *);
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
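
/* A typical use, as in the CODE_LABEL case of copy_rtx_and_substitute
   below, is to remap an original label to its copy:
   get_label_from_map (map, CODE_LABEL_NUMBER (orig)).  */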

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
\f
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	}
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
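
/* An illustrative use while inlining (hypothetical variable names): each
   local declaration of the source function is remapped into the
   destination function, e.g.

     new_decl = copy_decl_for_inlining (old_decl, from_fn, to_fn);

   after which NEW_DECL can be chained into TO_FN's BLOCK tree.  */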
\f
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movmemhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be
	 a duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
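
/* A driver (historically the RTL inliner and the loop unroller) copies
   one insn at a time, roughly as sketched below.  This is illustrative
   only; real callers also copy REG_NOTES and treat CALL_INSNs and
   JUMP_INSNs specially:

     copy = copy_rtx_and_substitute (PATTERN (insn), map, 0);
     new_insn = emit_insn (copy);
     map->insn_map[INSN_UID (insn)] = new_insn;
     try_constants (new_insn, map);  */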
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (rtx insn, struct inline_remap *map)
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
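  /* For instance, for (set (mem (reg A)) (reg C)) where C has a known
     constant equivalent but the target cannot store a constant directly,
     the first pass may still fold constants into the address inside the
     MEM while leaving the source register alone.  (Illustrative example,
     not from a particular target.)  */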
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Enforce consistency between the addresses in the regular insn flow
     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    {
      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
      apply_change_group ();
    }

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (REG_P (map->equiv_sets[i].dest))
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.
   This function is also used to adjust the addresses of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
{
  rtx x = *loc;
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (MEM_P (XEXP (x, 0)))
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && REG_P (SUBREG_REG (x)))
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.
	     Instead, see what is inside, try to form the new SUBREG and see
	     if that is valid.  We handle two cases: extracting a full word
	     in an integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);
	  new = simplify_gen_subreg (GET_MODE (x), inner,
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));

	  if (new)
	    validate_change (insn, loc, new, 1);
	  else
	    cancel_changes (num_changes);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;
	enum machine_mode compare_mode = VOIDmode;

	/* If SET_SRC is a COMPARE which subst_constants would turn into
	   COMPARE of 2 VOIDmode constants, note the mode in which comparison
	   is to be done.  */
	if (GET_CODE (SET_SRC (x)) == COMPARE)
	  {
	    src = SET_SRC (x);
	    if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		|| CC0_P (dest))
	      {
		compare_mode = GET_MODE (XEXP (src, 0));
		if (compare_mode == VOIDmode)
		  compare_mode = GET_MODE (XEXP (src, 1));
	      }
	  }

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (MEM_P (*dest_loc))
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case where DEST is a SUBREG, both it and the
	   underlying register are no larger than one word, and the SUBREG
	   has the wider mode.  In that case, we are really setting the
	   underlying register to the source converted to the mode of DEST.
	   So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (REG_P (src)
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && REG_P (XEXP (src, 0))
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
		|| CC0_P (dest)
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    rtx src_copy = copy_rtx (src);
	    map->equiv_sets[map->num_sets].equiv = src_copy;
	    map->equiv_sets[map->num_sets++].dest = dest;
	    if (compare_mode != VOIDmode
		&& GET_CODE (src) == COMPARE
		&& (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
		    || CC0_P (dest))
		&& GET_MODE (XEXP (src, 0)) == VOIDmode
		&& GET_MODE (XEXP (src, 1)) == VOIDmode)
	      {
		map->compare_src = src_copy;
		map->compare_mode = compare_mode;
	      }
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	case 'B':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }
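
  /* For example, (plus (const_int 4) (reg R)) is canonicalized here to
     (plus (reg R) (const_int 4)); both changes are queued through
     validate_change so that they succeed or are cancelled as a group.  */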

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case RTX_UNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case RTX_COMPARE:
      case RTX_COMM_COMPARE:
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));

	  new = simplify_relational_operation (code, GET_MODE (x), op_mode,
					       XEXP (x, 0), XEXP (x, 1));
	  break;
	}

      case RTX_BIN_ARITH:
      case RTX_COMM_ARITH:
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case RTX_BITFIELD_OPS:
      case RTX_TERNARY:
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	if (code == IF_THEN_ELSE)
	  {
	    rtx op0 = XEXP (x, 0);

	    if (COMPARISON_P (op0)
		&& GET_MODE (op0) == VOIDmode
		&& ! side_effects_p (op0)
		&& XEXP (op0, 0) == map->compare_src
		&& GET_MODE (XEXP (op0, 1)) == VOIDmode)
	      {
		/* We have compare of two VOIDmode constants for which
		   we recorded the comparison mode.  */
		rtx tem =
		  simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
					   map->compare_mode, XEXP (op0, 0),
					   XEXP (op0, 1));

		if (GET_CODE (tem) != CONST_INT)
		  new = simplify_ternary_operation (code, GET_MODE (x),
						    op0_mode, tem, XEXP (x, 1),
						    XEXP (x, 2));
		else if (tem == const0_rtx)
		  new = XEXP (x, 2);
		else
		  new = XEXP (x, 1);
	      }
	  }
	if (!new)
	  new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					    XEXP (x, 0), XEXP (x, 1),
					    XEXP (x, 2));
	break;

      default:
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}

/* Show that the registers modified no longer contain known constants.  We
   are called from note_stores with parts of the new insn.  */

static void
mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (REG_P (dest))
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      regno = REGNO (SUBREG_REG (dest));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				      GET_MODE (SUBREG_REG (dest)),
				      SUBREG_BYTE (dest),
				      GET_MODE (dest));
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + hard_regno_nregs[uregno][mode] - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
\f
/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}
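
/* A hypothetical use from a target's expander, to obtain the value the
   return-address register had on entry:

     rtx ra = get_hard_reg_initial_val (Pmode, RA_REGNUM);

   where RA_REGNUM stands in for the target's return-address hard
   register.  The first call creates the pseudo; emit_initial_value_sets
   below emits the copy from the hard register at function entry.  */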

void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else abort ();
    }
#endif
}

#include "gt-integrate.h"