cfgexpand.c (expand_gimple_cond): Always set the source location and block before...
[gcc.git] / gcc / cfglayout.c
1 /* Basic block reordering routines for the GNU compiler.
2 Copyright (C) 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "obstack.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "output.h"
32 #include "function.h"
33 #include "cfglayout.h"
34 #include "cfgloop.h"
35 #include "target.h"
36 #include "ggc.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "tree-pass.h"
40 #include "df.h"
41 #include "vecprim.h"
42 #include "emit-rtl.h"
43
44 /* Holds the interesting trailing notes for the function. */
45 rtx cfg_layout_function_footer;
46 rtx cfg_layout_function_header;
47
48 static rtx skip_insns_after_block (basic_block);
49 static void record_effective_endpoints (void);
50 static rtx label_for_bb (basic_block);
51 static void fixup_reorder_chain (void);
52
53 static void change_scope (rtx, tree, tree);
54
55 void verify_insn_chain (void);
56 static void fixup_fallthru_exit_predecessor (void);
57 \f
58 rtx
59 unlink_insn_chain (rtx first, rtx last)
60 {
61 rtx prevfirst = PREV_INSN (first);
62 rtx nextlast = NEXT_INSN (last);
63
64 PREV_INSN (first) = NULL;
65 NEXT_INSN (last) = NULL;
66 if (prevfirst)
67 NEXT_INSN (prevfirst) = nextlast;
68 if (nextlast)
69 PREV_INSN (nextlast) = prevfirst;
70 else
71 set_last_insn (prevfirst);
72 if (!prevfirst)
73 set_first_insn (nextlast);
74 return first;
75 }
76 \f
77 /* Skip over inter-block insns occurring after BB which are typically
78 associated with BB (e.g., barriers). If there are any such insns,
79 we return the last one. Otherwise, we return the end of BB. */
80
81 static rtx
82 skip_insns_after_block (basic_block bb)
83 {
84 rtx insn, last_insn, next_head, prev;
85
86 next_head = NULL_RTX;
87 if (bb->next_bb != EXIT_BLOCK_PTR)
88 next_head = BB_HEAD (bb->next_bb);
89
90 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
91 {
92 if (insn == next_head)
93 break;
94
95 switch (GET_CODE (insn))
96 {
97 case BARRIER:
98 last_insn = insn;
99 continue;
100
101 case NOTE:
102 switch (NOTE_KIND (insn))
103 {
104 case NOTE_INSN_BLOCK_END:
105 gcc_unreachable ();
106 continue;
107 default:
108 continue;
109 break;
110 }
111 break;
112
113 case CODE_LABEL:
114 if (NEXT_INSN (insn)
115 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
116 {
117 insn = NEXT_INSN (insn);
118 last_insn = insn;
119 continue;
120 }
121 break;
122
123 default:
124 break;
125 }
126
127 break;
128 }
129
130 /* It is possible to hit contradictory sequence. For instance:
131
132 jump_insn
133 NOTE_INSN_BLOCK_BEG
134 barrier
135
136 Where barrier belongs to jump_insn, but the note does not. This can be
137 created by removing the basic block originally following
138 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
139
140 for (insn = last_insn; insn != BB_END (bb); insn = prev)
141 {
142 prev = PREV_INSN (insn);
143 if (NOTE_P (insn))
144 switch (NOTE_KIND (insn))
145 {
146 case NOTE_INSN_BLOCK_END:
147 gcc_unreachable ();
148 break;
149 case NOTE_INSN_DELETED:
150 case NOTE_INSN_DELETED_LABEL:
151 continue;
152 default:
153 reorder_insns (insn, insn, last_insn);
154 }
155 }
156
157 return last_insn;
158 }
159
160 /* Locate or create a label for a given basic block. */
161
162 static rtx
163 label_for_bb (basic_block bb)
164 {
165 rtx label = BB_HEAD (bb);
166
167 if (!LABEL_P (label))
168 {
169 if (dump_file)
170 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
171
172 label = block_label (bb);
173 }
174
175 return label;
176 }
177
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Inter-block
   insns are unlinked from the main chain and stashed: insns before the
   first block into cfg_layout_function_header, insns after the last
   block into cfg_layout_function_footer, and everything else into the
   per-block header/footer fields.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip leading notes up to the first NOTE_INSN_BASIC_BLOCK.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's claimed end and BB's head
	 become BB's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	bb->il.rtl->header = unlink_insn_chain (next_insn,
					      PREV_INSN (BB_HEAD (bb)));
      /* Trailing insns associated with BB (barriers, jump tables)
	 become BB's footer.  */
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	bb->il.rtl->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
221 \f
/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improve the memory locality in binary search.
   The _locs array contains locators where the given property change.  The
   block_locators_blocks contains the scope block that is used for all insn
   locator greater than corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static VEC(int,heap) *block_locators_locs;
static GTY(()) VEC(tree,gc) *block_locators_blocks;
static VEC(int,heap) *locations_locators_locs;
DEF_VEC_O(location_t);
DEF_VEC_ALLOC_O(location_t,heap);
static VEC(location_t,heap) *locations_locators_vals;
/* Locators assigned to the prologue and epilogue insns, or 0.  */
int prologue_locator;
int epilogue_locator;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location, last_location;
static tree curr_block, last_block;
/* Next locator value to hand out; -1 while the locator machinery is
   inactive (before insn_locators_alloc / after insn_locators_finalize).  */
static int curr_rtl_loc = -1;
244
245 /* Allocate insn locator datastructure. */
246 void
247 insn_locators_alloc (void)
248 {
249 prologue_locator = epilogue_locator = 0;
250
251 block_locators_locs = VEC_alloc (int, heap, 32);
252 block_locators_blocks = VEC_alloc (tree, gc, 32);
253 locations_locators_locs = VEC_alloc (int, heap, 32);
254 locations_locators_vals = VEC_alloc (location_t, heap, 32);
255
256 last_location = -1;
257 curr_location = -1;
258 curr_block = NULL;
259 last_block = NULL;
260 curr_rtl_loc = 0;
261 }
262
/* At the end of emit stage, clear current location.  Records the
   epilogue locator first, while the machinery is still active.  */
void
insn_locators_finalize (void)
{
  if (curr_rtl_loc >= 0)
    epilogue_locator = curr_insn_locator ();
  curr_rtl_loc = -1;
}
271
/* Free the insn locator datastructures.  (The old comment said
   "Allocate" -- copy-paste error.)  block_locators_blocks is
   GC-allocated; VEC_free just drops our reference to it.  */
void
insn_locators_free (void)
{
  prologue_locator = epilogue_locator = 0;

  VEC_free (int, heap, block_locators_locs);
  VEC_free (tree,gc, block_locators_blocks);
  VEC_free (int, heap, locations_locators_locs);
  VEC_free (location_t, heap, locations_locators_vals);
}
283
284
285 /* Set current location. */
286 void
287 set_curr_insn_source_location (location_t location)
288 {
289 /* IV opts calls into RTL expansion to compute costs of operations. At this
290 time locators are not initialized. */
291 if (curr_rtl_loc == -1)
292 return;
293 curr_location = location;
294 }
295
/* Get current location (as last set by set_curr_insn_source_location).  */
location_t
get_curr_insn_source_location (void)
{
  return curr_location;
}
302
303 /* Set current scope block. */
304 void
305 set_curr_insn_block (tree b)
306 {
307 /* IV opts calls into RTL expansion to compute costs of operations. At this
308 time locators are not initialized. */
309 if (curr_rtl_loc == -1)
310 return;
311 if (b)
312 curr_block = b;
313 }
314
/* Get current scope block (as last set by set_curr_insn_block).  */
tree
get_curr_insn_block (void)
{
  return curr_block;
}
321
/* Return current insn locator.  Lazily appends a new entry to the
   block and/or location tables whenever the current scope block or
   source location changed since the previous call, bumping
   curr_rtl_loc for each change.  Returns 0 (the "unknown" locator)
   when locators are inactive or the location is unknown.  */
int
curr_insn_locator (void)
{
  if (curr_rtl_loc == -1 || curr_location == UNKNOWN_LOCATION)
    return 0;
  if (last_block != curr_block)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, block_locators_locs, curr_rtl_loc);
      VEC_safe_push (tree, gc, block_locators_blocks, curr_block);
      last_block = curr_block;
    }
  if (last_location != curr_location)
    {
      curr_rtl_loc++;
      VEC_safe_push (int, heap, locations_locators_locs, curr_rtl_loc);
      VEC_safe_push (location_t, heap, locations_locators_vals, &curr_location);
      last_location = curr_location;
    }
  return curr_rtl_loc;
}
344
/* Pass execute hook: switch the IL into cfglayout mode.  */
static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
351
352 static unsigned int
353 outof_cfg_layout_mode (void)
354 {
355 basic_block bb;
356
357 FOR_EACH_BB (bb)
358 if (bb->next_bb != EXIT_BLOCK_PTR)
359 bb->aux = bb->next_bb;
360
361 cfg_layout_finalize ();
362
363 return 0;
364 }
365
/* Pass descriptor for entering cfglayout mode.  */
struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
384
/* Pass descriptor for leaving cfglayout mode.  */
struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
403 \f
404 /* Return scope resulting from combination of S1 and S2. */
405 static tree
406 choose_inner_scope (tree s1, tree s2)
407 {
408 if (!s1)
409 return s2;
410 if (!s2)
411 return s1;
412 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
413 return s1;
414 return s2;
415 }
416 \f
/* Emit lexical block notes needed to change scope from S1 to S2,
   placed immediately before ORIG_INSN.  Walks both scopes up the
   BLOCK_SUPERCONTEXT chain to their common ancestor, then emits
   BLOCK_END notes closing S1 up to the ancestor and BLOCK_BEG notes
   opening the ancestor down to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the common ancestor.  BLOCK_NUMBERs increase with nesting
     order of emission, so repeatedly hoist whichever side is
     numbered higher.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  Emitting before INSN and updating INSN keeps the
     BLOCK_BEG notes in outermost-to-innermost order.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
460
/* Return lexical scope block locator belongs to.  Binary-searches
   block_locators_locs for the greatest recorded locator <= LOC and
   returns the corresponding scope block; NULL for the "unknown"
   locator 0 or when no blocks were recorded.  */
static tree
locator_scope (int loc)
{
  int max = VEC_length (int, block_locators_locs);
  int min = 0;

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  /* Binary search: invariant is locs[min] <= loc < locs[max]
     (with max one past the last candidate).  */
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, block_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return VEC_index (tree, block_locators_blocks, min);
}
499
/* Return lexical scope block INSN belongs to (via its locator).  */
tree
insn_scope (const_rtx insn)
{
  return locator_scope (INSN_LOCATOR (insn));
}
506
/* Return the source location of the statement specified by the
   locator LOC.  Binary-searches locations_locators_locs for the
   greatest recorded locator <= LOC, the same scheme as
   locator_scope above.  */
location_t
locator_location (int loc)
{
  int max = VEC_length (int, locations_locators_locs);
  int min = 0;

  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VEC_index (int, locations_locators_locs, pos);

      if (tmp <= loc && min != pos)
	min = pos;
      else if (tmp > loc && max != pos)
	max = pos;
      else
	{
	  min = pos;
	  break;
	}
    }
  return *VEC_index (location_t, locations_locators_vals, min);
}
531
532 /* Return source line of the statement that produced this insn. */
533 int
534 locator_line (int loc)
535 {
536 expanded_location xloc;
537 if (!loc)
538 return 0;
539 else
540 xloc = expand_location (locator_location (loc));
541 return xloc.line;
542 }
543
/* Return line number of the statement that produced this insn
   (0 when the insn has no known location).  */
int
insn_line (const_rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}
550
551 /* Return source file of the statement specified by LOC. */
552 const char *
553 locator_file (int loc)
554 {
555 expanded_location xloc;
556 if (!loc)
557 return 0;
558 else
559 xloc = expand_location (locator_location (loc));
560 return xloc.file;
561 }
562
/* Return source file of the statement that produced this insn
   (NULL when the insn has no known location).  */
const char *
insn_file (const_rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}
569
570 /* Return true if LOC1 and LOC2 locators have the same location and scope. */
571 bool
572 locator_eq (int loc1, int loc2)
573 {
574 if (loc1 == loc2)
575 return true;
576 if (locator_location (loc1) != locator_location (loc2))
577 return false;
578 return locator_scope (loc1) == locator_scope (loc2);
579 }
580
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  Walks the
   active insns in final order, tracking the current scope and emitting
   the minimal open/close note sequence at each scope transition.  */

void
reemit_insn_block_notes (void)
{
  /* Start in the function's outermost scope.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  rtx body = PATTERN (insn);

	  this_block = NULL;
	  for (i = 0; i < XVECLEN (body, 0); i++)
	    this_block = choose_inner_scope (this_block,
					 insn_scope (XVECEXP (body, 0, i)));
	}
      /* Insns without a known scope stay in the current scope.  */
      if (! this_block)
	continue;

      if (this_block != cur_block)
	{
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Use a throwaway
     note as an anchor to close any scopes still open at the end.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
631 \f
632
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  This also clears the visited flag on
   all basic blocks.  If STAY_IN_CFGLAYOUT_MODE is false, this function
   also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb links to follow
     the aux chain.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux and visited fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      bb->il.rtl->visited = 0;
      if (!stay_in_cfglayout_mode)
	bb->il.rtl->header = bb->il.rtl->footer = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     are going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
701 \f
702
/* Given a reorder chain (the desired block order threaded through the
   bb->aux fields), rearrange the code to match.  Three phases:
   (1) relink the insn chain to follow the aux order, re-inserting the
   saved per-block headers/footers; (2) invert or add jumps so every
   block's outgoing edges match its new physical successor; (3) when
   not optimizing, make sure each edge goto_locus is represented by
   some insn with that locus.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      /* Splice in the stashed header, the block body, then the
	 stashed footer; INSN tracks the chain tail throughout.  */
      if (bb->il.rtl->header)
	{
	  if (insn)
	    NEXT_INSN (insn) = bb->il.rtl->header;
	  else
	    set_first_insn (bb->il.rtl->header);
	  PREV_INSN (bb->il.rtl->header) = insn;
	  insn = bb->il.rtl->header;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->il.rtl->footer)
	{
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  bb->il.rtl->footer = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru (e_fall);
      if (nb)
	{
	  nb->il.rtl->visited = 1;
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (e->goto_locus && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* If the last real insn of the source block already has
		 this locus, the edge is covered.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || INSN_LOCATOR (insn) == 0))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		continue;
	      /* A simple jump without a locus can simply adopt it.  */
	      if (simplejump_p (BB_END (e->src))
		  && INSN_LOCATOR (BB_END (e->src)) == 0)
		{
		  INSN_LOCATOR (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Likewise covered if the first real insn of the
		     destination has the locus.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_LOCATOR (insn)
		      && locator_eq (INSN_LOCATOR (insn), (int) e->goto_locus))
		    continue;
		}
	      /* Otherwise split the edge and give the new block's last
		 insn (a nop if necessary) the goto_locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATOR (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (e2->goto_locus
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && locator_eq (e->goto_locus, e2->goto_locus))
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
1001 \f
1002 /* Perform sanity checks on the insn chain.
1003 1. Check that next/prev pointers are consistent in both the forward and
1004 reverse direction.
1005 2. Count insns in chain, going both directions, and check if equal.
1006 3. Check that get_last_insn () returns the actual end of chain. */
1007
1008 DEBUG_FUNCTION void
1009 verify_insn_chain (void)
1010 {
1011 rtx x, prevx, nextx;
1012 int insn_cnt1, insn_cnt2;
1013
1014 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
1015 x != 0;
1016 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
1017 gcc_assert (PREV_INSN (x) == prevx);
1018
1019 gcc_assert (prevx == get_last_insn ());
1020
1021 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
1022 x != 0;
1023 nextx = x, insn_cnt2++, x = PREV_INSN (x))
1024 gcc_assert (NEXT_INSN (x) == nextx);
1025
1026 gcc_assert (insn_cnt1 == insn_cnt2);
1027 }
1028 \f
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  Moves the fallthru predecessor of the
   exit block to the end of the bb->aux chain, splitting it off the
   entry block first if necessary.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* BB->aux != NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  bb->il.rtl->footer = c->il.rtl->footer;
	  c->il.rtl->footer = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ... walk to the current chain tail...  */
      while (c->aux)
	c = (basic_block) c->aux;

      /* ... and append BB there.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
1073
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  All fallthru edges are redirected into a fresh
   forwarder block which becomes the single fallthru predecessor.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Look for a second fallthru predecessor of the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  Append it to whichever block currently ends the aux
     chain (aux == NULL).  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
1123 \f
1124 /* Return true in case it is possible to duplicate the basic block BB. */
1125
1126 /* We do not want to declare the function in a header file, since it should
1127 only be used through the cfghooks interface, and we do not want to move
1128 it to cfgrtl.c since it would require also moving quite a lot of related
1129 code. */
1130 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
1131
1132 bool
1133 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
1134 {
1135 /* Do not attempt to duplicate tablejumps, as we need to unshare
1136 the dispatch table. This is difficult to do, as the instructions
1137 computing jump destination may be hoisted outside the basic block. */
1138 if (tablejump_p (BB_END (bb), NULL, NULL))
1139 return false;
1140
1141 /* Do not duplicate blocks containing insns that can't be copied. */
1142 if (targetm.cannot_copy_insn_p)
1143 {
1144 rtx insn = BB_HEAD (bb);
1145 while (1)
1146 {
1147 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
1148 return false;
1149 if (insn == BB_END (bb))
1150 break;
1151 insn = NEXT_INSN (insn);
1152 }
1153 }
1154
1155 return true;
1156 }
1157
/* Duplicate the insns FROM through TO (inclusive), appending the
   copies at the end of the current insn chain, and return the first
   copied insn.  Dispatch tables and most notes are deliberately not
   copied; the caller is expected to be duplicating a basic block
   (see cfg_layout_can_duplicate_bb_p).  */
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case DEBUG_INSN:
	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  /* Avoid copying of dispatch tables.  We never duplicate
	     tablejumps, so this can hit only in case the table got
	     moved far from original jump.  */
	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      /* Avoid copying following barrier as well if any
		 (and debug insns in between).  */
	      rtx next;

	      for (next = NEXT_INSN (insn);
		   next != NEXT_INSN (to);
		   next = NEXT_INSN (next))
		if (!DEBUG_INSN_P (next))
		  break;
	      if (next != NEXT_INSN (to) && BARRIER_P (next))
		insn = next;
	      break;
	    }
	  copy = emit_copy_of_insn_after (insn, get_last_insn ());
	  maybe_copy_prologue_epilogue_insn (insn, copy);
	  break;

	case CODE_LABEL:
	  /* Labels are not copied; jumps into the copy are redirected
	     by the caller.  */
	  break;

	case BARRIER:
	  emit_barrier ();
	  break;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	      /* In case prologue is empty and function contain label
		 in first BB, we may want to copy the block.  */
	    case NOTE_INSN_PROLOGUE_END:

	    case NOTE_INSN_DELETED:
	    case NOTE_INSN_DELETED_LABEL:
	      /* No problem to strip these.  */
	    case NOTE_INSN_FUNCTION_BEG:
	      /* There is always just single entry to function.  */
	    case NOTE_INSN_BASIC_BLOCK:
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	    case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	      emit_note_copy (insn);
	      break;

	    default:
	      /* All other notes should have already been eliminated.  */
	      gcc_unreachable ();
	    }
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* The first copy is the insn after the anchor note; drop the note.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
1240 /* Create a duplicate of the basic block BB. */
1241
1242 /* We do not want to declare the function in a header file, since it should
1243 only be used through the cfghooks interface, and we do not want to move
1244 it to cfgrtl.c since it would require also moving quite a lot of related
1245 code. */
1246 extern basic_block cfg_layout_duplicate_bb (basic_block);
1247
1248 basic_block
1249 cfg_layout_duplicate_bb (basic_block bb)
1250 {
1251 rtx insn;
1252 basic_block new_bb;
1253
1254 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
1255 new_bb = create_basic_block (insn,
1256 insn ? get_last_insn () : NULL,
1257 EXIT_BLOCK_PTR->prev_bb);
1258
1259 BB_COPY_PARTITION (new_bb, bb);
1260 if (bb->il.rtl->header)
1261 {
1262 insn = bb->il.rtl->header;
1263 while (NEXT_INSN (insn))
1264 insn = NEXT_INSN (insn);
1265 insn = duplicate_insn_chain (bb->il.rtl->header, insn);
1266 if (insn)
1267 new_bb->il.rtl->header = unlink_insn_chain (insn, get_last_insn ());
1268 }
1269
1270 if (bb->il.rtl->footer)
1271 {
1272 insn = bb->il.rtl->footer;
1273 while (NEXT_INSN (insn))
1274 insn = NEXT_INSN (insn);
1275 insn = duplicate_insn_chain (bb->il.rtl->footer, insn);
1276 if (insn)
1277 new_bb->il.rtl->footer = unlink_insn_chain (insn, get_last_insn ());
1278 }
1279
1280 return new_bb;
1281 }
1282
1283 \f
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */
1288
1289 void
1290 cfg_layout_initialize (unsigned int flags)
1291 {
1292 rtx x;
1293 basic_block bb;
1294
1295 initialize_original_copy_tables ();
1296
1297 cfg_layout_rtl_register_cfg_hooks ();
1298
1299 record_effective_endpoints ();
1300
1301 /* Make sure that the targets of non local gotos are marked. */
1302 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1303 {
1304 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1305 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
1306 }
1307
1308 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
1309 }
1310
1311 /* Splits superblocks. */
1312 void
1313 break_superblocks (void)
1314 {
1315 sbitmap superblocks;
1316 bool need = false;
1317 basic_block bb;
1318
1319 superblocks = sbitmap_alloc (last_basic_block);
1320 sbitmap_zero (superblocks);
1321
1322 FOR_EACH_BB (bb)
1323 if (bb->flags & BB_SUPERBLOCK)
1324 {
1325 bb->flags &= ~BB_SUPERBLOCK;
1326 SET_BIT (superblocks, bb->index);
1327 need = true;
1328 }
1329
1330 if (need)
1331 {
1332 rebuild_jump_labels (get_insns ());
1333 find_many_sub_basic_blocks (superblocks);
1334 }
1335
1336 free (superblocks);
1337 }
1338
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Give the exit block a single fallthru predecessor (a forwarder is
     created if it had several).  */
  force_one_exit_fallthru ();
  /* Leave cfglayout mode: switch back to the plain RTL cfg hooks.  */
  rtl_register_cfg_hooks ();
  /* After reload, and only when no epilogue will be emitted, the
     fallthru predecessor of the exit block needs fixing up --
     presumably so it ends up last in the chain; see
     fixup_fallthru_exit_predecessor earlier in this file.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  /* Re-linearize the insn stream to match the bb->aux ordering.  */
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
1366
1367 /* Checks whether all N blocks in BBS array can be copied. */
1368 bool
1369 can_copy_bbs_p (basic_block *bbs, unsigned n)
1370 {
1371 unsigned i;
1372 edge e;
1373 int ret = true;
1374
1375 for (i = 0; i < n; i++)
1376 bbs[i]->flags |= BB_DUPLICATED;
1377
1378 for (i = 0; i < n; i++)
1379 {
1380 /* In case we should redirect abnormal edge during duplication, fail. */
1381 edge_iterator ei;
1382 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
1383 if ((e->flags & EDGE_ABNORMAL)
1384 && (e->dest->flags & BB_DUPLICATED))
1385 {
1386 ret = false;
1387 goto end;
1388 }
1389
1390 if (!can_duplicate_block_p (bbs[i]))
1391 {
1392 ret = false;
1393 break;
1394 }
1395 }
1396
1397 end:
1398 for (i = 0; i < n; i++)
1399 bbs[i]->flags &= ~BB_DUPLICATED;
1400
1401 return ret;
1402 }
1403
/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated, and those copies whose destinations lie inside
   BBS are redirected to the appropriate newly created block.  The function
   assigns bbs into loops (copy of basic block bb is assigned to
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (LOOPS structure that contains the
   information about dominators is passed to enable this).

   BASE is the superloop to which the basic blocks belong; if its header or
   latch is copied, we do not set the new blocks as header or latch.

   Created copies of the NUM_EDGES edges in array EDGES are stored in array
   NEW_EDGES, also in the same order.

   Newly created basic blocks are put after the basic block AFTER in the
   instruction stream, and the order of the blocks in BBS array is
   preserved.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
	  edge *edges, unsigned num_edges, edge *new_edges,
	  struct loop *base, basic_block after)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL, after);
      after = new_bb;
      /* Mark the original; the flag doubles as an O(1) membership test
	 for the copied region in the loops below, and is cleared at the
	 end of this function.  */
      bb->flags |= BB_DUPLICATED;
      /* Possibly set loop header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
	new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
	new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  Note that only dominators lying inside the copied
     region are transferred: if BB's immediate dominator is outside BBS,
     NEW_BB's dominator is left unset here for the caller to handle.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->flags & BB_DUPLICATED)
	{
	  dom_bb = get_bb_copy (dom_bb);
	  set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
	}
    }

  /* Redirect edges.  */
  for (j = 0; j < num_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
	{
	  /* Record in NEW_EDGES the copy of each requested edge; E is
	     the copy of EDGES[j] iff it leaves NEW_BB (the copy of
	     EDGES[j]->src) for the same destination.  */
	  for (j = 0; j < num_edges; j++)
	    if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
	      new_edges[j] = e;

	  /* Edges leaving the copied region keep their destination;
	     edges into the region are redirected to the copies.  */
	  if (!(e->dest->flags & BB_DUPLICATED))
	    continue;
	  redirect_edge_and_branch_force (e, get_bb_copy (e->dest));
	}
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->flags &= ~BB_DUPLICATED;
}
1486
1487 #include "gt-cfglayout.h"