vec.h: Update API to separate allocation mechanism from type.
author     Nathan Sidwell <nathan@codesourcery.com>
           Thu, 21 Apr 2005 09:18:28 +0000 (09:18 +0000)
committer  Nathan Sidwell <nathan@gcc.gnu.org>
           Thu, 21 Apr 2005 09:18:28 +0000 (09:18 +0000)
* vec.h: Update API to separate allocation mechanism from type.
(VEC_safe_grow): New.
* vec.c (calculate_allocation): New.
(vec_gc_o_reserve, vec_heap_o_reserve): Adjust.
(vec_gc_free, vec_heap_free): Remove.
* gengtype-lex.l (DEF_VEC_): Process multi-argument macros.  Adjust.
(VEC): Likewise.
(mangle_macro_name): New.
(struct macro_def): New.
(struct macro): Add multiple argument values.
(macro_expans_end): New.
(push_macro_expansion): Chain on new macro.  Process multiple
args, create follow-on expansion.  Return follow-on argument.
(macro_input): Deal with multiple arguments.

* tree.h: Define VEC(tree,heap) and VEC(tree,gc).
(struct tree_binfo): Adjust.
* basic-block.h: Define VEC(edge,gc).
(struct edge_def): Adjust.
(struct basic_block_def, struct edge_iterator): Likewise.
(ei_container, ei_start_1, ei_last_1): Likewise.
* cfg.c (connect_src, connect_dest): Likewise.
* cfgrtl.c (force_nonfallthru_and_redirect): Likewise.
* dbxout.c (dbxout_type): Likewise.
* dwarf2out.c (gen_member_die): Likewise.
* lambda-code.c: Define VEC(int,gc), VEC(lambda_loop,gc).
(gcc_tree_to_linear_expression): Adjust.
(gcc_loop_to_lambda_loop, gcc_loopnest_to_lambda_loopnest,
lbv_to_gcc_expression, lle_to_gcc_expression,
lambda_loopnest_to_gcc_loopnest, can_convert_to_perfect_nest,
perfect_nestify): Likewise.
* lambda.h (gcc_loopnest_to_lambda_loopnest,
lambda_loopnest_to_gcc_loopnest): Adjust prototypes.
* profile.c (instrument_values): Adjust.
* tree-cfg.c (modified_noreturn_calls): Adjust.
(remove_fallthru_edge): Likewise.
* tree-dump.c (dequeue_and_dump): Adjust.
* tree-flow-inline.h (mark_stmt_modified): Adjust.
* tree-flow.h (modified_noreturn_calls): Adjust.
(tree_on_heap): Remove. (yay!)
(register_new_def): Adjust.
* tree-into-ssa.c: Define VEC(int,heap).
(block_defs_stack): Adjust.
(find_idf, insert_phi_nodes, register_new_def,
rewrite_initialize_block, rewrite_finalize_block,
register_new_update_single, rewrite_update_init_block,
rewrite_update_fini_block, rewrite_blocks,
ssa_rewrite_finalize_block, ssa_register_new_def,
ssa_rewrite_initialize_block, rewrite_ssa_into_ssa): Likewise.
* tree-loop-linear.c (linear_transform_loops): Adjust.
* tree-ssa-alias.c: Define VEC(fieldoff_t,heap).
(push_fields_onto_fieldstack, create_overlap_variables_for): Adjust.
* tree-ssa-dom.c (avail_exprs_stack, block_defs_stack,
stmts_to_rescan, const_and_copies_stack, nonzero_vars_stack,
vrp_variables_stack): Adjust declarations.
(tree_ssa_dominator_optimize): Adjust.
(dom_opt_initialize_block, remove_local_expressions_from_table,
restore_nonzero_vars_to_original_value,
restore_vars_to_original_value,
restore_currdefs_to_original_value, dom_opt_finalize_block,
record_var_is_nonzero, record_cond, record_const_or_copy_1,
optimize_stmt, update_rhs_and_lookup_avail_expr,
lookup_avail_expr, record_range): Likewise.
* tree-ssa-pre.c: Define VEC(basic_block,heap).
(compute_antic_aux): Adjust.
(inserted_exprs, create_expression_by_pieces,
insert_into_preds_of_block, eliminate, mark_operand_necessary,
remove_dead_inserted_code, fini_pre): Likewise.
* tree-ssa-propagate.c (interesting_ssa_edges): Adjust.
(varying_ssa_edges, add_ssa_edge, process_ssa_edge_worklist,
ssa_prop_init): Likewise.
* tree-ssa.c: Define VEC(bitmap,heap).
(verify_name_tags): Adjust.
* value-prof.c (rtl_divmod_values_to_profile): Adjust.
(insn_prefetch_values_to_profile, rtl_find_values_to_profile,
tree_divmod_values_to_profile, tree_find_values_to_profile,
value_profile_transformations): Likewise.
* value-prof.h: Define VEC(histogram_value,heap).
* varasm.c: Remove alias_pair pointer typedef, define
VEC(alias_pair,gc).
(finish_aliases_1, finish_aliases_2, assemble_alias): Adjust.

* config/pa/pa.c (typedef extern_symbol): Typedef the structure,
not a pointer to it.  Create an object vector.
(extern_symbols): Turn into an object vector.
(pa_hpux_asm_output_external, pa_hpux_file_end): Adjust.

* cp/cp-tree.h: Adjust for new VEC API.
Define VEC(tree_pair_s,gc).
(struct save_scope): Adjust.
(struct lang_type_class): Adjust.
(unemitted_tinfo_decls): Adjust.
* cp/class.c (add_method, resort_type_method_vec,
finish_struct_methods, struct find_final_overrider_data,
dfs_find_final_overrider_pre, find_final_overrider,
get_vcall_index, warn_hidden, walk_subobject_offsets,
check_methods, fixup_inline_methods, end_of_class,
warn_about_ambiguous_bases, finish_struct, build_vtbl_initializer,
add_vcall_offset): Adjust.
* cp/decl.c (xref_basetypes, finish_method): Adjust.
* cp/decl2.c (check_classfn): Adjust.
* cp/init.c (sort_mem_initializers, push_base_cleanups): Adjust.
* cp/method.c (do_build_copy_constructor): Adjust.
* cp/name-lookup.c (new_class_binding, store_binding,
store_bindings, store_class_bindings): Adjust.
* cp/name-lookup.h: Define VEC(cxx_saved_binding,gc),
VEC(cp_class_binding,gc).
(struct cp_binding_level): Adjust.
* cp/parser.c: Define VEC(cp_token_position,heap).
(struct cp_lexer): Adjust.
(cp_lexer_new_main, cp_lexer_new_from_tokens, cp_lexer_destroy,
cp_lexer_save_tokens): Adjust.
* cp/pt.c (retrieve_specialization,
check_explicit_specialization): Adjust.
* cp/rtti.c (unemitted_tinfo_decls): Adjust.
(init_rtti_processing, get_tinfo_decl, get_pseudo_ti_init,
get_pseudo_ti_desc): Adjust.
* cp/search.c (dfs_access_in_type, lookup_conversion_operator,
lookup_fnfields_1, dfs_walk_once, dfs_walk_once_accessible,
dfs_get_pure_virtuals, lookup_conversions_r, binfo_for_vbase): Adjust.
* cp/semantics.c: Define VEC(deferred_access,gc).
(push_deferring_access_checks): Adjust.
* cp/typeck2.c (abstract_virtuals_error): Adjust.

From-SVN: r98498
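
For orientation before the per-file hunks, here is a minimal sketch of what
the vec.h change looks like to client code.  It is illustrative only: the
macro forms are taken from the hunks below, but the wrapper functions and
variable names are invented, and the snippet assumes it sits inside the GCC
sources with the usual internal headers (system.h, coretypes.h, ggc.h, vec.h).

/* Before this patch the allocation strategy was part of the vector type:

     DEF_VEC_GC_P (edge);                 GC'd vector of 'edge' pointers
     VEC (edge) *succs;
     VEC_safe_push (edge, succs, e);      no allocation argument

   After it, the element kind is declared once and each allocation strategy
   separately, and every allocating operation names the strategy.  */

DEF_VEC_P (edge);
DEF_VEC_ALLOC_P (edge, gc);

static void
add_succ (basic_block bb, edge e)
{
  VEC_safe_push (edge, gc, bb->succs, e);   /* grows in GC'd memory  */
}

static void
scratch_list (tree decl)
{
  VEC(tree,heap) *stack = VEC_alloc (tree, heap, 20);

  VEC_safe_push (tree, heap, stack, decl);
  /* ... use the vector ...  */
  VEC_free (tree, heap, stack);             /* heap vectors need explicit frees  */
}

The operations that never allocate (VEC_length, VEC_index, VEC_iterate,
VEC_quick_push) keep their old shape, which is why most hunks below only add
the strategy tag to type names and to VEC_alloc/VEC_safe_push/VEC_reserve/
VEC_free calls.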

43 files changed:
gcc/ChangeLog
gcc/basic-block.h
gcc/cfg.c
gcc/cfgrtl.c
gcc/config/pa/pa.c
gcc/cp/ChangeLog
gcc/cp/class.c
gcc/cp/cp-tree.h
gcc/cp/decl.c
gcc/cp/decl2.c
gcc/cp/init.c
gcc/cp/method.c
gcc/cp/name-lookup.c
gcc/cp/name-lookup.h
gcc/cp/parser.c
gcc/cp/pt.c
gcc/cp/rtti.c
gcc/cp/search.c
gcc/cp/semantics.c
gcc/cp/typeck2.c
gcc/dbxout.c
gcc/dwarf2out.c
gcc/gengtype-lex.l
gcc/lambda-code.c
gcc/lambda.h
gcc/profile.c
gcc/tree-cfg.c
gcc/tree-dump.c
gcc/tree-flow-inline.h
gcc/tree-flow.h
gcc/tree-into-ssa.c
gcc/tree-loop-linear.c
gcc/tree-ssa-alias.c
gcc/tree-ssa-dom.c
gcc/tree-ssa-pre.c
gcc/tree-ssa-propagate.c
gcc/tree-ssa.c
gcc/tree.h
gcc/value-prof.c
gcc/value-prof.h
gcc/varasm.c
gcc/vec.c
gcc/vec.h

index 9a640af23018f037d860a2df98e780c10cc11d6b..9772f72785afa3c4501dfa96990897c09247a6db 100644 (file)
@@ -1,3 +1,92 @@
+2005-04-21  Nathan Sidwell  <nathan@codesourcery.com>
+
+       * vec.h: Update API to separate allocation mechanism from type.
+       (VEC_safe_grow): New.
+       * vec.c (calculate_allocation): New.
+       (vec_gc_o_reserve, vec_heap_o_reserve): Adjust.
+       (vec_gc_free, vec_heap_free): Remove.
+       * gengtype-lex.l (DEF_VEC_): Process mult-argument macros.  Adjust.
+       (VEC): Likewise.
+       (mangle_macro_name): New.
+       (struct macro_def): New.
+       (struct macro): Add multiple argument values.
+       (macro_expans_end): New.
+       (push_macro_expansion):  Chain on new macro. Process multiple
+       args, create follow on expansion. Return follow on argument.
+       (macro_input): Deal with multiple arguments.
+       
+       * tree.h: Define VEC(tree,heap) and VEC(tree,gc).
+       (struct tree_binfo): Adjust.
+       * basic-block.h: Define VEC(edge,gc).
+       (struct edge_def): Adjust.
+       (struct basic_block_def, struct edge_iterator): Likewise.
+       (ei_container, ei_start_1, ei_last_1): Likewise.
+       * cfg.c (connect_src, connect_dest): Likewise.
+       * cfgrtl.c (force_nonfallthru_and_redirect)
+       * dbxout.c (dbxout_type)
+       * dwarf2out.c (gen_member_die)
+       * lambda-code.c: Define VEC(int,gc), VEC(lambda_loop,gc).
+       (gcc_tree_to_linear_expression): Adjust.
+       (gcc_loop_to_lambda_loop, gcc_loopnest_to_lambda_loopnest,
+       lbv_to_gcc_expression, lle_to_gcc_expression,
+       lambda_loopnest_to_gcc_loopnest, can_convert_to_perfect_nest,
+       perfect_nestify): Likewise.
+       * lambda.h (gcc_loopnest_to_lambda_loopnest,
+       lambda_loopnest_to_gcc_loopnest): Adjust prototypes.
+       * profile.c (instrument_values): Adjust.
+       * tree-cfg.c (modified_noreturn_calls): Adjust.
+       (remove_fallthru_edge): Likewise.
+       * tree-dump.c (dequeue_and_dump): Adjust.
+       * tree-flow-inline.h (mark_stmt_modified): Adjust.
+       * tree-flow.h (modified_noreturn_calls): Adjust.
+       (tree_on_heap): Remove. (yay!)
+       (register_new_def): Adjust.
+       * tree-into-ssa.c: Define VEC(int,heap).
+       (block_defs_stack): Adjust.
+       (find_idf, insert_phi_nodes, register_new_def,
+       rewrite_initialize_block, rewrite_finalize_block,
+       register_new_update_single, rewrite_update_init_block,
+       rewrite_update_fini_block, rewrite_blocks,
+       ssa_rewrite_finalize_block, ssa_register_new_def,
+       ssa_rewrite_initialize_block, rewrite_ssa_into_ssa): Likewise.
+       * tree-loop-linear.c (linear_transform_loops): Adjust.
+       * tree-ssa-alias.c: Define VEC(fieldoff_t,heap).
+       (push_fields_onto_fieldstack, create_overlap_variables_for): Adjust.
+       * tree-ssa-dom.c (avail_exprs_stack, block_defs_stack,
+       stmts_to_rescan, const_and_copies_stack, nonzero_vars_stack,
+       vrp_variables_stack): Adjust declarations.
+       (tree_ssa_dominator_optimize): Adjust.
+       (dom_opt_initialize_block, remove_local_expressions_from_table,
+       restore_nonzero_vars_to_original_value,
+       restore_vars_to_original_value,
+       restore_currdefs_to_original_value, dom_opt_finalize_block,
+       record_var_is_nonzero, record_cond, record_const_or_copy_1,
+       optimize_stmt, update_rhs_and_lookup_avail_expr,
+       lookup_avail_expr, record_range): Likewise.
+       * tree-ssa-pre.c: Define VEC(basic_block,heap).
+       (compute_antic_aux): Adjust.
+       (inserted_exprs, create_expression_by_pieces, 
+       insert_into_preds_of_block, eliminate, mark_operand_necessary, 
+       remove_dead_inserted_code, fini_pre): Likewise.
+       * tree-ssa-propagate.c (interesting_ssa_edges): Adjust.
+       (varying_ssa_edges, add_ssa_edge, process_ssa_edge_worklist. 
+       ssa_prop_init): Likewise.
+       * tree-ssa.c: Define VEC(bitmap,heap).
+       (verify_name_tags): Adjust.
+       * value-prof.c (rtl_divmod_values_to_profile): Adjust.
+       (insn_prefetch_values_to_profile, rtl_find_values_to_profile,
+       tree_divmod_values_to_profile, tree_find_values_to_profile,
+       value_profile_transformations): Likewise.
+       * value-prof.h: Define VEC(histogram_value,heap).
+       * varasm.c: Remove alias_pair pointer typedef, define
+       VEC(alias_pair,gc).
+       (finish_aliases_1, finish_aliases_2, assemble_alias): Adjust.
+       
+       * config/pa/pa.c (typedef extern_symbol): Typedef the structure,
+       not a pointer to it.  Create an object vector.
+       (extern_symbols): Turn into an object vector.
+       (pa_hpux_asm_output_external, pa_hpux_file_end): Adjust.
+
 2005-04-21  Sebastian Pop  <pop@cri.ensmp.fr>
 
        PR/20742
index 7d10d94799680761bfe7d8e5cda45378db08e60e..6f295136d29365616ab1593418e9edae635b43de 100644 (file)
@@ -145,7 +145,8 @@ struct edge_def GTY(())
 };
 
 typedef struct edge_def *edge;
-DEF_VEC_GC_P(edge);
+DEF_VEC_P(edge);
+DEF_VEC_ALLOC_P(edge,gc);
 
 #define EDGE_FALLTHRU          1       /* 'Straight line' flow */
 #define EDGE_ABNORMAL          2       /* Strange flow, like computed
@@ -220,8 +221,8 @@ struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")
   tree stmt_list;
 
   /* The edges into and out of the block.  */
-  VEC(edge) *preds;
-  VEC(edge) *succs;
+  VEC(edge,gc) *preds;
+  VEC(edge,gc) *succs;
 
   /* The registers that are live on entry to this block.  */
   bitmap GTY ((skip (""))) global_live_at_start;
@@ -650,10 +651,10 @@ single_pred (basic_block bb)
 
 typedef struct {
   unsigned index;
-  VEC(edge) **container;
+  VEC(edge,gc) **container;
 } edge_iterator;
 
-static inline VEC(edge) *
+static inline VEC(edge,gc) *
 ei_container (edge_iterator i)
 {
   gcc_assert (i.container);
@@ -665,7 +666,7 @@ ei_container (edge_iterator i)
 
 /* Return an iterator pointing to the start of an edge vector.  */
 static inline edge_iterator
-ei_start_1 (VEC(edge) **ev)
+ei_start_1 (VEC(edge,gc) **ev)
 {
   edge_iterator i;
 
@@ -678,7 +679,7 @@ ei_start_1 (VEC(edge) **ev)
 /* Return an iterator pointing to the last element of an edge
    vector.  */
 static inline edge_iterator
-ei_last_1 (VEC(edge) **ev)
+ei_last_1 (VEC(edge,gc) **ev)
 {
   edge_iterator i;
 
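
Client code that walks these edge vectors is unchanged apart from the type
names; the non-allocating accessors keep their old shape.  A small sketch
(the function is invented, the accessors are the ones used throughout this
patch):

/* Count BB's successor edges that are not fallthru, walking the
   VEC(edge,gc) directly rather than through the edge_iterator.  */
static unsigned
count_non_fallthru_succs (basic_block bb)
{
  unsigned ix, n = 0;
  edge e;

  for (ix = 0; VEC_iterate (edge, bb->succs, ix, e); ix++)
    if (!(e->flags & EDGE_FALLTHRU))
      n++;

  return n;
}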
index a38cea99c62281f0ff9d44d33fcd62fdfec2c201..85d922eae85ed62846aba77d1a70d30e36998895 100644 (file)
--- a/gcc/cfg.c
+++ b/gcc/cfg.c
@@ -205,7 +205,7 @@ expunge_block (basic_block b)
 static inline void
 connect_src (edge e)
 {
-  VEC_safe_push (edge, e->src->succs, e);
+  VEC_safe_push (edge, gc, e->src->succs, e);
 }
 
 /* Connect E to E->dest.  */
@@ -214,7 +214,7 @@ static inline void
 connect_dest (edge e)
 {
   basic_block dest = e->dest;
-  VEC_safe_push (edge, dest->preds, e);
+  VEC_safe_push (edge, gc, dest->preds, e);
   e->dest_idx = EDGE_COUNT (dest->preds) - 1;
 }
 
index 26460488621f2c6c7ad539793ef34e4ebdfb4a5e..f14e44e05a14125b2de6c7da08085ed29e348ad8 100644 (file)
@@ -1065,7 +1065,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
          
          gcc_assert (found);
          
-         VEC_safe_push (edge, bb->succs, e);
+         VEC_safe_push (edge, gc, bb->succs, e);
          make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
        }
     }
index ad6594fded6c99eb5b19e2c8a774d52a9e756f90..3e92a16b2a8242117fef04fddd7f0122163de870 100644 (file)
@@ -9022,18 +9022,18 @@ pa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
    at the end of the file if and only if SYMBOL_REF_REFERENCED_P is true.
    This avoids putting out names that are never really used.  */
 
-struct extern_symbol GTY(())
+typedef struct extern_symbol GTY(())
 {
   tree decl;
   const char *name;
-};
-typedef struct extern_symbol *extern_symbol;
+} extern_symbol;
 
 /* Define gc'd vector type for extern_symbol.  */
-DEF_VEC_GC_P(extern_symbol);
+DEF_VEC_O(extern_symbol);
+DEF_VEC_ALLOC_O(extern_symbol,gc);
 
 /* Vector of extern_symbol pointers.  */
-static GTY(()) VEC(extern_symbol) *extern_symbols;
+static GTY(()) VEC(extern_symbol,gc) *extern_symbols;
 
 #ifdef ASM_OUTPUT_EXTERNAL_REAL
 /* Mark DECL (name NAME) as an external reference (assembler output
@@ -9043,12 +9043,11 @@ static GTY(()) VEC(extern_symbol) *extern_symbols;
 void
 pa_hpux_asm_output_external (FILE *file, tree decl, const char *name)
 {
-  extern_symbol p = ggc_alloc (sizeof (struct extern_symbol));
+  extern_symbol * p = VEC_safe_push (extern_symbol, gc, extern_symbols, NULL);
 
   gcc_assert (file == asm_out_file);
   p->decl = decl;
   p->name = name;
-  VEC_safe_push (extern_symbol, extern_symbols, p);
 }
 
 /* Output text required at the end of an assembler file.
@@ -9059,7 +9058,7 @@ static void
 pa_hpux_file_end (void)
 {
   unsigned int i;
-  extern_symbol p;
+  extern_symbol *p;
 
   output_deferred_plabels ();
 
@@ -9072,7 +9071,7 @@ pa_hpux_file_end (void)
        ASM_OUTPUT_EXTERNAL_REAL (asm_out_file, decl, p->name);
     }
 
-  extern_symbols = NULL;
+  VEC_free (extern_symbol, gc, extern_symbols);
 }
 #endif
 
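
The pa.c hunks also show the object-vector idiom: with DEF_VEC_O the
structure itself is the element, and VEC_safe_push with a NULL source
returns the address of the newly reserved slot, so the separate ggc_alloc
call disappears.  A sketch of the same pattern with a hypothetical record
type (names invented for illustration):

/* Hypothetical element type, analogous to extern_symbol above.  */
typedef struct sym_ref GTY(())
{
  tree decl;
  const char *name;
} sym_ref;

DEF_VEC_O (sym_ref);              /* vector of objects, not pointers  */
DEF_VEC_ALLOC_O (sym_ref, gc);    /* ... allocated in GC'd memory     */

static GTY(()) VEC(sym_ref,gc) *sym_refs;

static void
record_sym_ref (tree decl, const char *name)
{
  /* Pushing NULL reserves a slot and returns a pointer to it.  */
  sym_ref *p = VEC_safe_push (sym_ref, gc, sym_refs, NULL);

  p->decl = decl;
  p->name = name;
}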
index d8e77cca3ff64c27e547a057e1181cded4e8ff10..7c9747fad735596f4f9a11aca7af6edef9000d26 100644 (file)
@@ -1,3 +1,42 @@
+2005-04-21  Nathan Sidwell  <nathan@codesourcery.com>
+
+       * cp-tree.h: Adjust for new VEC API.
+       Define VEC(tree_pair_s,gc).
+       (struct save_scope): Adjust.
+       (struct lang_type_class): Adjust.
+       (unemitted_tinfo_decls): Adjust.
+       * class.c (add_method, resort_type_method_vec,
+       finish_struct_methods, struct find_final_overrider_data,
+       dfs_find_final_overrider_pre, find_final_overrider,
+       get_vcall_index, warn_hidden, walk_subobject_offsets,
+       check_methods, fixup_inline_methods, end_of_class,
+       warn_about_ambiguous_bases, finish_struct, build_vtbl_initializer,
+       add_vcall_offset): Adjust.
+       * decl.c (xref_basetypes, finish_method): Adjust.
+       * decl2.c (check_classfn): Adjust.
+       * init.c (sort_mem_initializers, push_base_cleanups): Adjust.
+       * method.c (do_build_copy_constructor): Adjust.
+       * name-lookup.c (new_class_binding, store_binding, 
+       store_bindings, store_class_bindings): Adjust.
+       * name-lookup.h: Define VEC(cxx_saved_binding,gc),
+       VEC(cp_class_binding,gc).
+       (struct cp_binding_level): Adjust.
+       * parser.c: Define VEC(cp_token_position,heap).
+       (struct cp_lexer): Adjust.
+       (cp_lexer_new_main, cp_lexer_new_from_tokens, cp_lexer_destroy,
+       cp_lexer_save_tokens): Adjust.
+       * pt.c (retrieve_specialization,
+       check_explicit_specialization): Adjust.
+       * rtti.c (unemitted_tinfo_decls): Adjust.
+       (init_rtti_processing, get_tinfo_decl, get_pseudo_ti_init,
+       get_pseudo_ti_desc): Adjust.
+       * search.c (dfs_access_in_type, lookup_conversion_operator,
+       lookup_fnfields_1, dfs_walk_once, dfs_walk_once_accessible,
+       dfs_get_pure_virtuals, lookup_conversions_r, binfo_for_vbase): Adjust.
+       * semantics.c: Define VEC(deferred_access,gc).
+       (push_deferring_access_checks): Adjust.
+       * typeck2.c (abstract_virtuals_error): Adjust.
+
 2005-04-20  Ian Lance Taylor  <ian@airs.com>
 
        * cp-tree.def: Add STMT_EXPR.
index df0587e677af8acce6749213d2cd11ec98da0076..2f2bd231a5cc573a07877c7dd00686501854811c 100644 (file)
@@ -887,7 +887,7 @@ add_method (tree type, tree method)
   tree overload;
   bool template_conv_p = false;
   bool conv_p;
-  VEC(tree) *method_vec;
+  VEC(tree,gc) *method_vec;
   bool complete_p;
   bool insert_p = false;
   tree current_fns;
@@ -909,7 +909,7 @@ add_method (tree type, tree method)
         allocate at least two (for constructors and destructors), and
         we're going to end up with an assignment operator at some
         point as well.  */
-      method_vec = VEC_alloc (tree, 8);
+      method_vec = VEC_alloc (tree, gc, 8);
       /* Create slots for constructors and destructors.  */
       VEC_quick_push (tree, method_vec, NULL_TREE);
       VEC_quick_push (tree, method_vec, NULL_TREE);
@@ -1052,7 +1052,7 @@ add_method (tree type, tree method)
     {
       /* We only expect to add few methods in the COMPLETE_P case, so
         just make room for one more method in that case.  */
-      if (VEC_reserve (tree, method_vec, complete_p ? 1 : -1))
+      if (VEC_reserve (tree, gc, method_vec, complete_p ? -1 : 1))
        CLASSTYPE_METHOD_VEC (type) = method_vec;
       if (slot == VEC_length (tree, method_vec))
        VEC_quick_push (tree, method_vec, overload);
@@ -1663,7 +1663,7 @@ resort_type_method_vec (void* obj,
                         gt_pointer_operator new_value,
                         void* cookie)
 {
-  VEC(tree) *method_vec = (VEC(tree) *) obj;
+  VEC(tree,gc) *method_vec = (VEC(tree,gc) *) obj;
   int len = VEC_length (tree, method_vec);
   size_t slot;
   tree fn;
@@ -1695,7 +1695,7 @@ static void
 finish_struct_methods (tree t)
 {
   tree fn_fields;
-  VEC(tree) *method_vec;
+  VEC(tree,gc) *method_vec;
   int slot, len;
 
   method_vec = CLASSTYPE_METHOD_VEC (t);
@@ -1814,7 +1814,7 @@ typedef struct find_final_overrider_data_s {
   /* The candidate overriders.  */
   tree candidates;
   /* Path to most derived.  */
-  VEC (tree) *path;
+  VEC(tree,heap) *path;
 } find_final_overrider_data;
 
 /* Add the overrider along the current path to FFOD->CANDIDATES.
@@ -1873,7 +1873,7 @@ dfs_find_final_overrider_pre (tree binfo, void *data)
 
   if (binfo == ffod->declaring_base)
     dfs_find_final_overrider_1 (binfo, ffod, VEC_length (tree, ffod->path));
-  VEC_safe_push (tree, ffod->path, binfo);
+  VEC_safe_push (tree, heap, ffod->path, binfo);
 
   return NULL_TREE;
 }
@@ -1922,12 +1922,12 @@ find_final_overrider (tree derived, tree binfo, tree fn)
   ffod.fn = fn;
   ffod.declaring_base = binfo;
   ffod.candidates = NULL_TREE;
-  ffod.path = VEC_alloc (tree, 30);
+  ffod.path = VEC_alloc (tree, heap, 30);
 
   dfs_walk_all (derived, dfs_find_final_overrider_pre,
                dfs_find_final_overrider_post, &ffod);
 
-  VEC_free (tree, ffod.path);
+  VEC_free (tree, heap, ffod.path);
   
   /* If there was no winner, issue an error message.  */
   if (!ffod.candidates || TREE_CHAIN (ffod.candidates))
@@ -1946,7 +1946,7 @@ find_final_overrider (tree derived, tree binfo, tree fn)
 static tree
 get_vcall_index (tree fn, tree type)
 {
-  VEC (tree_pair_s) *indices = CLASSTYPE_VCALL_INDICES (type);
+  VEC(tree_pair_s,gc) *indices = CLASSTYPE_VCALL_INDICES (type);
   tree_pair_p p;
   unsigned ix;
 
@@ -2350,7 +2350,7 @@ check_for_override (tree decl, tree ctype)
 void
 warn_hidden (tree t)
 {
-  VEC(tree) *method_vec = CLASSTYPE_METHOD_VEC (t);
+  VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (t);
   tree fns;
   size_t i;
 
@@ -3200,7 +3200,7 @@ walk_subobject_offsets (tree type,
       if (abi_version_at_least (2) && CLASSTYPE_VBASECLASSES (type))
        {
          unsigned ix;
-         VEC (tree) *vbases;
+         VEC(tree,gc) *vbases;
 
          /* Iterate through the virtual base classes of TYPE.  In G++
             3.2, we included virtual bases in the direct base class
@@ -3678,7 +3678,7 @@ check_methods (tree t)
        {
          TYPE_POLYMORPHIC_P (t) = 1;
          if (DECL_PURE_VIRTUAL_P (x))
-           VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (t), x);
+           VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
        }
       /* All user-declared destructors are non-trivial.  */
       if (DECL_DESTRUCTOR_P (x))
@@ -4202,7 +4202,7 @@ static void
 fixup_inline_methods (tree type)
 {
   tree method = TYPE_METHODS (type);
-  VEC (tree) *friends;
+  VEC(tree,gc) *friends;
   unsigned ix;
 
   if (method && TREE_CODE (method) == TREE_VEC)
@@ -4370,7 +4370,7 @@ static tree
 end_of_class (tree t, int include_virtuals_p)
 {
   tree result = size_zero_node;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   tree binfo;
   tree base_binfo;
   tree offset;
@@ -4417,7 +4417,7 @@ static void
 warn_about_ambiguous_bases (tree t)
 {
   int i;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   tree basetype;
   tree binfo;
   tree base_binfo;
@@ -5129,7 +5129,7 @@ finish_struct (tree t, tree attributes)
       CLASSTYPE_PURE_VIRTUALS (t) = NULL;
       for (x = TYPE_METHODS (t); x; x = TREE_CHAIN (x))
        if (DECL_PURE_VIRTUAL_P (x))
-         VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (t), x);
+         VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
       complete_vars (t);
     }
   else
@@ -7130,7 +7130,7 @@ build_vtbl_initializer (tree binfo,
   vtbl_init_data vid;
   unsigned ix;
   tree vbinfo;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   
   /* Initialize VID.  */
   memset (&vid, 0, sizeof (vid));
@@ -7598,7 +7598,7 @@ add_vcall_offset (tree orig_fn, tree binfo, vtbl_init_data *vid)
      offset.  */
   if (vid->binfo == TYPE_BINFO (vid->derived))
     {
-      tree_pair_p elt = VEC_safe_push (tree_pair_s,
+      tree_pair_p elt = VEC_safe_push (tree_pair_s, gc,
                                       CLASSTYPE_VCALL_INDICES (vid->derived),
                                       NULL);
       elt->purpose = orig_fn;
index a27ee193747c0f8653f48075eba8aea69281ffdc..e7bf1106a83344069343b1a77b77f4f9de413126 100644 (file)
@@ -656,7 +656,7 @@ extern GTY(()) tree cp_global_trees[CPTI_MAX];
 
 struct saved_scope GTY(())
 {
-  VEC(cxx_saved_binding) *old_bindings;
+  VEC(cxx_saved_binding,gc) *old_bindings;
   tree old_namespace;
   tree decl_ns_list;
   tree class_name;
@@ -972,7 +972,8 @@ typedef struct tree_pair_s GTY (())
   tree value;
 } tree_pair_s;
 typedef tree_pair_s *tree_pair_p;
-DEF_VEC_GC_O (tree_pair_s);
+DEF_VEC_O (tree_pair_s);
+DEF_VEC_ALLOC_O (tree_pair_s,gc);
 
 /* This is a few header flags for 'struct lang_type'.  Actually,
    all but the first are used only for lang_type_class; they
@@ -1059,15 +1060,15 @@ struct lang_type_class GTY(())
   unsigned dummy : 12;
 
   tree primary_base;
-  VEC (tree_pair_s) *vcall_indices;
+  VEC(tree_pair_s,gc) *vcall_indices;
   tree vtables;
   tree typeinfo_var;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   binding_table nested_udts;
   tree as_base;
-  VEC (tree) *pure_virtuals;
+  VEC(tree,gc) *pure_virtuals;
   tree friend_classes;
-  VEC (tree) * GTY((reorder ("resort_type_method_vec"))) methods;
+  VEC(tree,gc) * GTY((reorder ("resort_type_method_vec"))) methods;
   tree key_method;
   tree decl_list;
   tree template_info;
@@ -4022,7 +4023,7 @@ extern void finish_repo (void);
 
 /* in rtti.c */
 /* A vector of all tinfo decls that haven't been emitted yet.  */
-extern GTY(()) VEC(tree) *unemitted_tinfo_decls;
+extern GTY(()) VEC(tree,gc) *unemitted_tinfo_decls;
 
 extern void init_rtti_processing (void);
 extern tree build_typeid (tree);
index 4fa6b0282f48eefe578f9540738b22970ac3c839..2a3c30cd71d5106c2158e44eba31ec35372b7b2c 100644 (file)
@@ -9340,7 +9340,7 @@ xref_basetypes (tree ref, tree base_list)
 
   if (max_bases)
     {
-      BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, max_bases);
+      BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, gc, max_bases);
       /* An aggregate cannot have baseclasses.  */
       CLASSTYPE_NON_AGGREGATE (ref) = 1;
 
@@ -9356,7 +9356,7 @@ xref_basetypes (tree ref, tree base_list)
 
   if (max_vbases)
     {
-      CLASSTYPE_VBASECLASSES (ref) = VEC_alloc (tree, max_vbases);
+      CLASSTYPE_VBASECLASSES (ref) = VEC_alloc (tree, gc, max_vbases);
 
       if (TYPE_FOR_JAVA (ref))
        error ("Java class %qT cannot have virtual bases", ref);
@@ -10866,7 +10866,7 @@ finish_method (tree decl)
      for String.cc in libg++.  */
   if (DECL_FRIEND_P (fndecl))
     {
-      VEC_safe_push (tree, CLASSTYPE_INLINE_FRIENDS (current_class_type),
+      VEC_safe_push (tree, gc, CLASSTYPE_INLINE_FRIENDS (current_class_type),
                     fndecl);
       decl = void_type_node;
     }
index 0aa81117a7fab9f0e7f986a57ab8f65e55ee99a8..6cc68e249fecc51102aa383c1741c549cbf05e7c 100644 (file)
@@ -622,7 +622,7 @@ check_classfn (tree ctype, tree function, tree template_parms)
   ix = class_method_index_for_fn (complete_type (ctype), function);
   if (ix >= 0)
     {
-      VEC(tree) *methods = CLASSTYPE_METHOD_VEC (ctype);
+      VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (ctype);
       tree fndecls, fndecl = 0;
       bool is_conv_op;
       tree pushed_scope;
index 726309acc6a092baee72b851e58b2e2d808f8791..0abf1b2587440f6c74d007020630d833495367c1 100644 (file)
@@ -465,7 +465,7 @@ sort_mem_initializers (tree t, tree mem_inits)
   tree base, binfo, base_binfo;
   tree sorted_inits;
   tree next_subobject;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   int i;
   int uses_unions_p;
 
@@ -2867,7 +2867,7 @@ push_base_cleanups (void)
   int i;
   tree member;
   tree expr;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
 
   /* Run destructors for all virtual baseclasses.  */
   if (CLASSTYPE_VBASECLASSES (current_class_type))
index 9036f64c80286a6375941c6086039d107c010b04..7e4e95663801feae3d9a3c1a296aa6e7744ca90f 100644 (file)
@@ -537,7 +537,7 @@ do_build_copy_constructor (tree fndecl)
       int cvquals = cp_type_quals (TREE_TYPE (parm));
       int i;
       tree binfo, base_binfo;
-      VEC (tree) *vbases;
+      VEC(tree,gc) *vbases;
 
       /* Initialize all the base-classes with the parameter converted
         to their type so that we get their copy constructor and not
index fda7d345b1f80bef050fadbefeb059d99ddb0620..d2cf1ce3ca022f937f7a752d1a16a10f6bc98104 100644 (file)
@@ -306,7 +306,7 @@ new_class_binding (tree name, tree value, tree type, cxx_scope *scope)
     {
       cp_class_binding *old_base;
       old_base = VEC_index (cp_class_binding, scope->class_shadowed, 0);
-      if (VEC_reserve (cp_class_binding, scope->class_shadowed, -1))
+      if (VEC_reserve (cp_class_binding, gc, scope->class_shadowed, 1))
        {
          /* Fixup the current bindings, as they might have moved.  */
          size_t i;
@@ -325,7 +325,7 @@ new_class_binding (tree name, tree value, tree type, cxx_scope *scope)
       cb = VEC_quick_push (cp_class_binding, scope->class_shadowed, NULL);
     }
   else
-    cb = VEC_safe_push (cp_class_binding, scope->class_shadowed, NULL);
+    cb = VEC_safe_push (cp_class_binding, gc, scope->class_shadowed, NULL);
   
   cb->identifier = name;
   binding = &cb->base;
@@ -4741,7 +4741,7 @@ struct saved_scope *scope_chain;
    *OLD_BINDINGS.  */
 
 static void
-store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
+store_binding (tree id, VEC(cxx_saved_binding,gc) **old_bindings)
 {
   cxx_saved_binding *saved;
 
@@ -4753,7 +4753,7 @@ store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
   
   IDENTIFIER_MARKED (id) = 1;
 
-  saved = VEC_safe_push (cxx_saved_binding, *old_bindings, NULL);
+  saved = VEC_safe_push (cxx_saved_binding, gc, *old_bindings, NULL);
   saved->identifier = id;
   saved->binding = IDENTIFIER_BINDING (id);
   saved->real_type_value = REAL_IDENTIFIER_TYPE_VALUE (id);
@@ -4761,7 +4761,7 @@ store_binding (tree id, VEC(cxx_saved_binding) **old_bindings)
 }
 
 static void
-store_bindings (tree names, VEC(cxx_saved_binding) **old_bindings)
+store_bindings (tree names, VEC(cxx_saved_binding,gc) **old_bindings)
 {
   tree t;
 
@@ -4784,8 +4784,8 @@ store_bindings (tree names, VEC(cxx_saved_binding) **old_bindings)
    objects, rather than a TREE_LIST.  */
 
 static void
-store_class_bindings (VEC(cp_class_binding) *names, 
-                     VEC(cxx_saved_binding) **old_bindings)
+store_class_bindings (VEC(cp_class_binding,gc) *names, 
+                     VEC(cxx_saved_binding,gc) **old_bindings)
 {
   size_t i;
   cp_class_binding *cb;
index 451e6ecffa3589a68cb54f942b2686a5766731b6..5aeb80b7338e583ee8d21bbfcfde12814702eb90 100644 (file)
@@ -89,7 +89,8 @@ typedef struct cxx_saved_binding GTY(())
   tree real_type_value;
 } cxx_saved_binding;
 
-DEF_VEC_GC_O(cxx_saved_binding);
+DEF_VEC_O(cxx_saved_binding);
+DEF_VEC_ALLOC_O(cxx_saved_binding,gc);
 
 extern tree identifier_type_value (tree);
 extern void set_identifier_type_value (tree, tree);
@@ -145,7 +146,8 @@ typedef struct cp_class_binding GTY(())
   tree identifier;
 } cp_class_binding;
 
-DEF_VEC_GC_O(cp_class_binding);
+DEF_VEC_O(cp_class_binding);
+DEF_VEC_ALLOC_O(cp_class_binding,gc);
 
 /* For each binding contour we allocate a binding_level structure
    which records the names defined in that contour.
@@ -200,7 +202,7 @@ struct cp_binding_level GTY(())
 
     /* For the binding level corresponding to a class, the entities
        declared in the class or its base classes.  */
-    VEC(cp_class_binding) *class_shadowed;
+    VEC(cp_class_binding,gc) *class_shadowed;
 
     /* Similar to class_shadowed, but for IDENTIFIER_TYPE_VALUE, and
        is used for all binding levels. The TREE_PURPOSE is the name of
index 8a1137290e7dbd7f2db3a85dcd289c65a8046eea..cb09fa1995f8157e6605f2ec4d1a171a182abb2a 100644 (file)
@@ -66,7 +66,8 @@ typedef struct cp_token GTY (())
 
 /* We use a stack of token pointer for saving token sets.  */
 typedef struct cp_token *cp_token_position;
-DEF_VEC_MALLOC_P (cp_token_position);
+DEF_VEC_P (cp_token_position);
+DEF_VEC_ALLOC_P (cp_token_position,heap);
 
 static const cp_token eof_token =
 {
@@ -104,7 +105,7 @@ typedef struct cp_lexer GTY (())
      called.  The top entry is the most recent position at which we
      began saving tokens.  If the stack is non-empty, we are saving
      tokens.  */
-  VEC (cp_token_position) *GTY ((skip)) saved_tokens;
+  VEC(cp_token_position,heap) *GTY ((skip)) saved_tokens;
 
   /* True if we should output debugging information.  */
   bool debugging_p;
@@ -260,7 +261,8 @@ cp_lexer_new_main (void)
   /* Initially we are not debugging.  */
   lexer->debugging_p = false;
 #endif /* ENABLE_CHECKING */
-  lexer->saved_tokens = VEC_alloc (cp_token_position, CP_SAVED_TOKEN_STACK);
+  lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
+                                  CP_SAVED_TOKEN_STACK);
         
   /* Create the buffer.  */
   alloc = CP_LEXER_BUFFER_SIZE;
@@ -314,7 +316,8 @@ cp_lexer_new_from_tokens (cp_token_cache *cache)
   lexer->next_token = first == last ? (cp_token *)&eof_token : first;
   lexer->last_token = last;
   
-  lexer->saved_tokens = VEC_alloc (cp_token_position, CP_SAVED_TOKEN_STACK);
+  lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
+                                  CP_SAVED_TOKEN_STACK);
 
 #ifdef ENABLE_CHECKING
   /* Initially we are not debugging.  */
@@ -332,7 +335,7 @@ cp_lexer_destroy (cp_lexer *lexer)
 {
   if (lexer->buffer)
     ggc_free (lexer->buffer);
-  VEC_free (cp_token_position, lexer->saved_tokens);
+  VEC_free (cp_token_position, heap, lexer->saved_tokens);
   ggc_free (lexer);
 }
 
@@ -621,7 +624,8 @@ cp_lexer_save_tokens (cp_lexer* lexer)
   if (cp_lexer_debugging_p (lexer))
     fprintf (cp_lexer_debug_stream, "cp_lexer: saving tokens\n");
 
-  VEC_safe_push (cp_token_position, lexer->saved_tokens, lexer->next_token);
+  VEC_safe_push (cp_token_position, heap,
+                lexer->saved_tokens, lexer->next_token);
 }
 
 /* Commit to the portion of the token stream most recently saved.  */
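
The lexer's saved-token stack shows why separating allocation from type pays
off: the same element type can now live in a heap vector, which must be
hidden from the garbage collector with GTY((skip)) when embedded in a GC'd
structure and released with an explicit VEC_free.  A sketch of that
save/restore pattern; the struct and function names are invented, and
VEC_pop is assumed to be part of the vec.h API of this vintage:

typedef struct position_stack GTY (())
{
  /* Heap vectors are invisible to the GC, hence the skip.  */
  VEC(cp_token_position,heap) * GTY ((skip)) saved;
} position_stack;

static void
save_position (position_stack *s, cp_token_position pos)
{
  VEC_safe_push (cp_token_position, heap, s->saved, pos);
}

static cp_token_position
restore_position (position_stack *s)
{
  /* Popping never allocates, so no strategy argument is needed.  */
  return VEC_pop (cp_token_position, s->saved);
}

static void
release_positions (position_stack *s)
{
  VEC_free (cp_token_position, heap, s->saved);
}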
index a832846050ded67f1133481497972c6f28c67bb0..aaecabcbc456d322bd953b9307de67b048eeac5f 100644 (file)
@@ -822,7 +822,7 @@ retrieve_specialization (tree tmpl, tree args,
     {
       tree class_template;
       tree class_specialization;
-      VEC(tree) *methods;
+      VEC(tree,gc) *methods;
       tree fns;
       int idx;
 
@@ -1967,7 +1967,7 @@ check_explicit_specialization (tree declarator,
            }
          else
            {
-             VEC(tree) *methods;
+             VEC(tree,gc) *methods;
              tree ovl;
 
              /* For a type-conversion operator, we cannot do a
index 45f0b087a87b60a688c392ee69ac357df32d2238..9368bfee998ac6fd351cb875b5820662089ea981 100644 (file)
@@ -75,7 +75,7 @@ Boston, MA 02111-1307, USA.  */
 #define TINFO_REAL_NAME(NODE) TREE_PURPOSE (NODE)
 
 /* A vector of all tinfo decls that haven't yet been emitted.  */
-VEC (tree) *unemitted_tinfo_decls;
+VEC(tree,gc) *unemitted_tinfo_decls;
 
 static tree build_headof (tree);
 static tree ifnonnull (tree, tree);
@@ -119,7 +119,7 @@ init_rtti_processing (void)
     = build_qualified_type (type_info_type, TYPE_QUAL_CONST);
   type_info_ptr_type = build_pointer_type (const_type_info_type_node);
 
-  unemitted_tinfo_decls = VEC_alloc (tree, 124);
+  unemitted_tinfo_decls = VEC_alloc (tree, gc, 124);
   
   create_tinfo_types ();
 }
@@ -364,7 +364,7 @@ get_tinfo_decl (tree type)
       pushdecl_top_level_and_finish (d, NULL_TREE);
 
       /* Add decl to the global array of tinfo decls.  */
-      VEC_safe_push (tree, unemitted_tinfo_decls, d);
+      VEC_safe_push (tree, gc, unemitted_tinfo_decls, d);
     }
 
   return d;
@@ -1003,7 +1003,7 @@ get_pseudo_ti_init (tree type, tree var_desc)
                      | (CLASSTYPE_DIAMOND_SHAPED_P (type) << 1));
          tree binfo = TYPE_BINFO (type);
           int nbases = BINFO_N_BASE_BINFOS (binfo);
-         VEC (tree) *base_accesses = BINFO_BASE_ACCESSES (binfo);
+         VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
           tree base_inits = NULL_TREE;
           int ix;
           
@@ -1152,7 +1152,7 @@ get_pseudo_ti_desc (tree type)
       else
        {
          tree binfo = TYPE_BINFO (type);
-         VEC (tree) *base_accesses = BINFO_BASE_ACCESSES (binfo);
+         VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
          tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
          int num_bases = BINFO_N_BASE_BINFOS (binfo);
          
index f6a9b577dfc0c6387a4b56b7c742702d11b64a44..45668a14a94a99acc947fb6b8ed6b8fb885eaa9f 100644 (file)
@@ -641,7 +641,7 @@ dfs_access_in_type (tree binfo, void *data)
        {
          int i;
          tree base_binfo;
-         VEC (tree) *accesses;
+         VEC(tree,gc) *accesses;
          
          /* Otherwise, scan our baseclasses, and pick the most favorable
             access.  */
@@ -1314,7 +1314,7 @@ lookup_conversion_operator (tree class_type, tree type)
     {
       int i;
       tree fn;
-      VEC(tree) *methods = CLASSTYPE_METHOD_VEC (class_type);
+      VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (class_type);
       
       for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
           VEC_iterate (tree, methods, i, fn); ++i)
@@ -1345,7 +1345,7 @@ lookup_conversion_operator (tree class_type, tree type)
 int
 lookup_fnfields_1 (tree type, tree name)
 {
-  VEC(tree) *method_vec;
+  VEC(tree,gc) *method_vec;
   tree fn;
   tree tmp;
   size_t i;
@@ -1658,7 +1658,7 @@ dfs_walk_once (tree binfo, tree (*pre_fn) (tree, void *),
          /* We are at the top of the hierarchy, and can use the
              CLASSTYPE_VBASECLASSES list for unmarking the virtual
              bases.  */
-         VEC (tree) *vbases;
+         VEC(tree,gc) *vbases;
          unsigned ix;
          tree base_binfo;
          
@@ -1766,7 +1766,7 @@ dfs_walk_once_accessible (tree binfo, bool friends_p,
          /* We are at the top of the hierarchy, and can use the
              CLASSTYPE_VBASECLASSES list for unmarking the virtual
              bases.  */
-         VEC (tree) *vbases;
+         VEC(tree,gc) *vbases;
          unsigned ix;
          tree base_binfo;
          
@@ -2000,7 +2000,7 @@ dfs_get_pure_virtuals (tree binfo, void *data)
           virtuals;
           virtuals = TREE_CHAIN (virtuals))
        if (DECL_PURE_VIRTUAL_P (BV_FN (virtuals)))
-         VEC_safe_push (tree, CLASSTYPE_PURE_VIRTUALS (type),
+         VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (type),
                         BV_FN (virtuals));
     }
 
@@ -2270,7 +2270,7 @@ lookup_conversions_r (tree binfo,
   tree child_tpl_convs = NULL_TREE;
   unsigned i;
   tree base_binfo;
-  VEC(tree) *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
+  VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
   tree conv;
 
   /* If we have no conversion operators, then don't look.  */
@@ -2523,7 +2523,7 @@ binfo_for_vbase (tree base, tree t)
 {
   unsigned ix;
   tree binfo;
-  VEC (tree) *vbases;
+  VEC(tree,gc) *vbases;
   
   for (vbases = CLASSTYPE_VBASECLASSES (t), ix = 0;
        VEC_iterate (tree, vbases, ix, binfo); ix++)
index ac678d5cdd5f116ad50ea1e32b5b7de7032f69fa..20e94223b364bbc1e282c4b334b0559ac6b8b02d 100644 (file)
@@ -139,10 +139,11 @@ typedef struct deferred_access GTY(())
   enum deferring_kind deferring_access_checks_kind;
   
 } deferred_access;
-DEF_VEC_GC_O (deferred_access);
+DEF_VEC_O (deferred_access);
+DEF_VEC_ALLOC_O (deferred_access,gc);
 
 /* Data for deferred access checking.  */
-static GTY(()) VEC (deferred_access) *deferred_access_stack;
+static GTY(()) VEC(deferred_access,gc) *deferred_access_stack;
 static GTY(()) unsigned deferred_access_no_check;
 
 /* Save the current deferred access states and start deferred
@@ -159,7 +160,7 @@ push_deferring_access_checks (deferring_kind deferring)
     {
       deferred_access *ptr;
 
-      ptr = VEC_safe_push (deferred_access, deferred_access_stack, NULL);
+      ptr = VEC_safe_push (deferred_access, gc, deferred_access_stack, NULL);
       ptr->deferred_access_checks = NULL_TREE;
       ptr->deferring_access_checks_kind = deferring;
     }
index daa23993425d5259fddf73eaadcd0db2206a46f5..81e34d76dbeddc7b86a6e2f787b8f03273e66594 100644 (file)
@@ -235,7 +235,7 @@ complete_type_check_abstract (tree type)
 int
 abstract_virtuals_error (tree decl, tree type)
 {
-  VEC (tree) *pure;
+  VEC(tree,gc) *pure;
   
   /* This function applies only to classes. Any other entity can never
      be abstract.  */
index f2f5eb8d9e9f2223c89276c5f0c8224ee99c6734..4cbe16b603faf7c43dae548224c1e3dbe7b1a2d6 100644 (file)
@@ -2063,7 +2063,7 @@ dbxout_type (tree type, int full)
          {
            int i;
            tree child;
-           VEC (tree) *accesses = BINFO_BASE_ACCESSES (binfo);
+           VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
            
            if (use_gnu_debug_info_extensions)
              {
index c32c545477f423ac028d88a60cb2015df9ab5b87..1c5ce45cabb07a76f7b2fc1d3ba2eb8e5f52e0cb 100644 (file)
@@ -12004,7 +12004,7 @@ gen_member_die (tree type, dw_die_ref context_die)
   /* First output info about the base classes.  */
   if (binfo)
     {
-      VEC (tree) *accesses = BINFO_BASE_ACCESSES (binfo);
+      VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
       int i;
       tree base;
 
index 096af89a8e3167bfb50e3b551ecf027f5753ed87..f844a5eef1008fb4102f594b9c068cfcd2aa3c29 100644 (file)
@@ -33,8 +33,10 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #define YY_INPUT(BUF,RESULT,SIZE) ((RESULT) = macro_input (BUF,SIZE))
 
 static unsigned macro_input (char *buffer, unsigned);
-static void push_macro_expansion (const char *, unsigned,
-                                 const char *, unsigned);
+static const char *push_macro_expansion (const char *, unsigned,
+                                        const char *, unsigned);
+static char *mangle_macro_name (const char *, unsigned,
+                                       const char *, unsigned);
 static void update_lineno (const char *l, size_t len);
 
 struct fileloc lexer_line;
@@ -237,33 +239,40 @@ ITYPE     {IWORD}({WS}{IWORD})*
   return ENT_YACCUNION;
 }
 
-^"DEF_VEC_"[[:alnum:]_]*{WS}?"("{WS}?{ID}{WS}?")" {
+^"DEF_VEC_"[[:alnum:]_]*{WS}?"("{WS}?{ID}{WS}?(","{WS}?{ID}{WS}?)*")" {
   char *macro, *arg;
   unsigned macro_len, arg_len;
   char *ptr = yytext;
+  const char *additional;
   type_p t;
 
-  /* Locate the macro and argument strings.  */
-  macro = ptr;
-  while (*ptr != '(' && !ISSPACE (*ptr))
-    ptr++;
-  macro_len = ptr - macro;
-  while (*ptr == '(' || ISSPACE (*ptr))
-    ptr++;
-  arg = ptr;
-  while (*ptr != ')' && !ISSPACE (*ptr))
-    ptr++;
-  arg_len = ptr - arg;
+  /* Find the macro name.  */
+  for (macro = ptr; *ptr != '(' && !ISSPACE (*ptr); ptr++)
+    continue;
+  for (macro_len = ptr - macro; !(ISALNUM (*ptr) || *ptr == '_'); ptr++)
+    continue;
 
-  /* Push the macro for later expansion.  */
-  push_macro_expansion (macro, macro_len, arg, arg_len);
+  /* Find the argument(s).  */
+  for (arg = ptr; *ptr != ')'; ptr++)
+    continue;
+  arg_len = ptr - arg;
 
   /* Create the struct and typedef.  */
-  ptr = (char *) xmemdup ("VEC_", 4, 4 + arg_len + 1);
-  memcpy (&ptr[4], arg, arg_len);
-  ptr[4 + arg_len] = 0;
+  ptr = mangle_macro_name ("VEC", 3, arg, arg_len);
+
   t = find_structure (ptr, 0);
   do_typedef (ptr, t, &lexer_line);
+
+  /* Push the macro for later expansion.  */
+  additional = push_macro_expansion (macro, macro_len, arg, arg_len);
+
+  if (additional)
+    {
+      ptr = mangle_macro_name (ptr, strlen (ptr),
+                              additional, strlen (additional));
+      t = find_structure (ptr, 0);
+      do_typedef (ptr, t, &lexer_line);
+    }
 }
 
 <in_struct>{
@@ -299,24 +308,23 @@ ITYPE     {IWORD}({WS}{IWORD})*
   return SCALAR;
 }
 
-"VEC"{WS}?"("{WS}?{ID}{WS}?")" {
+"VEC"{WS}?"("{WS}?{ID}{WS}?(","{WS}?{ID}{WS}?)*")" {
   char *macro, *arg;
   unsigned macro_len, arg_len;
   char *ptr = yytext;
 
-  macro = ptr;
-  while (*ptr != '(' && !ISSPACE (*ptr)) /* )*/
-    ptr++;
-  macro_len = ptr - macro;
-  while (*ptr == '(' || ISSPACE (*ptr))
-    ptr++;
-  arg = ptr;
-  while (*ptr != ')' && !ISSPACE (*ptr))
-    ptr++;
+  /* Find the macro name */
+  for (macro = ptr; *ptr != '(' && !ISSPACE (*ptr); ptr++)
+    continue;
+  for (macro_len = ptr - macro; !(ISALNUM(*ptr) || *ptr == '_'); ptr++)
+    continue;
+
+  /* Find the arguments.  */
+  for (arg = ptr; *ptr != ')'; ptr++)
+    continue;
   arg_len = ptr - arg;
-  ptr = (char *) xmemdup (macro, macro_len, macro_len + arg_len + 2);
-  ptr[macro_len] = '_';
-  memcpy (&ptr[macro_len+1], arg, arg_len);
+
+  ptr = mangle_macro_name (macro, macro_len, arg, arg_len);
   yylval.s = ptr;
   return ID;
 }
@@ -411,14 +419,43 @@ ITYPE     {IWORD}({WS}{IWORD})*
 
 /* Deal with the expansion caused by the DEF_VEC_x macros.  */
 
-typedef struct macro
+/* Mangle a macro and argument list as done by cpp concatenation in
+   the compiler proper.  */
+static char *
+mangle_macro_name (const char *macro, unsigned macro_len,
+                  const char *arg, unsigned arg_len)
+{
+  char *ptr = (char *) xmemdup (macro, macro_len, macro_len + arg_len + 2);
+
+  /* Now copy and concatenate each argument */
+  while (arg_len)
+    {
+      ptr[macro_len++] = '_';
+      for (; arg_len && (ISALNUM(*arg) || *arg == '_'); arg_len--)
+        ptr[macro_len++] = *arg++;
+      for (; arg_len && !(ISALNUM(*arg) || *arg == '_'); arg_len--)
+        arg++;
+    }
+  ptr[macro_len] = 0;
+
+  return ptr;
+}
+
+typedef struct macro_def
 {
   const char *name;
   const char *expansion;
+  const char *additional;
+} macro_def_t;
+
+typedef struct macro
+{
+  const macro_def_t *def;
   struct macro *next;
+  const char *args[10];
 } macro_t;
 
-static const macro_t macro_defs[] = 
+static const macro_def_t macro_defs[] = 
 {
 #define IN_GENGTYPE 1
 #include "vec.h"
@@ -427,11 +464,12 @@ static const macro_t macro_defs[] =
 
 /* Chain of macro expansions to do at end of scanning.  */
 static macro_t *macro_expns;
+static macro_t *macro_expns_end;
 
 /* Push macro NAME (NAME_LEN) with argument ARG (ARG_LEN) onto the
    expansion queue.  We ensure NAME is known at this point.  */
 
-static void
+static const char *
 push_macro_expansion (const char *name, unsigned name_len,
                      const char *arg, unsigned arg_len)
 {
@@ -442,15 +480,51 @@ push_macro_expansion (const char *name, unsigned name_len,
         && !memcmp (name, macro_defs[ix].name, name_len))
       {
         macro_t *expansion = XNEW (macro_t);
-
-       expansion->next = macro_expns;
-       expansion->name = (char *) xmemdup (arg, arg_len, arg_len+1);
-       expansion->expansion = macro_defs[ix].expansion;
-       macro_expns = expansion;
-       return;
+        char *args;
+       unsigned argno, last_arg;
+
+       expansion->def = &macro_defs[ix];
+       expansion->next = NULL;
+       args = (char *) xmemdup (arg, arg_len, arg_len+1);
+       args[arg_len] = 0;
+        for (argno = 0; *args;)
+         {
+           expansion->args[argno++] = args;
+           while (*args && (ISALNUM (*args) || *args == '_'))
+             args++;
+           if (argno == 1)
+             expansion->args[argno++] = "base";
+           if (!*args)
+             break;
+           *args++ = 0;
+           while (*args && !(ISALNUM (*args) || *args == '_'))
+             args++;
+          }
+       last_arg = argno;
+        for (; argno != 10; argno++)
+         expansion->args[argno] = NULL;
+       if (macro_expns_end)
+          macro_expns_end->next = expansion;
+       else
+         macro_expns = expansion;
+       macro_expns_end = expansion;
+       if (macro_defs[ix].additional)
+         {
+           macro_t *expn2 = XNEW (macro_t);
+            memcpy (expn2, expansion, sizeof (*expn2));
+           expansion = expn2;
+           expansion->def += 1;
+           expansion->args[last_arg++] = macro_defs[ix].additional;
+           macro_expns_end->next = expansion;
+           macro_expns_end = expansion;
+         }
+        if (last_arg > 2 && strcmp (expansion->args[last_arg - 1], "heap"))
+         expansion->args[last_arg++] = "GTY (())";
+       return macro_defs[ix].additional;
       }
   error_at_line (&lexer_line, "unrecognized macro `%.*s(%.*s)'",
                 name_len, name, arg_len, arg);
+  return NULL;
 }
 
 /* Attempt to read some input.  Use fread until we're at the end of
@@ -472,15 +546,30 @@ macro_input (char *buffer, unsigned size)
       const char *expn;
       unsigned len;
 
-      for (expn = macro_expns->expansion; *expn; expn++)
+      for (expn = macro_expns->def->expansion; *expn; expn++)
         {
          if (*expn == '#')
            {
+             int argno;
+
+             argno = expn[1] - '0';
+             expn += 1;
+
+             /* Remove inserted space? */
              if (buffer[result-1] == ' ' && buffer[result-2] == '_')
                result--;
-             len = strlen (macro_expns->name);
-             memcpy (&buffer[result], macro_expns->name, len);
-             result += len;
+
+             /* Insert the argument value */
+             if (macro_expns->args[argno])
+               {
+                 len = strlen (macro_expns->args[argno]);
+                 memcpy (&buffer[result], macro_expns->args[argno], len);
+                 result += len;
+               }
+
+             /* Skip next space? */
+             if (expn[1] == ' ' && expn[2] == '_')
+               expn++;
            }
          else
            {
@@ -492,6 +581,8 @@ macro_input (char *buffer, unsigned size)
       if (result > size)
         YY_FATAL_ERROR ("buffer too small to expand macro");
       macro_expns = macro_expns->next;
+      if (!macro_expns)
+        macro_expns_end = NULL;
     }
   return result;
 }
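
The new mangle_macro_name mirrors the token pasting that vec.h itself does
with cpp, so the structure names gengtype records line up with the types the
compiler proper uses.  A concrete trace of the loop above, with illustrative
argument values:

/* mangle_macro_name ("VEC", 3, "tree,gc", 7)   returns "VEC_tree_gc"
   mangle_macro_name ("VEC", 3, "edge", 4)      returns "VEC_edge"

   so a use such as

     static GTY(()) VEC(tree,gc) *v;

   is recorded by gengtype under the same VEC_tree_gc name that the cpp
   concatenation in vec.h produces: each argument is appended after a '_',
   with the separating commas and whitespace dropped.  */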
index 50815f981b1a12284ec13139b7d9874ecb5dc2ab..da24e95255e80d768d39abfdd96d92cf814c2a8a 100644 (file)
  Fourier-Motzkin elimination is used to compute the bounds of the base space
  of the lattice.  */
 
+/* FIXME: I'm sure the vectors used here could be heap allocated.
+   There certainly should be explicit VEC_frees, either way.  (nathan
+   2005/04/14) */
 
-DEF_VEC_GC_P(int);
+DEF_VEC_P(int);
+DEF_VEC_ALLOC_P(int,gc);
 
 static bool perfect_nestify (struct loops *, 
-                            struct loop *, VEC (tree) *, 
-                            VEC (tree) *, VEC (int) *, VEC (tree) *);
+                            struct loop *, VEC(tree,gc) *, 
+                            VEC(tree,gc) *, VEC(int,gc) *, VEC(tree,gc) *);
 /* Lattice stuff that is internal to the code generation algorithm.  */
 
 typedef struct
@@ -1152,8 +1156,8 @@ lambda_loopnest_transform (lambda_loopnest nest, lambda_trans_matrix trans)
 
 static lambda_linear_expression
 gcc_tree_to_linear_expression (int depth, tree expr,
-                              VEC(tree) *outerinductionvars,
-                              VEC(tree) *invariants, int extra)
+                              VEC(tree,gc) *outerinductionvars,
+                              VEC(tree,gc) *invariants, int extra)
 {
   lambda_linear_expression lle = NULL;
   switch (TREE_CODE (expr))
@@ -1248,12 +1252,12 @@ invariant_in_loop_and_outer_loops (struct loop *loop, tree op)
 
 static lambda_loop
 gcc_loop_to_lambda_loop (struct loop *loop, int depth,
-                        VEC (tree) ** invariants,
+                        VEC(tree,gc) ** invariants,
                         tree * ourinductionvar,
-                        VEC (tree) * outerinductionvars,
-                        VEC (tree) ** lboundvars,
-                        VEC (tree) ** uboundvars,
-                        VEC (int) ** steps)
+                        VEC(tree,gc) * outerinductionvars,
+                        VEC(tree,gc) ** lboundvars,
+                        VEC(tree,gc) ** uboundvars,
+                        VEC(int,gc) ** steps)
 {
   tree phi;
   tree exit_cond;
@@ -1403,10 +1407,10 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
   /* One part of the test may be a loop invariant tree.  */
   if (TREE_CODE (TREE_OPERAND (test, 1)) == SSA_NAME
       && invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 1)))
-    VEC_safe_push (tree, *invariants, TREE_OPERAND (test, 1));
+    VEC_safe_push (tree, gc, *invariants, TREE_OPERAND (test, 1));
   else if (TREE_CODE (TREE_OPERAND (test, 0)) == SSA_NAME
           && invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 0)))
-    VEC_safe_push (tree, *invariants, TREE_OPERAND (test, 0));
+    VEC_safe_push (tree, gc, *invariants, TREE_OPERAND (test, 0));
   
   /* The non-induction variable part of the test is the upper bound variable.
    */
@@ -1438,9 +1442,9 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
                                          *invariants, extra);
   uboundresult = build (PLUS_EXPR, TREE_TYPE (uboundvar), uboundvar,
                        build_int_cst (TREE_TYPE (uboundvar), extra));
-  VEC_safe_push (tree, *uboundvars, uboundresult);
-  VEC_safe_push (tree, *lboundvars, lboundvar);
-  VEC_safe_push (int, *steps, stepint);
+  VEC_safe_push (tree, gc, *uboundvars, uboundresult);
+  VEC_safe_push (tree, gc, *lboundvars, lboundvar);
+  VEC_safe_push (int, gc, *steps, stepint);
   if (!ubound)
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1488,7 +1492,9 @@ find_induction_var_from_exit_cond (struct loop *loop)
   return ivarop;
 }
 
-DEF_VEC_GC_P(lambda_loop);
+DEF_VEC_P(lambda_loop);
+DEF_VEC_ALLOC_P(lambda_loop,gc);
+
 /* Generate a lambda loopnest from a gcc loopnest LOOP_NEST.
    Return the new loop nest.  
    INDUCTIONVARS is a pointer to an array of induction variables for the
@@ -1499,18 +1505,18 @@ DEF_VEC_GC_P(lambda_loop);
 lambda_loopnest
 gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
                                 struct loop * loop_nest,
-                                VEC (tree) **inductionvars,
-                                VEC (tree) **invariants,
+                                VEC(tree,gc) **inductionvars,
+                                VEC(tree,gc) **invariants,
                                 bool need_perfect_nest)
 {
   lambda_loopnest ret;
   struct loop *temp;
   int depth = 0;
   size_t i;
-  VEC (lambda_loop) *loops = NULL;
-  VEC (tree) *uboundvars = NULL;
-  VEC (tree) *lboundvars  = NULL;
-  VEC (int) *steps = NULL;
+  VEC(lambda_loop,gc) *loops = NULL;
+  VEC(tree,gc) *uboundvars = NULL;
+  VEC(tree,gc) *lboundvars  = NULL;
+  VEC(int,gc) *steps = NULL;
   lambda_loop newloop;
   tree inductionvar = NULL;
   
@@ -1524,8 +1530,8 @@ gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
                                         &steps);
       if (!newloop)
        return NULL;
-      VEC_safe_push (tree, *inductionvars, inductionvar);
-      VEC_safe_push (lambda_loop, loops, newloop);
+      VEC_safe_push (tree, gc, *inductionvars, inductionvar);
+      VEC_safe_push (lambda_loop, gc, loops, newloop);
       temp = temp->inner;
     }
   if (need_perfect_nest)
@@ -1559,7 +1565,7 @@ gcc_loopnest_to_lambda_loopnest (struct loops *currloops,
 
 static tree
 lbv_to_gcc_expression (lambda_body_vector lbv, 
-                      tree type, VEC (tree) *induction_vars, 
+                      tree type, VEC(tree,gc) *induction_vars, 
                       tree * stmts_to_insert)
 {
   tree stmts, stmt, resvar, name;
@@ -1642,15 +1648,15 @@ static tree
 lle_to_gcc_expression (lambda_linear_expression lle,
                       lambda_linear_expression offset,
                       tree type,
-                      VEC(tree) *induction_vars,
-                      VEC(tree) *invariants,
+                      VEC(tree,gc) *induction_vars,
+                      VEC(tree,gc) *invariants,
                       enum tree_code wrap, tree * stmts_to_insert)
 {
   tree stmts, stmt, resvar, name;
   size_t i;
   tree_stmt_iterator tsi;
   tree iv, invar;
-  VEC(tree) *results = NULL;
+  VEC(tree,gc) *results = NULL;
 
   name = NULL_TREE;
   /* Create a statement list and a linear expression temporary.  */
@@ -1801,7 +1807,7 @@ lle_to_gcc_expression (lambda_linear_expression lle,
          tsi = tsi_last (stmts);
          tsi_link_after (&tsi, stmt, TSI_CONTINUE_LINKING);
        }
-      VEC_safe_push (tree, results, name);
+      VEC_safe_push (tree, gc, results, name);
     }
 
   /* Again, out of laziness, we don't handle this case yet.  It's not
@@ -1839,8 +1845,8 @@ lle_to_gcc_expression (lambda_linear_expression lle,
 
 void
 lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
-                                VEC(tree) *old_ivs,
-                                VEC(tree) *invariants,
+                                VEC(tree,gc) *old_ivs,
+                                VEC(tree,gc) *invariants,
                                 lambda_loopnest new_loopnest,
                                 lambda_trans_matrix transform)
 {
@@ -1848,7 +1854,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
   struct loop *temp;
   size_t i = 0;
   size_t depth = 0;
-  VEC(tree) *new_ivs = NULL;
+  VEC(tree,gc) *new_ivs = NULL;
   tree oldiv;
   
   block_stmt_iterator bsi;
@@ -1883,7 +1889,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
       ivvar = create_tmp_var (type, "lnivtmp");
       add_referenced_tmp_var (ivvar);
 
-      VEC_safe_push (tree, new_ivs, ivvar);
+      VEC_safe_push (tree, gc, new_ivs, ivvar);
 
       newloop = LN_LOOPS (new_loopnest)[i];
 
@@ -2207,7 +2213,7 @@ stmt_uses_op (tree stmt, tree op)
 
 static bool
 can_convert_to_perfect_nest (struct loop *loop,
-                            VEC (tree) *loopivs)
+                            VEC(tree,gc) *loopivs)
 {
   basic_block *bbs;
   tree exit_condition, phi;
@@ -2307,10 +2313,10 @@ can_convert_to_perfect_nest (struct loop *loop,
 static bool
 perfect_nestify (struct loops *loops,
                 struct loop *loop,
-                VEC (tree) *lbounds,
-                VEC (tree) *ubounds,
-                VEC (int) *steps,
-                VEC (tree) *loopivs)
+                VEC(tree,gc) *lbounds,
+                VEC(tree,gc) *ubounds,
+                VEC(int,gc) *steps,
+                VEC(tree,gc) *loopivs)
 {
   basic_block *bbs;
   tree exit_condition;
@@ -2325,7 +2331,7 @@ perfect_nestify (struct loops *loops,
   tree uboundvar;
   tree stmt;
   tree oldivvar, ivvar, ivvarinced;
-  VEC (tree) *phis = NULL;
+  VEC(tree,gc) *phis = NULL;
 
   if (!can_convert_to_perfect_nest (loop, loopivs))
     return false;
@@ -2339,8 +2345,9 @@ perfect_nestify (struct loops *loops,
   /* Push the exit phi nodes that we are moving.  */
   for (phi = phi_nodes (olddest); phi; phi = PHI_CHAIN (phi))
     {
-      VEC_safe_push (tree, phis, PHI_RESULT (phi));
-      VEC_safe_push (tree, phis, PHI_ARG_DEF (phi, 0));
+      VEC_reserve (tree, gc, phis, 2);
+      VEC_quick_push (tree, phis, PHI_RESULT (phi));
+      VEC_quick_push (tree, phis, PHI_ARG_DEF (phi, 0));
     }
   e = redirect_edge_and_branch (single_succ_edge (preheaderbb), headerbb);
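
The hunk above replaces paired VEC_safe_push calls with a single VEC_reserve
followed by VEC_quick_push calls: one reservation guarantees room for both
elements, so the pushes never have to check for or perform reallocation.  A
minimal sketch of that idiom under the new API follows; the function and
variable names are illustrative only and not part of the patch.

    /* Copy N tree pointers into a heap vector, reserving space once so
       the loop can use the non-reallocating quick push.  */
    static VEC(tree,heap) *
    copy_trees_to_vec (tree *items, unsigned n)
    {
      VEC(tree,heap) *v = NULL;
      unsigned i;

      VEC_reserve (tree, heap, v, (int) n);
      for (i = 0; i < n; i++)
        VEC_quick_push (tree, v, items[i]);
      return v;
    }
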
 
index 98fe6bd1ca67698edce84101f1eb71ca1a04f36b..6e145ad4ce7cc86ad4711aabb22fdeb1757e66a4 100644 (file)
@@ -196,11 +196,11 @@ lambda_body_vector lambda_body_vector_compute_new (lambda_trans_matrix,
 void print_lambda_body_vector (FILE *, lambda_body_vector);
 lambda_loopnest gcc_loopnest_to_lambda_loopnest (struct loops *,
                                                 struct loop *,
-                                                VEC(tree) **,
-                                                VEC(tree) **,
+                                                VEC(tree,gc) **,
+                                                VEC(tree,gc) **,
                                                 bool);
-void lambda_loopnest_to_gcc_loopnest (struct loop *, VEC(tree) *,
-                                     VEC(tree) *,
+void lambda_loopnest_to_gcc_loopnest (struct loop *, VEC(tree,gc) *,
+                                     VEC(tree,gc) *,
                                      lambda_loopnest, 
                                      lambda_trans_matrix);
 
index 6065558abe9e43f81de6c549ae7183f5c04faed5..4ab2553188b45a4dc2a722e6b33cae873e2e719f 100644 (file)
@@ -223,7 +223,7 @@ instrument_values (histogram_values values)
          gcc_unreachable ();
        }
     }
-  VEC_free (histogram_value, values);
+  VEC_free (histogram_value, heap, values);
 }
 \f
 
index 28af511d175f6c7588a791705e29d5b4c1babb2b..17680cfc2f3a9a69e473ac5db9fcc563b25b276c 100644 (file)
@@ -110,7 +110,7 @@ static void make_goto_expr_edges (basic_block);
 static edge tree_redirect_edge_and_branch (edge, basic_block);
 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
 static void split_critical_edges (void);
-static bool remove_fallthru_edge (VEC(edge) *);
+static bool remove_fallthru_edge (VEC(edge,gc) *);
 
 /* Various helpers.  */
 static inline bool stmt_starts_bb_p (tree, tree);
@@ -2015,7 +2015,7 @@ remove_bb (basic_block bb)
    happens, all the instructions after the call are no longer
    reachable and must be deleted as dead.  */
 
-VEC(tree) *modified_noreturn_calls;
+VEC(tree,gc) *modified_noreturn_calls;
 
 /* Try to remove superfluous control structures.  */
 
@@ -2172,7 +2172,7 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
 /* Remove any fallthru edge from EV.  Return true if an edge was removed.  */
 
 static bool
-remove_fallthru_edge (VEC(edge) *ev)
+remove_fallthru_edge (VEC(edge,gc) *ev)
 {
   edge_iterator ei;
   edge e;
index d636c0c45cc607e374a0b64828e0145b9b74acb5..d81c4b8d64c288bd670e22b82acee2327c209574 100644 (file)
@@ -254,7 +254,7 @@ dequeue_and_dump (dump_info_p di)
     {
       unsigned ix;
       tree base;
-      VEC (tree) *accesses = BINFO_BASE_ACCESSES (t);
+      VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (t);
 
       dump_child ("type", BINFO_TYPE (t));
 
index f41448bc0e94e5b6da1635e784d8f8597cf9e072..0199206f02c20b935a3348a6705368ef51513d6b 100644 (file)
@@ -151,7 +151,7 @@ mark_stmt_modified (tree t)
   if (ann == NULL)
     ann = create_stmt_ann (t);
   else if (noreturn_call_p (t))
-    VEC_safe_push (tree, modified_noreturn_calls, t);
+    VEC_safe_push (tree, gc, modified_noreturn_calls, t);
   ann->modified = 1;
 }
 
index 8cdadd11947616ad04f4ff28e3fc1dc12e763f0c..992f08a71c4edda585ed0612ae77347fac446e20 100644 (file)
@@ -340,7 +340,7 @@ union tree_ann_d GTY((desc ("ann_type ((tree_ann_t)&%h)")))
   struct stmt_ann_d GTY((tag ("STMT_ANN"))) stmt;
 };
 
-extern GTY(()) VEC(tree) *modified_noreturn_calls;
+extern GTY(()) VEC(tree,gc) *modified_noreturn_calls;
 
 typedef union tree_ann_d *tree_ann_t;
 typedef struct var_ann_d *var_ann_t;
@@ -581,9 +581,6 @@ extern tree make_rename_temp (tree, const char *);
 extern void record_vars (tree);
 extern bool block_may_fallthru (tree block);
 
-typedef tree tree_on_heap;
-DEF_VEC_MALLOC_P (tree_on_heap);
-
 /* In tree-ssa-alias.c  */
 extern void dump_may_aliases_for (FILE *, tree);
 extern void debug_may_aliases_for (tree);
@@ -623,7 +620,7 @@ extern bool tree_ssa_useless_type_conversion (tree);
 extern bool tree_ssa_useless_type_conversion_1 (tree, tree);
 extern void verify_ssa (bool);
 extern void delete_tree_ssa (void);
-extern void register_new_def (tree, VEC (tree_on_heap) **);
+extern void register_new_def (tree, VEC(tree,heap) **);
 extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
 extern bool stmt_references_memory_p (tree);
 
index a836aa2cab81fcd097da4e584f8ebcfdb37fdf8a..58f5f4cdabfb4458daad35f38af9bb957d7b97c9 100644 (file)
@@ -98,10 +98,13 @@ static htab_t def_blocks;
 
    - A NULL node at the top entry is used to mark the last node
      associated with the current block.  */
-static VEC(tree_on_heap) *block_defs_stack;
+static VEC(tree,heap) *block_defs_stack;
 
-/* Basic block vectors used in this file ought to be allocated in the heap.  */
-DEF_VEC_MALLOC_P(int);
+/* Basic block vectors used in this file ought to be allocated in the
+   heap.  We use the pointer vector API, since ints are easily passed
+   by value.  */
+DEF_VEC_P(int);
+DEF_VEC_ALLOC_P(int,heap);
 
 /* Set of existing SSA names being replaced by update_ssa.  */
 static sbitmap old_ssa_names;
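
The comment above explains why ints go through the pointer-vector API:
elements are passed and returned by value, which suits small scalar types.
A minimal sketch of an int work stack built this way (push_work, pop_work
and the variable names are illustrative, not from the patch):

    /* Uses the DEF_VEC_P(int) / DEF_VEC_ALLOC_P(int,heap) pair defined
       above.  */
    static VEC(int,heap) *work;

    static void
    push_work (int item)
    {
      VEC_safe_push (int, heap, work, item);
    }

    static int
    pop_work (void)
    {
      /* Elements are plain ints, passed and returned by value.  */
      return VEC_pop (int, work);
    }
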
@@ -683,10 +686,10 @@ find_idf (bitmap def_blocks, bitmap *dfs)
 {
   bitmap_iterator bi;
   unsigned bb_index;
-  VEC(int) *work_stack;
+  VEC(int,heap) *work_stack;
   bitmap phi_insertion_points;
 
-  work_stack = VEC_alloc (int, n_basic_blocks);
+  work_stack = VEC_alloc (int, heap, n_basic_blocks);
   phi_insertion_points = BITMAP_ALLOC (NULL);
 
   /* Seed the work list with all the blocks in DEF_BLOCKS.  */
@@ -719,12 +722,12 @@ find_idf (bitmap def_blocks, bitmap *dfs)
          /* Use a safe push because if there is a definition of VAR
             in every basic block, then WORK_STACK may eventually have
             more than N_BASIC_BLOCK entries.  */
-         VEC_safe_push (int, work_stack, bb_index);
+         VEC_safe_push (int, heap, work_stack, bb_index);
          bitmap_set_bit (phi_insertion_points, bb_index);
        }
     }
 
-  VEC_free (int, work_stack);
+  VEC_free (int, heap, work_stack);
 
   return phi_insertion_points;
 }
@@ -892,7 +895,7 @@ insert_phi_nodes (bitmap *dfs, bitmap names_to_rename)
    into the stack pointed by BLOCK_DEFS_P.  */
 
 void
-register_new_def (tree def, VEC (tree_on_heap) **block_defs_p)
+register_new_def (tree def, VEC(tree,heap) **block_defs_p)
 {
   tree var = SSA_NAME_VAR (def);
   tree currdef;
@@ -918,7 +921,7 @@ register_new_def (tree def, VEC (tree_on_heap) **block_defs_p)
      definitions for all the variables defined in the block after a recursive
      visit to all its immediately dominated blocks.  If there is no current
      reaching definition, then just record the underlying _DECL node.  */
-  VEC_safe_push (tree_on_heap, *block_defs_p, currdef ? currdef : var);
+  VEC_safe_push (tree, heap, *block_defs_p, currdef ? currdef : var);
 
   /* Set the current reaching definition for VAR to be DEF.  */
   set_current_def (var, def);
@@ -963,7 +966,7 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
     fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
 
   /* Mark the unwind point for this block.  */
-  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
 
   /* Step 1.  Register new definitions for every PHI node in the block.
      Conceptually, all the PHI nodes are executed in parallel and each PHI
@@ -1089,9 +1092,9 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                        basic_block bb ATTRIBUTE_UNUSED)
 {
   /* Restore CURRDEFS to its original state.  */
-  while (VEC_length (tree_on_heap, block_defs_stack) > 0)
+  while (VEC_length (tree, block_defs_stack) > 0)
     {
-      tree tmp = VEC_pop (tree_on_heap, block_defs_stack);
+      tree tmp = VEC_pop (tree, block_defs_stack);
       tree saved_def, var;
 
       if (tmp == NULL_TREE)
@@ -1249,8 +1252,9 @@ register_new_update_single (tree new_name, tree old_name)
      restore the reaching definitions for all the variables
      defined in the block after a recursive visit to all its
      immediately dominated blocks.  */
-  VEC_safe_push (tree_on_heap, block_defs_stack, currdef);
-  VEC_safe_push (tree_on_heap, block_defs_stack, old_name);
+  VEC_reserve (tree, heap, block_defs_stack, 2);
+  VEC_quick_push (tree, block_defs_stack, currdef);
+  VEC_quick_push (tree, block_defs_stack, old_name);
 
   /* Set the current reaching definition for OLD_NAME to be
      NEW_NAME.  */
@@ -1292,7 +1296,7 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
             bb->index);
 
   /* Mark the unwind point for this block.  */
-  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
 
   /* Mark the LHS if any of the arguments flows through an abnormal
      edge.  */
@@ -1362,9 +1366,9 @@ static void
 rewrite_update_fini_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                           basic_block bb ATTRIBUTE_UNUSED)
 {
-  while (VEC_length (tree_on_heap, block_defs_stack) > 0)
+  while (VEC_length (tree, block_defs_stack) > 0)
     {
-      tree var = VEC_pop (tree_on_heap, block_defs_stack);
+      tree var = VEC_pop (tree, block_defs_stack);
       tree saved_def;
       
       /* NULL indicates the unwind stop point for this block (see
@@ -1372,7 +1376,7 @@ rewrite_update_fini_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
       if (var == NULL)
        return;
 
-      saved_def = VEC_pop (tree_on_heap, block_defs_stack);
+      saved_def = VEC_pop (tree, block_defs_stack);
       set_current_def (var, saved_def);
     }
 }
@@ -1603,7 +1607,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
   else
     gcc_unreachable ();
 
-  block_defs_stack = VEC_alloc (tree_on_heap, 10);
+  block_defs_stack = VEC_alloc (tree, heap, 10);
 
   /* Initialize the dominator walker.  */
   init_walk_dominator_tree (&walk_data);
@@ -1629,8 +1633,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
       def_blocks = NULL;
     }
   
-  VEC_free (tree_on_heap, block_defs_stack);
-  block_defs_stack = NULL;
+  VEC_free (tree, heap, block_defs_stack);
 
   timevar_pop (TV_TREE_SSA_REWRITE_BLOCKS);
 }
@@ -2855,15 +2858,15 @@ ssa_rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   /* Step 5.  Restore the current reaching definition for each variable
      referenced in the block (in reverse order).  */
-  while (VEC_length (tree_on_heap, block_defs_stack) > 0)
+  while (VEC_length (tree, block_defs_stack) > 0)
     {
-      tree var = VEC_pop (tree_on_heap, block_defs_stack);
+      tree var = VEC_pop (tree, block_defs_stack);
       tree saved_def;
       
       if (var == NULL)
        break;
 
-      saved_def = VEC_pop (tree_on_heap, block_defs_stack);
+      saved_def = VEC_pop (tree, block_defs_stack);
       set_current_def (var, saved_def);
     }
 }
@@ -2894,8 +2897,9 @@ ssa_register_new_def (tree var, tree def)
      later used by the dominator tree callbacks to restore the reaching
      definitions for all the variables defined in the block after a recursive
      visit to all its immediately dominated blocks.  */
-  VEC_safe_push (tree_on_heap, block_defs_stack, currdef);
-  VEC_safe_push (tree_on_heap, block_defs_stack, var);
+  VEC_reserve (tree, heap, block_defs_stack, 2);
+  VEC_quick_push (tree, block_defs_stack, currdef);
+  VEC_quick_push (tree, block_defs_stack, var);
 
   /* Set the current reaching definition for VAR to be DEF.  */
   set_current_def (var, def);
@@ -2999,7 +3003,7 @@ ssa_rewrite_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
     fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
 
   /* Mark the unwind point for this block.  */
-  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
 
   FOR_EACH_EDGE (e, ei, bb->preds)
     if (e->flags & EDGE_ABNORMAL)
@@ -3194,7 +3198,7 @@ rewrite_ssa_into_ssa (void)
   mark_def_sites_global_data.names_to_rename = snames_to_rename;
   walk_data.global_data = &mark_def_sites_global_data;
 
-  block_defs_stack = VEC_alloc (tree_on_heap, 10);
+  block_defs_stack = VEC_alloc (tree, heap, 10);
 
   /* We do not have any local data.  */
   walk_data.block_local_data_size = 0;
@@ -3284,7 +3288,6 @@ rewrite_ssa_into_ssa (void)
 
   BITMAP_FREE (to_rename);
   
-  VEC_free (tree_on_heap, block_defs_stack);
-  block_defs_stack = NULL;
+  VEC_free (tree, heap, block_defs_stack);
   timevar_pop (TV_TREE_SSA_OTHER);
 }
index f2bd75d3766bf126be8267d3ce85edf93f65ff96..b7e57a63603b0e88365a5a88b494faffb9c7c7d7 100644 (file)
@@ -243,6 +243,9 @@ void
 linear_transform_loops (struct loops *loops)
 {
   unsigned int i;
+  VEC(tree,gc) *oldivs = NULL;  /* FIXME: These should really be on
+                                   the heap.  (nathan 2005/04/15)  */
+  VEC(tree,gc) *invariants = NULL;  /* FIXME: Likewise.  */
   
   for (i = 1; i < loops->num; i++)
     {
@@ -251,8 +254,6 @@ linear_transform_loops (struct loops *loops)
       varray_type dependence_relations;
       struct loop *loop_nest = loops->parray[i];
       struct loop *temp;
-      VEC (tree) *oldivs = NULL;
-      VEC (tree) *invariants = NULL;
       lambda_loopnest before, after;
       lambda_trans_matrix trans;
       bool problem = false;
@@ -273,6 +274,8 @@ linear_transform_loops (struct loops *loops)
            } */
       if (!loop_nest || !loop_nest->inner)
        continue;
+      VEC_truncate (tree, oldivs, 0);
+      VEC_truncate (tree, invariants, 0);
       depth = 1;
       for (temp = loop_nest->inner; temp; temp = temp->inner)
        {
@@ -365,11 +368,11 @@ linear_transform_loops (struct loops *loops)
                                       after, trans);
       if (dump_file)
        fprintf (dump_file, "Successfully transformed loop.\n");
-      oldivs = NULL;
-      invariants = NULL;
       free_dependence_relations (dependence_relations);
       free_data_refs (datarefs);
     }
+  VEC_free (tree, gc, oldivs);
+  VEC_free (tree, gc, invariants);
   scev_reset ();
   update_ssa (TODO_update_ssa);
   rewrite_into_loop_closed_ssa (NULL);
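
The linear_transform_loops hunks above hoist the two scratch vectors out of
the per-loop body: each iteration empties them with VEC_truncate and reuses
the existing allocation, and they are freed once after the loop.  A small
sketch of that reuse pattern, where n, num_items and get_item are
hypothetical names standing in for the per-iteration work:

    VEC(tree,gc) *scratch = NULL;
    unsigned i, j;

    for (i = 0; i < n; i++)
      {
        /* Drop the old contents but keep the allocation for reuse.  */
        VEC_truncate (tree, scratch, 0);
        for (j = 0; j < num_items (i); j++)
          VEC_safe_push (tree, gc, scratch, get_item (i, j));
        /* ... consume scratch ...  */
      }
    VEC_free (tree, gc, scratch);
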
index b926113ded878ab2f8777a2fa00b3898a576f96d..d78ab53dbc464ed37293dbdc4ac01432191f3b02 100644 (file)
@@ -2791,7 +2791,9 @@ typedef struct fieldoff
   HOST_WIDE_INT offset;  
 } *fieldoff_t;
 
-DEF_VEC_MALLOC_P(fieldoff_t);
+DEF_VEC_P (fieldoff_t);  /* FIXME: This can be a vector of struct
+                           fieldoff objects (nathan 2005/04/15)  */
+DEF_VEC_ALLOC_P(fieldoff_t,heap);
 
 /* Return the position, in bits, of FIELD_DECL from the beginning of its
    structure. 
@@ -2816,7 +2818,7 @@ bitpos_of_field (const tree fdecl)
    than just the immediately containing structure.  */
 
 static void
-push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack, 
+push_fields_onto_fieldstack (tree type, VEC(fieldoff_t,heap) **fieldstack, 
                             HOST_WIDE_INT offset)
 {
   fieldoff_t pair;
@@ -2838,7 +2840,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
          pair = xmalloc (sizeof (struct fieldoff));
          pair->field = field;
          pair->offset = offset;
-         VEC_safe_push (fieldoff_t, *fieldstack, pair);
+         VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
        }
     }
   else if (TREE_CODE (field) == FIELD_DECL)
@@ -2846,7 +2848,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
       pair = xmalloc (sizeof (struct fieldoff));
       pair->field = field;
       pair->offset = offset + bitpos_of_field (field);
-      VEC_safe_push (fieldoff_t, *fieldstack, pair);
+      VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
     }
   for (field = TREE_CHAIN (field); field; field = TREE_CHAIN (field))
     {
@@ -2867,7 +2869,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
              pair = xmalloc (sizeof (struct fieldoff));
              pair->field = field;
              pair->offset = offset + bitpos_of_field (field);
-             VEC_safe_push (fieldoff_t, *fieldstack, pair);
+             VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
            }
        }
       else
@@ -2875,7 +2877,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_t) **fieldstack,
          pair = xmalloc (sizeof (struct fieldoff));
          pair->field = field;
          pair->offset = offset + bitpos_of_field (field);
-         VEC_safe_push (fieldoff_t, *fieldstack, pair);
+         VEC_safe_push (fieldoff_t, heap, *fieldstack, pair);
        }
     }
 }
@@ -2944,7 +2946,7 @@ fieldoff_compare (const void *pa, const void *pb)
 static void
 create_overlap_variables_for (tree var)
 {
-  VEC(fieldoff_t) *fieldstack = NULL;
+  VEC(fieldoff_t,heap) *fieldstack = NULL;
   used_part_t up;
   size_t uid = var_ann (var)->uid;
 
@@ -3019,7 +3021,7 @@ create_overlap_variables_for (tree var)
              fo = VEC_pop (fieldoff_t, fieldstack);
              free (fo);
            }
-         VEC_free (fieldoff_t, fieldstack);
+         VEC_free (fieldoff_t, heap, fieldstack);
          return;
        }
       /* Otherwise, create the variables.  */
@@ -3109,7 +3111,7 @@ create_overlap_variables_for (tree var)
 
     }
 
-  VEC_free (fieldoff_t, fieldstack);
+  VEC_free (fieldoff_t, heap, fieldstack);
 }
 
 
index 1d4f9b0b1c20d731beb64addcb50af89b394bc4f..429ba768f5d4bb9ba2834a0a96b5655c83492512 100644 (file)
@@ -94,7 +94,7 @@ static htab_t avail_exprs;
    (null).  When we finish processing the block, we pop off entries and
    remove the expressions from the global hash table until we hit the
    marker.  */
-static VEC(tree_on_heap) *avail_exprs_stack;
+static VEC(tree,heap) *avail_exprs_stack;
 
 /* Stack of trees used to restore the global currdefs to its original
    state after completing optimization of a block and its dominator children.
@@ -107,7 +107,7 @@ static VEC(tree_on_heap) *avail_exprs_stack;
 
    A NULL node is used to mark the last node associated with the
    current block.  */
-static VEC(tree_on_heap) *block_defs_stack;
+static VEC(tree,heap) *block_defs_stack;
 
 /* Stack of statements we need to rescan during finalization for newly
    exposed variables.
@@ -116,7 +116,7 @@ static VEC(tree_on_heap) *block_defs_stack;
    expressions are removed from AVAIL_EXPRS.  Else we may change the
    hash code for an expression and be unable to find/remove it from
    AVAIL_EXPRS.  */
-static VEC(tree_on_heap) *stmts_to_rescan;
+static VEC(tree,heap) *stmts_to_rescan;
 
 /* Structure for entries in the expression hash table.
 
@@ -148,7 +148,7 @@ struct expr_hash_elt
 
    A NULL entry is used to mark the end of pairs which need to be
    restored during finalization of this block.  */
-static VEC(tree_on_heap) *const_and_copies_stack;
+static VEC(tree,heap) *const_and_copies_stack;
 
 /* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
    know their exact value.  */
@@ -159,7 +159,7 @@ static bitmap nonzero_vars;
 
    A NULL entry is used to mark the end of names needing their 
    entry in NONZERO_VARS cleared during finalization of this block.  */
-static VEC(tree_on_heap) *nonzero_vars_stack;
+static VEC(tree,heap) *nonzero_vars_stack;
 
 /* Track whether or not we have changed the control flow graph.  */
 static bool cfg_altered;
@@ -254,7 +254,7 @@ struct vrp_hash_elt
    list to determine which variables need their VRP data updated.
 
    A NULL entry marks the end of the SSA_NAMEs associated with this block.  */
-static VEC(tree_on_heap) *vrp_variables_stack;
+static VEC(tree,heap) *vrp_variables_stack;
 
 struct eq_expr_value
 {
@@ -382,12 +382,12 @@ tree_ssa_dominator_optimize (void)
   /* Create our hash tables.  */
   avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
   vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
-  avail_exprs_stack = VEC_alloc (tree_on_heap, 20);
-  block_defs_stack = VEC_alloc (tree_on_heap, 20);
-  const_and_copies_stack = VEC_alloc (tree_on_heap, 20);
-  nonzero_vars_stack = VEC_alloc (tree_on_heap, 20);
-  vrp_variables_stack = VEC_alloc (tree_on_heap, 20);
-  stmts_to_rescan = VEC_alloc (tree_on_heap, 20);
+  avail_exprs_stack = VEC_alloc (tree, heap, 20);
+  block_defs_stack = VEC_alloc (tree, heap, 20);
+  const_and_copies_stack = VEC_alloc (tree, heap, 20);
+  nonzero_vars_stack = VEC_alloc (tree, heap, 20);
+  vrp_variables_stack = VEC_alloc (tree, heap, 20);
+  stmts_to_rescan = VEC_alloc (tree, heap, 20);
   nonzero_vars = BITMAP_ALLOC (NULL);
   need_eh_cleanup = BITMAP_ALLOC (NULL);
 
@@ -545,12 +545,12 @@ tree_ssa_dominator_optimize (void)
   BITMAP_FREE (nonzero_vars);
   BITMAP_FREE (need_eh_cleanup);
   
-  VEC_free (tree_on_heap, block_defs_stack);
-  VEC_free (tree_on_heap, avail_exprs_stack);
-  VEC_free (tree_on_heap, const_and_copies_stack);
-  VEC_free (tree_on_heap, nonzero_vars_stack);
-  VEC_free (tree_on_heap, vrp_variables_stack);
-  VEC_free (tree_on_heap, stmts_to_rescan);
+  VEC_free (tree, heap, block_defs_stack);
+  VEC_free (tree, heap, avail_exprs_stack);
+  VEC_free (tree, heap, const_and_copies_stack);
+  VEC_free (tree, heap, nonzero_vars_stack);
+  VEC_free (tree, heap, vrp_variables_stack);
+  VEC_free (tree, heap, stmts_to_rescan);
 }
 
 static bool
@@ -850,11 +850,11 @@ dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   /* Push a marker on the stacks of local information so that we know how
      far to unwind when we finalize this block.  */
-  VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
-  VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
-  VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
-  VEC_safe_push (tree_on_heap, nonzero_vars_stack, NULL_TREE);
-  VEC_safe_push (tree_on_heap, vrp_variables_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, nonzero_vars_stack, NULL_TREE);
+  VEC_safe_push (tree, heap, vrp_variables_stack, NULL_TREE);
 
   record_equivalences_from_incoming_edge (bb);
 
@@ -910,10 +910,10 @@ static void
 remove_local_expressions_from_table (void)
 {
   /* Remove all the expressions made available in this block.  */
-  while (VEC_length (tree_on_heap, avail_exprs_stack) > 0)
+  while (VEC_length (tree, avail_exprs_stack) > 0)
     {
       struct expr_hash_elt element;
-      tree expr = VEC_pop (tree_on_heap, avail_exprs_stack);
+      tree expr = VEC_pop (tree, avail_exprs_stack);
 
       if (expr == NULL_TREE)
        break;
@@ -929,9 +929,9 @@ remove_local_expressions_from_table (void)
 static void
 restore_nonzero_vars_to_original_value (void)
 {
-  while (VEC_length (tree_on_heap, nonzero_vars_stack) > 0)
+  while (VEC_length (tree, nonzero_vars_stack) > 0)
     {
-      tree name = VEC_pop (tree_on_heap, nonzero_vars_stack);
+      tree name = VEC_pop (tree, nonzero_vars_stack);
 
       if (name == NULL)
        break;
@@ -947,16 +947,16 @@ restore_nonzero_vars_to_original_value (void)
 static void
 restore_vars_to_original_value (void)
 {
-  while (VEC_length (tree_on_heap, const_and_copies_stack) > 0)
+  while (VEC_length (tree, const_and_copies_stack) > 0)
     {
       tree prev_value, dest;
 
-      dest = VEC_pop (tree_on_heap, const_and_copies_stack);
+      dest = VEC_pop (tree, const_and_copies_stack);
 
       if (dest == NULL)
        break;
 
-      prev_value = VEC_pop (tree_on_heap, const_and_copies_stack);
+      prev_value = VEC_pop (tree, const_and_copies_stack);
       SSA_NAME_VALUE (dest) =  prev_value;
     }
 }
@@ -967,9 +967,9 @@ static void
 restore_currdefs_to_original_value (void)
 {
   /* Restore CURRDEFS to its original state.  */
-  while (VEC_length (tree_on_heap, block_defs_stack) > 0)
+  while (VEC_length (tree, block_defs_stack) > 0)
     {
-      tree tmp = VEC_pop (tree_on_heap, block_defs_stack);
+      tree tmp = VEC_pop (tree, block_defs_stack);
       tree saved_def, var;
 
       if (tmp == NULL_TREE)
@@ -1050,9 +1050,9 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
-         VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
-         VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
-         VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
+         VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
+         VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
+         VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
 
          edge_info = true_edge->aux;
 
@@ -1154,9 +1154,9 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
      To be efficient, we note which variables have had their values
      constrained in this block.  So walk over each variable in the
      VRP_VARIABLEs array.  */
-  while (VEC_length (tree_on_heap, vrp_variables_stack) > 0)
+  while (VEC_length (tree, vrp_variables_stack) > 0)
     {
-      tree var = VEC_pop (tree_on_heap, vrp_variables_stack);
+      tree var = VEC_pop (tree, vrp_variables_stack);
       struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
       void **slot;
 
@@ -1192,15 +1192,15 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 
   /* If we queued any statements to rescan in this block, then
      go ahead and rescan them now.  */
-  while (VEC_length (tree_on_heap, stmts_to_rescan) > 0)
+  while (VEC_length (tree, stmts_to_rescan) > 0)
     {
-      tree stmt = VEC_last (tree_on_heap, stmts_to_rescan);
+      tree stmt = VEC_last (tree, stmts_to_rescan);
       basic_block stmt_bb = bb_for_stmt (stmt);
 
       if (stmt_bb != bb)
        break;
 
-      VEC_pop (tree_on_heap, stmts_to_rescan);
+      VEC_pop (tree, stmts_to_rescan);
       mark_new_vars_to_rename (stmt);
     }
 }
@@ -1436,7 +1436,7 @@ record_var_is_nonzero (tree var)
 
   /* Record this SSA_NAME so that we can reset the global table
      when we leave this block.  */
-  VEC_safe_push (tree_on_heap, nonzero_vars_stack, var);
+  VEC_safe_push (tree, heap, nonzero_vars_stack, var);
 }
 
 /* Enter a statement into the true/false expression hash table indicating
@@ -1455,7 +1455,7 @@ record_cond (tree cond, tree value)
   if (*slot == NULL)
     {
       *slot = (void *) element;
-      VEC_safe_push (tree_on_heap, avail_exprs_stack, cond);
+      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
     }
   else
     free (element);
@@ -1594,8 +1594,9 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
 {
   SSA_NAME_VALUE (x) = y;
 
-  VEC_safe_push (tree_on_heap, const_and_copies_stack, prev_x);
-  VEC_safe_push (tree_on_heap, const_and_copies_stack, x);
+  VEC_reserve (tree, heap, const_and_copies_stack, 2);
+  VEC_quick_push (tree, const_and_copies_stack, prev_x);
+  VEC_quick_push (tree, const_and_copies_stack, x);
 }
 
 
@@ -3125,7 +3126,7 @@ optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
     }
 
   if (may_have_exposed_new_symbols)
-    VEC_safe_push (tree_on_heap, stmts_to_rescan, bsi_stmt (si));
+    VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
 }
 
 /* Replace the RHS of STMT with NEW_RHS.  If RHS can be found in the
@@ -3177,7 +3178,7 @@ update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
      we found a copy of this statement in the second hash table lookup
      we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs.  */
   if (insert)
-    VEC_pop (tree_on_heap, avail_exprs_stack);
+    VEC_pop (tree, avail_exprs_stack);
 
   /* And make sure we record the fact that we modified this
      statement.  */
@@ -3253,7 +3254,7 @@ lookup_avail_expr (tree stmt, bool insert)
   if (*slot == NULL)
     {
       *slot = (void *) element;
-      VEC_safe_push (tree_on_heap, avail_exprs_stack,
+      VEC_safe_push (tree, heap, avail_exprs_stack,
                     stmt ? stmt : element->rhs);
       return NULL_TREE;
     }
@@ -3393,7 +3394,7 @@ record_range (tree cond, basic_block bb)
        VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
       
       VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
-      VEC_safe_push (tree_on_heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
+      VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
     }
 }
 
index 9029dbe3de304550008f7837ffb3ee3945ddc43e..0e705c301df6f13372b0eca979a083189d32ae81 100644 (file)
@@ -1112,7 +1112,8 @@ clean (value_set_t set)
     }
 }
 
-DEF_VEC_MALLOC_P (basic_block);
+DEF_VEC_P (basic_block);
+DEF_VEC_ALLOC_P (basic_block, heap);
 static sbitmap has_abnormal_preds;
 
 /* Compute the ANTIC set for BLOCK.
@@ -1162,15 +1163,15 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
      them.  */
   else
     {
-      VEC (basic_block) * worklist;
+      VEC(basic_block, heap) * worklist;
       edge e;
       size_t i;
       basic_block bprime, first;
       edge_iterator ei;
 
-      worklist = VEC_alloc (basic_block, 2);
+      worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
       FOR_EACH_EDGE (e, ei, block->succs)
-       VEC_safe_push (basic_block, worklist, e->dest);
+       VEC_quick_push (basic_block, worklist, e->dest);
       first = VEC_index (basic_block, worklist, 0);
       set_copy (ANTIC_OUT, ANTIC_IN (first));
 
@@ -1187,7 +1188,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
              node = next;
            }
        }
-      VEC_free (basic_block, worklist);
+      VEC_free (basic_block, heap, worklist);
     }
 
   /* Generate ANTIC_OUT - TMP_GEN.  */
@@ -1271,7 +1272,7 @@ compute_antic (void)
     fprintf (dump_file, "compute_antic required %d iterations\n", num_iterations);
 }
 
-static VEC(tree_on_heap) *inserted_exprs;
+static VEC(tree,heap) *inserted_exprs;
 /* Find a leader for an expression, or generate one using
    create_expression_by_pieces if it's ANTIC but
    complex.  
@@ -1367,7 +1368,7 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
        TREE_OPERAND (newexpr, 0) = name;
        tsi = tsi_last (stmts);
        tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
-       VEC_safe_push (tree_on_heap, inserted_exprs, newexpr);
+       VEC_safe_push (tree, heap, inserted_exprs, newexpr);
        pre_stats.insertions++;
        break;
       }
@@ -1415,7 +1416,7 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
        NECESSARY (newexpr) = 0;
        tsi = tsi_last (stmts);
        tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
-       VEC_safe_push (tree_on_heap, inserted_exprs, newexpr);
+       VEC_safe_push (tree, heap, inserted_exprs, newexpr);
        pre_stats.insertions++;
 
        break;
@@ -1533,7 +1534,7 @@ insert_into_preds_of_block (basic_block block, value_set_node_t node,
   add_referenced_tmp_var (temp);
   temp = create_phi_node (temp, block);
   NECESSARY (temp) = 0; 
-  VEC_safe_push (tree_on_heap, inserted_exprs, temp);
+  VEC_safe_push (tree, heap, inserted_exprs, temp);
   FOR_EACH_EDGE (pred, ei, block->preds)
     add_phi_arg (temp, avail[pred->src->index], pred);
   
@@ -2111,10 +2112,11 @@ eliminate (void)
    this may be a bit faster, and we may want critical edges kept split.  */
 
 /* If OP's defining statement has not already been determined to be necessary,
-   mark that statement necessary. and place it on the WORKLIST.  */ 
+   mark that statement necessary.  Return the statement if it is newly
+   marked necessary, and NULL otherwise.  */
 
-static inline void
-mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
+static inline tree
+mark_operand_necessary (tree op)
 {
   tree stmt;
 
@@ -2125,10 +2127,10 @@ mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
 
   if (NECESSARY (stmt)
       || IS_EMPTY_STMT (stmt))
-    return;
+    return NULL;
 
   NECESSARY (stmt) = 1;
-  VEC_safe_push (tree_on_heap, *worklist, stmt);
+  return stmt;
 }
 
 /* Because we don't follow exactly the standard PRE algorithm, and decide not
@@ -2139,18 +2141,19 @@ mark_operand_necessary (tree op, VEC(tree_on_heap) **worklist)
 static void
 remove_dead_inserted_code (void)
 {
-  VEC (tree_on_heap) *worklist = NULL;
+  VEC(tree,heap) *worklist = NULL;
   int i;
   tree t;
 
-  for (i = 0; VEC_iterate (tree_on_heap, inserted_exprs, i, t); i++)
+  worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
+  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
     {
       if (NECESSARY (t))
-       VEC_safe_push (tree_on_heap, worklist, t);
+       VEC_quick_push (tree, worklist, t);
     }
-  while (VEC_length (tree_on_heap, worklist) > 0)
+  while (VEC_length (tree, worklist) > 0)
     {
-      t = VEC_pop (tree_on_heap, worklist);
+      t = VEC_pop (tree, worklist);
       if (TREE_CODE (t) == PHI_NODE)
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
@@ -2160,11 +2163,17 @@ remove_dead_inserted_code (void)
             predecessor block associated with each PHI alternative as
             necessary.  */
          int k;
+
+         VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
          for (k = 0; k < PHI_NUM_ARGS (t); k++)
             {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
-               mark_operand_necessary (arg, &worklist);
+               {
+                 arg = mark_operand_necessary (arg);
+                 if (arg)
+                   VEC_quick_push (tree, worklist, arg);
+               }
            }
        }
       else
@@ -2181,10 +2190,14 @@ remove_dead_inserted_code (void)
             links).  */
 
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
-           mark_operand_necessary (use, &worklist);
+           {
+             tree n = mark_operand_necessary (use);
+             if (n)
+               VEC_safe_push (tree, heap, worklist, n);
+           }
        }
     }
-  for (i = 0; VEC_iterate (tree_on_heap, inserted_exprs, i, t); i++)
+  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
     {
       if (!NECESSARY (t))
        {
@@ -2205,7 +2218,7 @@ remove_dead_inserted_code (void)
            }
        }
     }
-  VEC_free (tree_on_heap, worklist);
+  VEC_free (tree, heap, worklist);
 }
 /* Initialize data structures used by PRE.  */
 
@@ -2272,7 +2285,7 @@ fini_pre (bool do_fre)
   basic_block bb;
   unsigned int i;
 
-  VEC_free (tree_on_heap, inserted_exprs);
+  VEC_free (tree, heap, inserted_exprs);
   bitmap_obstack_release (&grand_bitmap_obstack);
   free_alloc_pool (value_set_pool);
   free_alloc_pool (bitmap_set_pool);
index 0cac9f462dfcad15ddfd38d278e2bd4d11827dbf..24e6b4dbeaa4da7cda41be4da665cdc6378de6cb 100644 (file)
@@ -143,7 +143,7 @@ static sbitmap bb_in_list;
    definition has changed.  SSA edges are def-use edges in the SSA
    web.  For each D-U edge, we store the target statement or PHI node
    U.  */
-static GTY(()) VEC(tree) *interesting_ssa_edges;
+static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
 
 /* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
    list of SSA edges is split into two.  One contains all SSA edges
@@ -159,7 +159,7 @@ static GTY(()) VEC(tree) *interesting_ssa_edges;
    don't use a separate worklist for VARYING edges, we end up with
    situations where lattice values move from
    UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
-static GTY(()) VEC(tree) *varying_ssa_edges;
+static GTY(()) VEC(tree,gc) *varying_ssa_edges;
 
 
 /* Return true if the block worklist empty.  */
@@ -244,9 +244,9 @@ add_ssa_edge (tree var, bool is_varying)
        {
          STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
          if (is_varying)
-           VEC_safe_push (tree, varying_ssa_edges, use_stmt);
+           VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
          else
-           VEC_safe_push (tree, interesting_ssa_edges, use_stmt);
+           VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
        }
     }
 }
@@ -342,7 +342,7 @@ simulate_stmt (tree stmt)
    SSA edge is added to it in simulate_stmt.  */
 
 static void
-process_ssa_edge_worklist (VEC(tree) **worklist)
+process_ssa_edge_worklist (VEC(tree,gc) **worklist)
 {
   /* Drain the entire worklist.  */
   while (VEC_length (tree, *worklist) > 0)
@@ -462,8 +462,8 @@ ssa_prop_init (void)
   size_t i;
 
   /* Worklists of SSA edges.  */
-  interesting_ssa_edges = VEC_alloc (tree, 20);
-  varying_ssa_edges = VEC_alloc (tree, 20);
+  interesting_ssa_edges = VEC_alloc (tree, gc, 20);
+  varying_ssa_edges = VEC_alloc (tree, gc, 20);
 
   executable_blocks = sbitmap_alloc (last_basic_block);
   sbitmap_zero (executable_blocks);
@@ -506,8 +506,8 @@ ssa_prop_init (void)
 static void
 ssa_prop_fini (void)
 {
-  VEC_free (tree, interesting_ssa_edges);
-  VEC_free (tree, varying_ssa_edges);
+  VEC_free (tree, gc, interesting_ssa_edges);
+  VEC_free (tree, gc, varying_ssa_edges);
   cfg_blocks = NULL;
   sbitmap_free (bb_in_list);
   sbitmap_free (executable_blocks);
index 8efbdde0b916979a7aa5a29973d27093096f7814..1538910993e4f03cf67671b88934aba5c4051b57 100644 (file)
@@ -493,7 +493,8 @@ err:
   internal_error ("verify_flow_sensitive_alias_info failed.");
 }
 
-DEF_VEC_MALLOC_P (bitmap);
+DEF_VEC_P (bitmap);
+DEF_VEC_ALLOC_P (bitmap,heap);
 
 /* Verify that all name tags have different points to sets.
    This algorithm takes advantage of the fact that every variable with the
@@ -512,8 +513,8 @@ verify_name_tags (void)
   size_t i;  
   size_t j;
   bitmap first, second;  
-  VEC (tree) *name_tag_reps = NULL;
-  VEC (bitmap) *pt_vars_for_reps = NULL;
+  VEC(tree,heap) *name_tag_reps = NULL;
+  VEC(bitmap,heap) *pt_vars_for_reps = NULL;
   bitmap type_aliases = BITMAP_ALLOC (NULL);
 
   /* First we compute the name tag representatives and their points-to sets.  */
@@ -539,8 +540,8 @@ verify_name_tags (void)
       if (pi->pt_vars == NULL)
        continue;
 
-      VEC_safe_push (tree, name_tag_reps, ptr);
-      VEC_safe_push (bitmap, pt_vars_for_reps, pi->pt_vars);
+      VEC_safe_push (tree, heap, name_tag_reps, ptr);
+      VEC_safe_push (bitmap, heap, pt_vars_for_reps, pi->pt_vars);
 
       /* Verify that alias set of PTR's type tag is a superset of the
         alias set of PTR's name tag.  */
@@ -605,7 +606,10 @@ verify_name_tags (void)
        }
     } 
 
-  VEC_free (bitmap, pt_vars_for_reps);
+  /* We do not have to free the bitmaps or trees in the vectors, as
+     they are not owned by us.  */
+  VEC_free (bitmap, heap, pt_vars_for_reps);
+  VEC_free (tree, heap, name_tag_reps);
   BITMAP_FREE (type_aliases);
   return;
   
index 9c1fd6c2df7f75ed2f709bb135c418d9f557ca1c..33d4d29f1ec10c1039a8f212dadb92a700dff9c7 100644 (file)
@@ -158,8 +158,10 @@ extern const unsigned char tree_code_length[];
 
 extern const char *const tree_code_name[];
 
-/* A garbage collected vector of trees.  */
-DEF_VEC_GC_P(tree);
+/* Vectors of trees.  */
+DEF_VEC_P(tree);
+DEF_VEC_ALLOC_P(tree,gc);
+DEF_VEC_ALLOC_P(tree,heap);
 
 \f
 /* Classify which part of the compiler has defined a given builtin function.
@@ -1853,13 +1855,13 @@ struct tree_binfo GTY (())
   tree vtable;
   tree virtuals;
   tree vptr_field;
-  VEC(tree) *base_accesses;
+  VEC(tree,gc) *base_accesses;
   tree inheritance;
 
   tree vtt_subvtt;
   tree vtt_vptr;
 
-  VEC(tree) base_binfos;
+  VEC(tree,none) base_binfos;
 };
 
 \f
index 0a3b26bff1cbe0cbace4c2da7211fea7203c33f0..9652c23a1c7f7a1976ca99c4f841435c5326c5cc 100644 (file)
@@ -182,7 +182,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
          hist->hvalue.rtl.insn = insn;
          hist->type = HIST_TYPE_POW2;
          hist->hdata.pow2.may_be_other = 1;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_safe_push (histogram_value, heap, *values, hist);
        }
 
       /* Check whether the divisor is not in fact a constant.  */
@@ -194,7 +194,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
          hist->hvalue.rtl.seq = NULL_RTX;
          hist->hvalue.rtl.insn = insn;
          hist->type = HIST_TYPE_SINGLE_VALUE;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_safe_push (histogram_value, heap, *values, hist);
        }
 
       /* For mod, check whether it is not often a noop (or replaceable by
@@ -214,7 +214,7 @@ rtl_divmod_values_to_profile (rtx insn, histogram_values *values)
          hist->type = HIST_TYPE_INTERVAL;
          hist->hdata.intvl.int_start = 0;
          hist->hdata.intvl.steps = 2;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_safe_push (histogram_value, heap, *values, hist);
        }
       return;
 
@@ -305,7 +305,7 @@ insn_prefetch_values_to_profile (rtx insn, histogram_values* values)
   hist->hvalue.rtl.seq = NULL_RTX;
   hist->hvalue.rtl.insn = insn;
   hist->type = HIST_TYPE_CONST_DELTA;
-  VEC_safe_push (histogram_value, *values, hist);
+  VEC_safe_push (histogram_value, heap, *values, hist);
 
   return true;
 }
@@ -330,19 +330,18 @@ rtl_find_values_to_profile (histogram_values *values)
 {
   rtx insn;
   unsigned i, libcall_level;
+  histogram_value hist;
 
   life_analysis (NULL, PROP_DEATH_NOTES);
 
-  *values = VEC_alloc (histogram_value, 0);
+  *values = NULL;
   libcall_level = 0;
   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
     rtl_values_to_profile (insn, values);
   static_values = *values;
 
-  for (i = 0; i < VEC_length (histogram_value, *values); i++)
+  for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
     {
-      histogram_value hist = VEC_index (histogram_value, *values, i);
-
       switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
@@ -1665,6 +1664,8 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
       op1 = TREE_OPERAND (op, 0);
       op2 = TREE_OPERAND (op, 1);
 
+      VEC_reserve (histogram_value, heap, *values, 3);
+      
       /* Check for a special case where the divisor is power(s) of 2.
          This is more aggressive than the RTL version, under the
         assumption that later phases will reduce / or % by power of 2
@@ -1676,7 +1677,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
          hist->hvalue.tree.stmt = stmt;
          hist->type = HIST_TYPE_POW2;
          hist->hdata.pow2.may_be_other = 1;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_quick_push (histogram_value, *values, hist);
        }
 
       /* Check for the case where the divisor is the same value most
@@ -1687,7 +1688,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
          hist->hvalue.tree.value = op2;
          hist->hvalue.tree.stmt = stmt;
          hist->type = HIST_TYPE_SINGLE_VALUE;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_quick_push (histogram_value, *values, hist);
        }
 
       /* For mod, check whether it is not often a noop (or replaceable by
@@ -1700,7 +1701,7 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
          hist->type = HIST_TYPE_INTERVAL;
          hist->hdata.intvl.int_start = 0;
          hist->hdata.intvl.steps = 2;
-         VEC_safe_push (histogram_value, *values, hist);
+         VEC_quick_push (histogram_value, *values, hist);
        }
       return;
 
@@ -1725,8 +1726,9 @@ tree_find_values_to_profile (histogram_values *values)
   block_stmt_iterator bsi;
   tree stmt;
   unsigned int i;
-
-  *values = VEC_alloc (histogram_value, 0);
+  histogram_value hist;
+  
+  *values = NULL;
   FOR_EACH_BB (bb)
     for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
       {
@@ -1735,10 +1737,8 @@ tree_find_values_to_profile (histogram_values *values)
       }
   static_values = *values;
   
-  for (i = 0; i < VEC_length (histogram_value, *values); i++)
+  for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
     {
-      histogram_value hist = VEC_index (histogram_value, *values, i);
-
       switch (hist->type)
         {
        case HIST_TYPE_INTERVAL:
@@ -1817,6 +1817,6 @@ bool
 value_profile_transformations (void)
 {
   bool retval = (value_prof_hooks->value_profile_transformations) ();
-  VEC_free (histogram_value, static_values);
+  VEC_free (histogram_value, heap, static_values);
   return retval;
 }
index 08f4d0043e3e96870e5ead3ecd2965a98295de9c..8121a089c9113681c9aa0443bde7b1819d3555db 100644 (file)
@@ -75,9 +75,10 @@ struct histogram_value_t
 
 typedef struct histogram_value_t *histogram_value;
 
-DEF_VEC_MALLOC_P(histogram_value);
+DEF_VEC_P(histogram_value);
+DEF_VEC_ALLOC_P(histogram_value,heap);
 
-typedef VEC(histogram_value) *histogram_values;
+typedef VEC(histogram_value,heap) *histogram_values;
 
 /* Hooks registration.  */
 extern void rtl_register_value_prof_hooks (void);
index a6e67e4caefa9513b0d23790fcf867c9e6f7f768..6d3f443c8f3fec52cd4eaebd6223444325d4aad6 100644 (file)
@@ -4426,17 +4426,17 @@ globalize_decl (tree decl)
    of an alias.  This requires that the decl have been defined.  Aliases
    that precede their definition have to be queued for later processing.  */
 
-struct alias_pair GTY(())
+typedef struct alias_pair GTY(())
 {
   tree decl;
   tree target;
-};
-typedef struct alias_pair *alias_pair;
+} alias_pair;
 
 /* Define gc'd vector type.  */
-DEF_VEC_GC_P(alias_pair);
+DEF_VEC_O(alias_pair);
+DEF_VEC_ALLOC_O(alias_pair,gc);
 
-static GTY(()) VEC(alias_pair) *alias_pairs;
+static GTY(()) VEC(alias_pair,gc) *alias_pairs;
 
 /* Given an assembly name, find the decl it is associated with.  At the
    same time, mark it needed for cgraph.  */
@@ -4534,7 +4534,7 @@ void
 finish_aliases_1 (void)
 {
   unsigned i;
-  alias_pair p;
+  alias_pair *p;
 
   for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
     {
@@ -4558,12 +4558,12 @@ void
 finish_aliases_2 (void)
 {
   unsigned i;
-  alias_pair p;
+  alias_pair *p;
 
   for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
     do_assemble_alias (p->decl, p->target);
 
-  alias_pairs = NULL;
+  VEC_truncate (alias_pair, alias_pairs, 0);
 }
 
 /* Emit an assembler directive to make the symbol for DECL an alias to
@@ -4610,12 +4610,9 @@ assemble_alias (tree decl, tree target)
     do_assemble_alias (decl, target);
   else
     {
-      alias_pair p;
-
-      p = ggc_alloc (sizeof (struct alias_pair));
+      alias_pair *p = VEC_safe_push (alias_pair, gc, alias_pairs, NULL);
       p->decl = decl;
       p->target = target;
-      VEC_safe_push (alias_pair, alias_pairs, p);
     }
 }
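
The alias_pair hunks above also show the object-vector side of the new API:
DEF_VEC_O plus DEF_VEC_ALLOC_O define a vector holding the structures
themselves, and assemble_alias pushes NULL so that VEC_safe_push reserves a
slot and returns its address, which is then filled in place.  A sketch of
the same idiom with hypothetical names (str_pair, record_str_pair):

    typedef struct str_pair GTY(())
    {
      tree first;
      tree second;
    } str_pair;

    DEF_VEC_O(str_pair);           /* base object vector */
    DEF_VEC_ALLOC_O(str_pair,gc);  /* garbage-collected variant */

    static GTY(()) VEC(str_pair,gc) *str_pairs;

    static void
    record_str_pair (tree first, tree second)
    {
      /* Pushing NULL reserves the slot and returns its address, so the
         element is built in place instead of being copied in.  */
      str_pair *p = VEC_safe_push (str_pair, gc, str_pairs, NULL);

      p->first = first;
      p->second = second;
    }
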
 
index c638ead9bf5e9de7d2159926e6a09615e7cb8e96..67643b5abd29a16a49154517ad0fa8ac99503953 100644 (file)
--- a/gcc/vec.c
+++ b/gcc/vec.c
@@ -34,9 +34,54 @@ struct vec_prefix
   void *vec[1];
 };
 
-/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
-   0.  If RESERVE < 0 increase the current allocation exponentially.
-   VEC can be NULL, to create a new vector.  */
+/* Calculate the new ALLOC value, making sure that abs(RESERVE) slots
+   are free.  If RESERVE < 0 grow exactly, otherwise grow
+   exponentially.  */
+
+static inline unsigned
+calculate_allocation (const struct vec_prefix *pfx, int reserve)
+{
+  unsigned alloc = 0;
+  unsigned num = 0;
+
+  if (pfx)
+    {
+      alloc = pfx->alloc;
+      num = pfx->num;
+    }
+  else if (!reserve)
+    /* If there's no prefix, and we've not requested anything, then we
+       will create a NULL vector.  */
+    return 0;
+  
+  /* We must have run out of room.  */
+  gcc_assert (alloc - num < (unsigned)(reserve < 0 ? -reserve : reserve));
+  
+  if (reserve < 0)
+    /* Exact size.  */
+    alloc = num + -reserve;
+  else
+    {
+      /* Exponential growth. */
+      if (!alloc)
+       alloc = 4;
+      else if (alloc < 16)
+       /* Double when small.  */
+       alloc = alloc * 2;
+      else
+       /* Grow slower when large.  */
+       alloc = (alloc * 3 / 2);
+      
+      /* If this is still too small, set it to the right size. */
+      if (alloc < num + reserve)
+       alloc = num + reserve;
+    }
+  return alloc;
+}
+
+/* Ensure there are at least abs(RESERVE) free slots in VEC.  If
+   RESERVE < 0 grow exactly, else grow exponentially.  As a special
+   case, if VEC is NULL and RESERVE is 0, no vector will be created.  */
 
 void *
 vec_gc_p_reserve (void *vec, int reserve MEM_STAT_DECL)
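
To make the growth policy of calculate_allocation concrete, here is a small
standalone sketch (not part of the patch) that mirrors the exponential
branch and prints the sizes a repeatedly extended vector would move through:

    #include <stdio.h>

    /* Mirror of the exponential branch above: start at 4, double while
       small, grow by 3/2 once large, and never return less than
       NUM + RESERVE.  */
    static unsigned
    next_alloc (unsigned alloc, unsigned num, unsigned reserve)
    {
      if (!alloc)
        alloc = 4;
      else if (alloc < 16)
        alloc = alloc * 2;
      else
        alloc = alloc * 3 / 2;
      if (alloc < num + reserve)
        alloc = num + reserve;
      return alloc;
    }

    int
    main (void)
    {
      unsigned alloc = 0, i;

      /* Growing a full vector one element at a time gives
         4, 8, 16, 24, 36, 54, 81.  */
      for (i = 0; i < 7; i++)
        {
          alloc = next_alloc (alloc, alloc, 1);
          printf ("%u\n", alloc);
        }
      return 0;
    }
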
@@ -46,27 +91,19 @@ vec_gc_p_reserve (void *vec, int reserve MEM_STAT_DECL)
                           PASS_MEM_STAT);
 }
 
-/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
-   0.  If RESERVE < 0, increase the current allocation exponentially.
-   VEC can be NULL, in which case a new vector is created.  The
-   vector's trailing array is at VEC_OFFSET offset and consists of
-   ELT_SIZE sized elements.  */
+/* As vec_gc_p_reserve, but for object vectors.  The vector's trailing
+   array is at VEC_OFFSET offset and consists of ELT_SIZE sized
+   elements.  */
 
 void *
 vec_gc_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
                   MEM_STAT_DECL)
 {
   struct vec_prefix *pfx = vec;
-  unsigned alloc = pfx ? pfx->num : 0;
-
-  if (reserve >= 0)
-    alloc += reserve;
-  else if (alloc)
-    alloc *= 2;
-  else
-    alloc = 4;
-
-  gcc_assert (!pfx || pfx->alloc < alloc);
+  unsigned alloc = calculate_allocation (pfx, reserve);
+  
+  if (!alloc)
+    return NULL;
   
   vec = ggc_realloc_stat (vec, vec_offset + alloc * elt_size PASS_MEM_STAT);
   ((struct vec_prefix *)vec)->alloc = alloc;
@@ -76,17 +113,7 @@ vec_gc_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
   return vec;
 }
 
-/* Explicitly release a vector.  */
-
-void
-vec_gc_free (void *vec)
-{
-  ggc_free (vec);
-}
-
-/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
-   0.  If RESERVE < 0 increase the current allocation exponentially.
-   VEC can be NULL, to create a new vector.  */
+/* As for vec_gc_p_reserve, but for heap allocated vectors.  */
 
 void *
 vec_heap_p_reserve (void *vec, int reserve MEM_STAT_DECL)
@@ -96,27 +123,17 @@ vec_heap_p_reserve (void *vec, int reserve MEM_STAT_DECL)
                             PASS_MEM_STAT);
 }
 
-/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
-   0.  If RESERVE < 0, increase the current allocation exponentially.
-   VEC can be NULL, in which case a new vector is created.  The
-   vector's trailing array is at VEC_OFFSET offset and consists of
-   ELT_SIZE sized elements.  */
+/* As for vec_gc_o_reserve, but for heap allocated vectors.  */
 
 void *
 vec_heap_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
                    MEM_STAT_DECL)
 {
   struct vec_prefix *pfx = vec;
-  unsigned alloc = pfx ? pfx->num : 0;
-
-  if (reserve >= 0)
-    alloc += reserve;
-  else if (alloc)
-    alloc *= 2;
-  else
-    alloc = 4;
+  unsigned alloc = calculate_allocation (pfx, reserve);
 
-  gcc_assert (!pfx || pfx->alloc < alloc);
+  if (!alloc)
+    return NULL;
   
   vec = xrealloc (vec, vec_offset + alloc * elt_size);
   ((struct vec_prefix *)vec)->alloc = alloc;
@@ -126,14 +143,6 @@ vec_heap_o_reserve (void *vec, int reserve, size_t vec_offset, size_t elt_size
   return vec;
 }
 
-/* Explicitly release a vector.  */
-
-void
-vec_heap_free (void *vec)
-{
-  free (vec);
-}
-
 #if ENABLE_CHECKING
 /* Issue a vector domain error, and then fall over.  */
 
index 945a4137f6b172e4e872709e1b3735fb2b9364cc..43aeb817d64034c11e5ab242cc3aa5bfd0a97350 100644 (file)
--- a/gcc/vec.h
+++ b/gcc/vec.h
@@ -37,7 +37,9 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    latter case the pointers are dereferenced and the objects copied
    into the vector.  Therefore, when using a vector of pointers, the
    objects pointed to must be long lived, but when dealing with a
-   vector of objects, the source objects need not be.
+   vector of objects, the source objects need not be.  The vector of
+   pointers API is also appropriate for small, register-sized objects
+   like integers.
 
    There are both 'index' and 'iterate' accessors.  The iterator
    returns a boolean iteration condition and updates the iteration
@@ -63,9 +65,13 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    vector, if needed.  Reallocation causes an exponential increase in
    vector size.  If you know you will be adding N elements, it would
    be more efficient to use the reserve operation before adding the
-   elements with the 'quick' operation.  You may also use the reserve
-   operation with a -1 operand, to gain control over exactly when
-   reallocation occurs.
+   elements with the 'quick' operation.  Reserving ensures there are
+   at least as many free slots as you ask for, growing the allocation
+   exponentially if there are too few spare slots.  If you want to
+   reserve a specific number of slots, but do not want the exponential
+   increase (for instance, you know this is the last allocation), use
+   a negative number for reservation.  You can also create a vector of
+   a specific size from the outset.
 
    You should prefer the push and pop operations, as they append and
    remove from the end of the vector. If you need to remove several
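
Assuming the semantics described above, the two reservation styles would
look roughly like this (v, w and n are illustrative, and the negative count
is passed straight through to request exact growth):

    VEC(tree,heap) *v = NULL;
    VEC(tree,heap) *w;

    /* Exponential growth: ensure at least N free slots, possibly
       over-allocating so that later pushes stay cheap.  */
    VEC_reserve (tree, heap, v, n);

    /* Exact growth: this is known to be the last size change, so ask
       for exactly N free slots via a negative count.  */
    VEC_reserve (tree, heap, v, -n);

    /* Or create a vector with room for N elements from the start.  */
    w = VEC_alloc (tree, heap, n);
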
@@ -74,39 +80,51 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    vector.  There are two remove operations, one which preserves the
    element ordering 'ordered_remove', and one which does not
    'unordered_remove'.  The latter function copies the end element
-   into the removed slot, rather than invoke a memmove operation.
-   The 'lower_bound' function will determine where to place an item in the
+   into the removed slot, rather than invoke a memmove operation.  The
+   'lower_bound' function will determine where to place an item in the
    array using insert that will maintain sorted order.
 
-   Both garbage collected and explicitly managed vector types are
-   creatable.  The allocation mechanism is specified when the type is
-   defined, and is therefore part of the type.
+   When a vector type is defined, a non-memory-managed base version is
+   created first.  You can then define either or both garbage-collected
+   and heap-allocated versions.  The allocation mechanism is specified
+   when the type is defined, and is therefore part of the type.  If
+   you need both gc'd and heap-allocated versions, you must still have
+   *exactly* one definition of the common non-memory-managed base vector.
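
A minimal sketch, using the existing 'tree' typedef, of defining the one base
and both managed variants:

   DEF_VEC_P(tree);             // the single base definition
   DEF_VEC_ALLOC_P(tree,gc);    // gc'd variant built on that base
   DEF_VEC_ALLOC_P(tree,heap);  // heap variant built on the same base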
    
    If you need to directly manipulate a vector, then the 'address'
    accessor will return the address of the start of the vector.  Also
    the 'space' predicate will tell you whether there is spare capacity
    in the vector.  You will not normally need to use these two functions.
    
-   Vector types are defined using a DEF_VEC_{GC,MALLOC}_{O,P}(TYPEDEF)
-   macro, and variables of vector type are declared using a
-   VEC(TYPEDEF) macro.  The tags GC and MALLOC specify the allocation
-   method -- garbage collected or explicit malloc/free calls.  The
-   characters O and P indicate whether TYPEDEF is a pointer (P) or
-   object (O) type.
+   Vector types are defined using a DEF_VEC_{O,P}(TYPEDEF) macro, to
+   get the non-memory-managed base version, and then a
+   DEF_VEC_ALLOC_{O,P}(TYPEDEF,ALLOC) macro to get memory-managed
+   vectors.  Variables of vector type are declared using a
+   VEC(TYPEDEF,ALLOC) macro.  The ALLOC argument specifies the
+   allocation strategy: 'gc' or 'heap' for garbage-collected and
+   heap-allocated vectors respectively, or 'none' for a vector that
+   must be explicitly allocated (for instance as a trailing array of
+   another structure).  The characters O and P indicate whether
+   TYPEDEF is a pointer (P) or object (O) type.  Be careful to pick
+   the correct one, as you'll get an awkward and inefficient API if
+   you pick the wrong one.  There is a check, which results in a
+   compile-time warning, for the P versions, but there is no check for
+   the O versions, as that is not possible in plain C.
 
    An example of their use would be,
 
-   DEF_VEC_GC_P(tree); // define a gc'd vector of tree pointers.  This must
-                       // appear at file scope.
+   DEF_VEC_P(tree);   // non-managed tree vector.
+   DEF_VEC_ALLOC_P(tree,gc);   // gc'd vector of tree pointers.  This must
+                               // appear at file scope.
 
    struct my_struct {
-     VEC(tree) *v;      // A (pointer to) a vector of tree pointers.
+     VEC(tree,gc) *v;      // A (pointer to) a vector of tree pointers.
    };
 
    struct my_struct *s;
 
    if (VEC_length(tree,s->v)) { we have some contents }
-   VEC_safe_push(tree,s->v,decl); // append some decl onto the end
+   VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
    for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
      { do something with elt }
 
@@ -114,9 +132,11 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 
 /* Macros to invoke API calls.  A single macro works for both pointer
    and object vectors, but the argument and return types might well be
-   different.  In each macro, TDEF is the typedef of the vector
-   elements.  Some of these macros pass the vector, V, by reference
-   (by taking its address), this is noted in the descriptions.  */
+   different.  In each macro, T is the typedef of the vector elements,
+   and A is the allocation strategy.  The allocation strategy is only
+   present when it is required.  Some of these macros pass the vector,
+   V, by reference (by taking its address), this is noted in the
+   descriptions.  */
 
 /* Length of vector
    unsigned VEC_T_length(const VEC(T) *v);
@@ -124,7 +144,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    Return the number of active elements in V.  V can be NULL, in which
    case zero is returned.  */
 
-#define VEC_length(TDEF,V)     (VEC_OP(TDEF,length)(V))
+#define VEC_length(T,V)        (VEC_OP(T,base,length)(VEC_BASE(V)))
 
 /* Get the final element of the vector.
    T VEC_T_last(VEC(T) *v); // Pointer
@@ -132,7 +152,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 
    Return the final element.  If V is empty,  abort.  */
 
-#define VEC_last(TDEF,V)       (VEC_OP(TDEF,last)(V VEC_CHECK_INFO))
+#define VEC_last(T,V)  (VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))
 
 /* Index into vector
    T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
@@ -141,7 +161,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    Return the IX'th element.  If IX is outside the domain of V,
    abort.  */
 
-#define VEC_index(TDEF,V,I)    (VEC_OP(TDEF,index)(V,I VEC_CHECK_INFO))
+#define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))
 
 /* Iterate over vector
    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
@@ -154,22 +174,22 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
      for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
        continue;  */
 
-#define VEC_iterate(TDEF,V,I,P)        (VEC_OP(TDEF,iterate)(V,I,&(P)))
+#define VEC_iterate(T,V,I,P)   (VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))
 
 /* Allocate new vector.
-   VEC(T) *VEC_T_alloc(int reserve);
+   VEC(T,A) *VEC_T_A_alloc(int reserve);
 
    Allocate a new vector with space for RESERVE objects.  If RESERVE
-   is <= 0, a default number of slots are created.  */
+   is zero, NO vector is created.  */
 
-#define VEC_alloc(TDEF,A)      (VEC_OP(TDEF,alloc)(A MEM_STAT_INFO))
+#define VEC_alloc(T,A,N)       (VEC_OP(T,A,alloc)(N MEM_STAT_INFO))
 
 /* Free a vector.
-   void VEC_T_alloc(VEC(T) *&);
+   void VEC_T_A_free(VEC(T,A) *&);
 
    Free a vector and set it to NULL.  */
 
-#define VEC_free(TDEF,V)       (VEC_OP(TDEF,free)(&V))
+#define VEC_free(T,A,V)        (VEC_OP(T,A,free)(&V))
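
A sketch of a matched allocate/free pair under the two-argument form
('scratch' is illustrative):

   VEC(tree,heap) *scratch = VEC_alloc (tree, heap, 16);  // space for 16 elements
   /* ... use scratch ... */
   VEC_free (tree, heap, scratch);          // frees and sets scratch to NULL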
 
 /* Use these to determine the required size and initialization of a
    vector embedded within another structure (as the final member).
@@ -179,34 +199,34 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    
    These allow the caller to perform the memory allocation.  */
 
-#define VEC_embedded_size(TDEF,A)      (VEC_OP(TDEF,embedded_size)(A))
-#define VEC_embedded_init(TDEF,O,A)    (VEC_OP(TDEF,embedded_init)(O,A))
+#define VEC_embedded_size(T,N)  (VEC_OP(T,base,embedded_size)(N))
+#define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))
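
A hedged sketch of an embedded vector; 'struct my_container', 'n' and 'c' are
illustrative, and xmalloc stands in for whatever allocator the surrounding
code uses:

   struct my_container {
     int other_field;
     VEC(tree,none) v;          // embedded vector, final member
   };

   size_t sz = offsetof (struct my_container, v) + VEC_embedded_size (tree, n);
   struct my_container *c = xmalloc (sz);
   VEC_embedded_init (tree, &c->v, n);      // zero length, n slots of capacity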
 
 /* Determine if a vector has additional capacity.
    
    int VEC_T_space (VEC(T) *v,int reserve)
 
-   If V has space for RESERVE additional entries, return nonzero.  If
-   RESERVE is < 0, ensure there is at least one space slot.  You
+   If V has space for RESERVE additional entries, return nonzero.  You
    usually only need to use this if you are doing your own vector
    reallocation, for instance on an embedded vector.  This returns
    nonzero in exactly the same circumstances that VEC_T_reserve
    will.  */
 
-#define VEC_space(TDEF,V,R)    (VEC_OP(TDEF,space)(V,R))
+#define VEC_space(T,V,R) \
+       (VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))
 
 /* Reserve space.
-   int VEC_T_reserve(VEC(T) *&v, int reserve);
+   int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);
 
-   Ensure that V has at least RESERVE slots available, if RESERVE is
-   >= 0.  If RESERVE < 0, ensure that there is at least one spare
-   slot.  These differ in their reallocation behavior, the first will
-   not create additional headroom, but the second mechanism will
-   perform the usual exponential headroom increase.  Note this can
-   cause V to be reallocated.  Returns nonzero iff reallocation
-   actually occurred.  */
+   Ensure that V has at least abs(RESERVE) slots available.  The
+   signedness of RESERVE determines the reallocation behavior.  A
+   negative value will not create additional headroom beyond that
+   requested.  A positive value will create additional headroom.  Note
+   this can cause V to be reallocated.  Returns nonzero iff
+   reallocation actually occurred.  */
 
-#define VEC_reserve(TDEF,V,R)  (VEC_OP(TDEF,reserve)(&(V),R MEM_STAT_INFO))
+#define VEC_reserve(T,A,V,R)   \
+       (VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
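
Since reallocation can move the vector, the return value is the signal to
drop any cached pointers into it; a brief sketch (v, count and cached_elt are
illustrative):

   if (VEC_reserve (tree, heap, v, count))
     cached_elt = NULL;         // v was reallocated; pointers into it are stale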
 
 /* Push object with no reallocation
    T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
@@ -217,19 +237,19 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    case NO initialization is performed.  Aborts if there is
    insufficient space in the vector.  */
 
-#define VEC_quick_push(TDEF,V,O)       \
-       (VEC_OP(TDEF,quick_push)(V,O VEC_CHECK_INFO))
+#define VEC_quick_push(T,V,O)  \
+       (VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))
 
 /* Push object with reallocation
-   T *VEC_T_safe_push (VEC(T) *&v, T obj); // Pointer
-   T *VEC_T_safe_push (VEC(T) *&v, T *obj); // Object
+   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
+   T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object
    
    Push a new element onto the end, returns a pointer to the slot
    filled in. For object vectors, the new value can be NULL, in which
    case NO initialization is performed.  Reallocates V, if needed.  */
 
-#define VEC_safe_push(TDEF,V,O)                \
-       (VEC_OP(TDEF,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
+#define VEC_safe_push(T,A,V,O)         \
+       (VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
 
 /* Pop element off end
    T VEC_T_pop (VEC(T) *v);            // Pointer
@@ -238,15 +258,26 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    Pop the last element off the end. Returns the element popped, for
    pointer vectors.  */
 
-#define VEC_pop(TDEF,V)                        (VEC_OP(TDEF,pop)(V VEC_CHECK_INFO))
+#define VEC_pop(T,V)   (VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))
 
 /* Truncate to specific length
    void VEC_T_truncate (VEC(T) *v, unsigned len);
    
-   Set the length as specified.  This is an O(1) operation.  */
+   Set the length as specified.  The new length must be less than or
+   equal to the current length.  This is an O(1) operation.  */
 
-#define VEC_truncate(TDEF,V,I)         \
-       (VEC_OP(TDEF,truncate)(V,I VEC_CHECK_INFO))
+#define VEC_truncate(T,V,I)            \
+       (VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))
+
+/* Grow to a specific length.
+   void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);
+
+   Grow the vector to a specific length.  LEN must be at least as
+   large as the current length.  The new elements are
+   uninitialized.  */
+
+#define VEC_safe_grow(T,A,V,I)         \
+       (VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
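
A brief sketch of growing and then initializing the new slots (old_len and
new_len are illustrative):

   old_len = VEC_length (tree, v);
   VEC_safe_grow (tree, heap, v, new_len);  // new_len >= old_len
   for (ix = old_len; ix < new_len; ix++)
     VEC_replace (tree, v, ix, NULL_TREE);  // the grown slots start uninitialized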
 
 /* Replace element
    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
@@ -258,8 +289,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    NULL, in which case no overwriting of the slot is actually
    performed.  */
 
-#define VEC_replace(TDEF,V,I,O)                \
-       (VEC_OP(TDEF,replace)(V,I,O VEC_CHECK_INFO))
+#define VEC_replace(T,V,I,O)           \
+       (VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))
 
 /* Insert object with no reallocation
    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
@@ -270,20 +301,20 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    NULL, in which case no initialization of the inserted slot takes
    place. Aborts if there is insufficient space.  */
 
-#define VEC_quick_insert(TDEF,V,I,O)   \
-       (VEC_OP(TDEF,quick_insert)(V,I,O VEC_CHECK_INFO))
+#define VEC_quick_insert(T,V,I,O)      \
+       (VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))
 
 /* Insert object with reallocation
-   T *VEC_T_safe_insert (VEC(T) *&v, unsigned ix, T val); // Pointer
-   T *VEC_T_safe_insert (VEC(T) *&v, unsigned ix, T *val); // Object
+   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
+   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
    
    Insert an element, VAL, at the IXth position of V. Return a pointer
    to the slot created.  For vectors of object, the new value can be
    NULL, in which case no initialization of the inserted slot takes
    place. Reallocate V, if necessary.  */
 
-#define VEC_safe_insert(TDEF,V,I,O)    \
-       (VEC_OP(TDEF,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))
+#define VEC_safe_insert(T,A,V,I,O)     \
+       (VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))
      
 /* Remove element retaining order
    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
@@ -293,8 +324,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    remaining elements is preserved.  For pointer vectors returns the
    removed object.  This is an O(N) operation due to a memmove.  */
 
-#define VEC_ordered_remove(TDEF,V,I)   \
-       (VEC_OP(TDEF,ordered_remove)(V,I VEC_CHECK_INFO))
+#define VEC_ordered_remove(T,V,I)      \
+       (VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
 
 /* Remove element destroying order
    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
@@ -304,8 +335,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    remaining elements is destroyed.  For pointer vectors returns the
    removed object.  This is an O(1) operation.  */
 
-#define VEC_unordered_remove(TDEF,V,I) \
-       (VEC_OP(TDEF,unordered_remove)(V,I VEC_CHECK_INFO))
+#define VEC_unordered_remove(T,V,I)    \
+       (VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
 
 /* Get the address of the array of elements
    T *VEC_T_address (VEC(T) v)
@@ -313,7 +344,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    If you need to directly manipulate the array (for instance, you
    want to feed it to qsort), use this accessor.  */
 
-#define VEC_address(TDEF,V)            (VEC_OP(TDEF,address)(V))
+#define VEC_address(T,V)               (VEC_OP(T,base,address)(VEC_BASE(V)))
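
For example, handing the raw array to qsort ('compare_trees' is a
hypothetical comparator):

   qsort (VEC_address (tree, v), VEC_length (tree, v),
          sizeof (tree), compare_trees);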
 
 /* Find the first index in the vector not less than the object.
    unsigned VEC_T_lower_bound (VEC(T) *v, const T val, 
@@ -325,25 +356,26 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    changing the ordering of V.  LESSTHAN is a function that returns
    true if the first argument is strictly less than the second.  */
    
-#define VEC_lower_bound(TDEF,V,O,LT)    \
-       (VEC_OP(TDEF,lower_bound)(V,O,LT VEC_CHECK_INFO))
+#define VEC_lower_bound(T,V,O,LT)    \
+       (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
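
A sketch of keeping a vector sorted with lower_bound plus safe_insert
('tree_less' is a hypothetical strict-ordering predicate):

   ix = VEC_lower_bound (tree, v, elt, tree_less);  // first position not less than elt
   VEC_safe_insert (tree, heap, v, ix, elt);        // insert there, preserving order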
 
 #if !IN_GENGTYPE
 /* Reallocate an array of elements with prefix.  */
 extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
 extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
-extern void vec_gc_free (void *);
+extern void ggc_free (void *);
+#define vec_gc_free(V) ggc_free (V)
 extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
 extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
-extern void vec_heap_free (void *);
+#define vec_heap_free(V) free (V)
 
 #if ENABLE_CHECKING
 #define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
 #define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
 #define VEC_CHECK_PASS ,file_,line_,function_
      
-#define VEC_ASSERT(EXPR,OP,TDEF) \
-  (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(TDEF)), 0))
+#define VEC_ASSERT(EXPR,OP,T,A) \
+  (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
 
 extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
      ATTRIBUTE_NORETURN;
@@ -352,63 +384,74 @@ extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
 #define VEC_CHECK_INFO
 #define VEC_CHECK_DECL
 #define VEC_CHECK_PASS
-#define VEC_ASSERT(EXPR,OP,TYPE) (void)(EXPR)
+#define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
 #endif
 
-#define VEC(TDEF) VEC_##TDEF
-#define VEC_OP(TDEF,OP) VEC_OP_(VEC(TDEF),OP)
-#define VEC_OP_(VEC,OP) VEC_OP__(VEC,OP)
-#define VEC_OP__(VEC,OP) VEC ## _ ## OP
+#define VEC(T,A) VEC_##T##_##A
+#define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP
 #else  /* IN_GENGTYPE */
-#define VEC(TDEF) VEC_ TDEF
+#define VEC(T,A) VEC_ T _ A
 #define VEC_STRINGIFY(X) VEC_STRINGIFY_(X)
 #define VEC_STRINGIFY_(X) #X
 #undef GTY
 #endif /* IN_GENGTYPE */
 
-#define VEC_TDEF(TDEF)                                                   \
-typedef struct VEC (TDEF) GTY(())                                        \
+/* Base of vector type, not user visible.  */     
+#define VEC_T(T,B)                                                       \
+typedef struct VEC(T,B) GTY(())                                                  \
 {                                                                        \
   unsigned num;                                                                  \
   unsigned alloc;                                                        \
-  TDEF GTY ((length ("%h.num"))) vec[1];                                 \
-} VEC (TDEF)
+  T GTY ((length ("%h.num"))) vec[1];                                    \
+} VEC(T,B)
+
+/* Derived vector type, user visible.  */
+#define VEC_TA(T,B,A,GTY)                                                \
+typedef struct VEC(T,A) GTY                                              \
+{                                                                        \
+  VEC(T,B) base;                                                         \
+} VEC(T,A)
+
+/* Convert to base type.  */
+#define VEC_BASE(P)  ((P) ? &(P)->base : 0)
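
Roughly, for the existing 'tree' typedef the base and derived macros expand
to shapes like the following (GTY markers omitted; a simplified sketch, not
the literal expansion):

   typedef struct VEC_tree_base { unsigned num, alloc; tree vec[1]; } VEC_tree_base;
   typedef struct VEC_tree_gc { VEC_tree_base base; } VEC_tree_gc;
   // VEC_BASE maps a VEC(tree,gc) * to its embedded VEC(tree,base) *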
 
 /* Vector of pointer to object.  */
 #if IN_GENGTYPE
-{"DEF_VEC_GC_P", VEC_STRINGIFY (VEC_TDEF (#)) ";", NULL},
-{"DEF_VEC_MALLOC_P", "", NULL},
+{"DEF_VEC_P", VEC_STRINGIFY (VEC_T(#0,#1)) ";", "none"},
+{"DEF_VEC_ALLOC_P", VEC_STRINGIFY (VEC_TA (#0,#1,#2,#3)) ";", NULL},
 #else
-#define DEF_VEC_GC_P(TDEF) DEF_VEC_P(TDEF,gc)
-#define DEF_VEC_MALLOC_P(TDEF) DEF_VEC_P(TDEF,heap)
   
-#define DEF_VEC_P(TDEF,a)                                                \
-VEC_TDEF (TDEF);                                                         \
+#define DEF_VEC_P(T)                                                     \
+VEC_T(T,base);                                                           \
                                                                          \
-static inline unsigned VEC_OP (TDEF,length)                              \
-     (const VEC (TDEF) *vec_)                                            \
+static inline void VEC_OP (T,must,be_a_pointer_or_integer) (void)        \
+{                                                                        \
+  (void)((T)0 == (void *)0);                                             \
+}                                                                        \
+                                                                         \
+static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_)   \
 {                                                                        \
   return vec_ ? vec_->num : 0;                                           \
 }                                                                        \
                                                                          \
-static inline TDEF VEC_OP (TDEF,last)                                    \
-     (const VEC (TDEF) *vec_ VEC_CHECK_DECL)                             \
+static inline T VEC_OP (T,base,last)                                     \
+     (const VEC(T,base) *vec_ VEC_CHECK_DECL)                            \
 {                                                                        \
-  VEC_ASSERT (vec_ && vec_->num, "last", TDEF);                                  \
+  VEC_ASSERT (vec_ && vec_->num, "last", T, base);                       \
                                                                          \
   return vec_->vec[vec_->num - 1];                                       \
 }                                                                        \
                                                                          \
-static inline TDEF VEC_OP (TDEF,index)                                   \
-     (const VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)               \
+static inline T VEC_OP (T,base,index)                                    \
+     (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)              \
 {                                                                        \
-  VEC_ASSERT (vec_ && ix_ < vec_->num, "index", TDEF);                   \
+  VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base);                \
                                                                          \
   return vec_->vec[ix_];                                                 \
 }                                                                        \
                                                                          \
-static inline int VEC_OP (TDEF,iterate)                                          \
-     (const VEC (TDEF) *vec_, unsigned ix_, TDEF *ptr)                   \
+static inline int VEC_OP (T,base,iterate)                                \
+     (const VEC(T,base) *vec_, unsigned ix_, T *ptr)                     \
 {                                                                        \
   if (vec_ && ix_ < vec_->num)                                           \
     {                                                                    \
@@ -422,170 +465,103 @@ static inline int VEC_OP (TDEF,iterate)                                   \
     }                                                                    \
 }                                                                        \
                                                                          \
-static inline VEC (TDEF) *VEC_OP (TDEF,alloc)                            \
-     (int alloc_ MEM_STAT_DECL)                                                  \
-{                                                                        \
-  return (VEC (TDEF) *) vec_##a##_p_reserve (NULL, alloc_ - !alloc_ PASS_MEM_STAT);\
-}                                                                        \
-                                                                         \
-static inline void VEC_OP (TDEF,free)                                    \
-     (VEC (TDEF) **vec_)                                                 \
-{                                                                        \
-  vec_##a##_free (*vec_);                                                \
-  *vec_ = NULL;                                                                  \
-}                                                                        \
-                                                                         \
-static inline size_t VEC_OP (TDEF,embedded_size)                         \
+static inline size_t VEC_OP (T,base,embedded_size)                       \
      (int alloc_)                                                        \
 {                                                                        \
-  return offsetof (VEC(TDEF),vec) + alloc_ * sizeof(TDEF);               \
+  return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);                \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,embedded_init)                           \
-     (VEC (TDEF) *vec_, int alloc_)                                      \
+static inline void VEC_OP (T,base,embedded_init)                         \
+     (VEC(T,base) *vec_, int alloc_)                                     \
 {                                                                        \
   vec_->num = 0;                                                         \
   vec_->alloc = alloc_;                                                          \
 }                                                                        \
                                                                          \
-static inline int VEC_OP (TDEF,space)                                    \
-     (VEC (TDEF) *vec_, int alloc_)                                      \
+static inline int VEC_OP (T,base,space)                                          \
+     (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)                      \
 {                                                                        \
-  return vec_ ? ((vec_)->alloc - (vec_)->num                             \
-                >= (unsigned)(alloc_ < 0 ? 1 : alloc_)) : !alloc_;       \
-}                                                                        \
-                                                                         \
-static inline int VEC_OP (TDEF,reserve)                                          \
-     (VEC (TDEF) **vec_, int alloc_ MEM_STAT_DECL)                       \
-{                                                                        \
-  int extend = !VEC_OP (TDEF,space) (*vec_, alloc_);                     \
-                                                                         \
-  if (extend)                                                            \
-    *vec_ = (VEC (TDEF) *) vec_##a##_p_reserve (*vec_, alloc_ PASS_MEM_STAT);   \
-                                                                         \
-  return extend;                                                         \
+  VEC_ASSERT (alloc_ >= 0, "space", T, base);                            \
+  return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;   \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,quick_push)                             \
-     (VEC (TDEF) *vec_, TDEF obj_ VEC_CHECK_DECL)                        \
+static inline T *VEC_OP (T,base,quick_push)                              \
+     (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL)                                  \
 {                                                                        \
-  TDEF *slot_;                                                           \
+  T *slot_;                                                              \
                                                                          \
-  VEC_ASSERT (vec_->num < vec_->alloc, "push", TDEF);                    \
+  VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base);                 \
   slot_ = &vec_->vec[vec_->num++];                                       \
   *slot_ = obj_;                                                         \
                                                                          \
   return slot_;                                                                  \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,safe_push)                              \
-     (VEC (TDEF) **vec_, TDEF obj_ VEC_CHECK_DECL MEM_STAT_DECL)                 \
+static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL)   \
 {                                                                        \
-  VEC_OP (TDEF,reserve) (vec_, -1 PASS_MEM_STAT);                        \
-                                                                         \
-  return VEC_OP (TDEF,quick_push) (*vec_, obj_ VEC_CHECK_PASS);                  \
-}                                                                        \
+  T obj_;                                                                \
                                                                          \
-static inline TDEF VEC_OP (TDEF,pop)                                     \
-     (VEC (TDEF) *vec_ VEC_CHECK_DECL)                                   \
-{                                                                        \
-  TDEF obj_;                                                             \
-                                                                         \
-  VEC_ASSERT (vec_->num, "pop", TDEF);                                   \
+  VEC_ASSERT (vec_->num, "pop", T, base);                                \
   obj_ = vec_->vec[--vec_->num];                                         \
                                                                          \
   return obj_;                                                           \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,truncate)                                \
-     (VEC (TDEF) *vec_, unsigned size_ VEC_CHECK_DECL)                   \
+static inline void VEC_OP (T,base,truncate)                              \
+     (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)                          \
 {                                                                        \
-  VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", TDEF);     \
+  VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base);          \
   if (vec_)                                                              \
     vec_->num = size_;                                                   \
 }                                                                        \
                                                                          \
-static inline TDEF VEC_OP (TDEF,replace)                                 \
-     (VEC (TDEF) *vec_, unsigned ix_, TDEF obj_ VEC_CHECK_DECL)                  \
+static inline T VEC_OP (T,base,replace)                                          \
+     (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)            \
 {                                                                        \
-  TDEF old_obj_;                                                         \
+  T old_obj_;                                                            \
                                                                          \
-  VEC_ASSERT (ix_ < vec_->num, "replace", TDEF);                         \
+  VEC_ASSERT (ix_ < vec_->num, "replace", T, base);                      \
   old_obj_ = vec_->vec[ix_];                                             \
   vec_->vec[ix_] = obj_;                                                 \
                                                                          \
   return old_obj_;                                                       \
 }                                                                        \
                                                                          \
-static inline unsigned VEC_OP (TDEF,lower_bound)                       \
-     (VEC (TDEF) *vec_, const TDEF obj_, bool (*lessthan_)(const TDEF, const TDEF) VEC_CHECK_DECL) \
-{                                                                      \
-   unsigned int len_ = VEC_OP (TDEF, length) (vec_);                   \
-   unsigned int half_, middle_;                                                \
-   unsigned int first_ = 0;                                            \
-   while (len_ > 0)                                                    \
-     {                                                                 \
-        TDEF middle_elem_;                                             \
-        half_ = len_ >> 1;                                             \
-        middle_ = first_;                                              \
-        middle_ += half_;                                              \
-        middle_elem_ = VEC_OP (TDEF, index) (vec_, middle_ VEC_CHECK_PASS); \
-        if (lessthan_ (middle_elem_, obj_))                            \
-          {                                                            \
-             first_ = middle_;                                         \
-             ++first_;                                                 \
-             len_ = len_ - half_ - 1;                                  \
-          }                                                            \
-        else                                                           \
-          len_ = half_;                                                        \
-     }                                                                 \
-   return first_;                                                      \
-}                                                                      \
-                                                                       \
-static inline TDEF *VEC_OP (TDEF,quick_insert)                         \
-     (VEC (TDEF) *vec_, unsigned ix_, TDEF obj_ VEC_CHECK_DECL)                  \
-{                                                                        \
-  TDEF *slot_;                                                           \
-                                                                         \
-  VEC_ASSERT (vec_->num < vec_->alloc, "insert", TDEF);                          \
-  VEC_ASSERT (ix_ <= vec_->num, "insert", TDEF);                         \
+static inline T *VEC_OP (T,base,quick_insert)                            \
+     (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)            \
+{                                                                        \
+  T *slot_;                                                              \
+                                                                         \
+  VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base);               \
+  VEC_ASSERT (ix_ <= vec_->num, "insert", T, base);                      \
   slot_ = &vec_->vec[ix_];                                               \
-  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (TDEF));       \
+  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));                  \
   *slot_ = obj_;                                                         \
                                                                          \
   return slot_;                                                                  \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,safe_insert)                            \
-     (VEC (TDEF) **vec_, unsigned ix_, TDEF obj_                         \
-       VEC_CHECK_DECL MEM_STAT_DECL)                                     \
+static inline T VEC_OP (T,base,ordered_remove)                           \
+     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                    \
 {                                                                        \
-  VEC_OP (TDEF,reserve) (vec_, -1 PASS_MEM_STAT);                        \
-                                                                         \
-  return VEC_OP (TDEF,quick_insert) (*vec_, ix_, obj_ VEC_CHECK_PASS);   \
-}                                                                        \
+  T *slot_;                                                              \
+  T obj_;                                                                \
                                                                          \
-static inline TDEF VEC_OP (TDEF,ordered_remove)                                  \
-     (VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
-{                                                                        \
-  TDEF *slot_;                                                           \
-  TDEF obj_;                                                             \
-                                                                         \
-  VEC_ASSERT (ix_ < vec_->num, "remove", TDEF);                                  \
+  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);                       \
   slot_ = &vec_->vec[ix_];                                               \
   obj_ = *slot_;                                                         \
-  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (TDEF));               \
+  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));          \
                                                                          \
   return obj_;                                                           \
 }                                                                        \
                                                                          \
-static inline TDEF VEC_OP (TDEF,unordered_remove)                        \
-     (VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
+static inline T VEC_OP (T,base,unordered_remove)                         \
+     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                    \
 {                                                                        \
-  TDEF *slot_;                                                           \
-  TDEF obj_;                                                             \
+  T *slot_;                                                              \
+  T obj_;                                                                \
                                                                          \
-  VEC_ASSERT (ix_ < vec_->num, "remove", TDEF);                                  \
+  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);                       \
   slot_ = &vec_->vec[ix_];                                               \
   obj_ = *slot_;                                                         \
   *slot_ = vec_->vec[--vec_->num];                                       \
@@ -593,51 +569,133 @@ static inline TDEF VEC_OP (TDEF,unordered_remove)                          \
   return obj_;                                                           \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,address)                                \
-     (VEC (TDEF) *vec_)                                                          \
+static inline T *VEC_OP (T,base,address)                                 \
+     (VEC(T,base) *vec_)                                                 \
 {                                                                        \
   return vec_ ? vec_->vec : 0;                                           \
 }                                                                        \
                                                                          \
+static inline unsigned VEC_OP (T,base,lower_bound)                       \
+     (VEC(T,base) *vec_, const T obj_,                                   \
+      bool (*lessthan_)(const T, const T) VEC_CHECK_DECL)                \
+{                                                                        \
+   unsigned int len_ = VEC_OP (T,base, length) (vec_);                   \
+   unsigned int half_, middle_;                                                  \
+   unsigned int first_ = 0;                                              \
+   while (len_ > 0)                                                      \
+     {                                                                   \
+        T middle_elem_;                                                          \
+        half_ = len_ >> 1;                                               \
+        middle_ = first_;                                                \
+        middle_ += half_;                                                \
+        middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
+        if (lessthan_ (middle_elem_, obj_))                              \
+          {                                                              \
+             first_ = middle_;                                           \
+             ++first_;                                                   \
+             len_ = len_ - half_ - 1;                                    \
+          }                                                              \
+        else                                                             \
+          len_ = half_;                                                          \
+     }                                                                   \
+   return first_;                                                        \
+}                                                                        \
+                                                                         \
+VEC_TA(T,base,none,)
+  
+#define DEF_VEC_ALLOC_P(T,A)                                             \
+VEC_TA(T,base,A,);                                                       \
+                                                                         \
+static inline VEC(T,A) *VEC_OP (T,A,alloc)                               \
+     (int alloc_ MEM_STAT_DECL)                                                  \
+{                                                                        \
+  /* We must request exact size allocation, hence the negation.  */      \
+  return (VEC(T,A) *) vec_##A##_p_reserve (NULL, -alloc_ PASS_MEM_STAT);  \
+}                                                                        \
+                                                                         \
+static inline void VEC_OP (T,A,free)                                     \
+     (VEC(T,A) **vec_)                                                   \
+{                                                                        \
+  if (*vec_)                                                             \
+    vec_##A##_free (*vec_);                                              \
+  *vec_ = NULL;                                                                  \
+}                                                                        \
+                                                                         \
+static inline int VEC_OP (T,A,reserve)                                   \
+     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)                  \
+{                                                                        \
+  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_),                          \
+                                      alloc_ < 0 ? -alloc_ : alloc_      \
+                                      VEC_CHECK_PASS);                   \
+                                                                         \
+  if (extend)                                                            \
+    *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
+                                                                         \
+  return extend;                                                         \
+}                                                                        \
+                                                                         \
+static inline void VEC_OP (T,A,safe_grow)                                \
+     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)           \
+{                                                                        \
+  VEC_ASSERT (size_ >= 0                                                 \
+             && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
+                                                "grow", T, A);           \
+  VEC_OP (T,A,reserve) (vec_, (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) - size_ \
+                       VEC_CHECK_PASS PASS_MEM_STAT);                    \
+  VEC_BASE (*vec_)->num = size_;                                         \
+}                                                                        \
+                                                                         \
+static inline T *VEC_OP (T,A,safe_push)                                          \
+     (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL)              \
+{                                                                        \
+  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);           \
+                                                                         \
+  return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
+}                                                                        \
+                                                                         \
+static inline T *VEC_OP (T,A,safe_insert)                                \
+     (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL)  \
+{                                                                        \
+  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);           \
+                                                                         \
+  return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_        \
+                                      VEC_CHECK_PASS);                   \
+}                                                                        \
+                                                                         \
 struct vec_swallow_trailing_semi
 #endif
 
 /* Vector of object.  */
 #if IN_GENGTYPE
-{"DEF_VEC_GC_O", VEC_STRINGIFY (VEC_TDEF (#)) ";", NULL},
-{"DEF_VEC_MALLOC_O", "", NULL},
+{"DEF_VEC_O", VEC_STRINGIFY (VEC_T(#0,#1)) ";", "none"},
+{"DEF_VEC_ALLOC_O", VEC_STRINGIFY (VEC_TA(#0,#1,#2,#3)) ";", NULL},
 #else
   
-#define DEF_VEC_GC_O(TDEF) DEF_VEC_O(TDEF,gc)
-#define DEF_VEC_MALLOC_O(TDEF) DEF_VEC_O(TDEF,heap)
-
-#define DEF_VEC_O(TDEF,a)                                                \
-VEC_TDEF (TDEF);                                                         \
+#define DEF_VEC_O(T)                                                     \
+VEC_T(T,base);                                                           \
                                                                          \
-static inline unsigned VEC_OP (TDEF,length)                              \
-     (const VEC (TDEF) *vec_)                                            \
+static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_)          \
 {                                                                        \
   return vec_ ? vec_->num : 0;                                           \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,last)                                   \
-     (VEC (TDEF) *vec_ VEC_CHECK_DECL)                                   \
+static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL)  \
 {                                                                        \
-  VEC_ASSERT (vec_ && vec_->num, "last", TDEF);                                  \
+  VEC_ASSERT (vec_ && vec_->num, "last", T, base);                       \
                                                                          \
   return &vec_->vec[vec_->num - 1];                                      \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,index)                                          \
-     (VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
+static inline T *VEC_OP (T,base,index)                                   \
+     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                    \
 {                                                                        \
-  VEC_ASSERT (vec_ && ix_ < vec_->num, "index", TDEF);                   \
+  VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base);                \
                                                                          \
   return &vec_->vec[ix_];                                                \
 }                                                                        \
                                                                          \
-static inline int VEC_OP (TDEF,iterate)                                          \
-     (VEC (TDEF) *vec_, unsigned ix_, TDEF **ptr)                        \
+static inline int VEC_OP (T,base,iterate)                                \
+     (VEC(T,base) *vec_, unsigned ix_, T **ptr)                                  \
 {                                                                        \
   if (vec_ && ix_ < vec_->num)                                           \
     {                                                                    \
@@ -651,60 +709,32 @@ static inline int VEC_OP (TDEF,iterate)                                     \
     }                                                                    \
 }                                                                        \
                                                                          \
-static inline VEC (TDEF) *VEC_OP (TDEF,alloc)                            \
-     (int alloc_ MEM_STAT_DECL)                                                  \
-{                                                                        \
-  return (VEC (TDEF) *) vec_##a##_o_reserve (NULL, alloc_ - !alloc_,     \
-                                       offsetof (VEC(TDEF),vec), sizeof (TDEF)\
-                                       PASS_MEM_STAT);                   \
-}                                                                        \
-                                                                         \
-static inline void VEC_OP (TDEF,free)                                    \
-     (VEC (TDEF) **vec_)                                                 \
-{                                                                        \
-  vec_##a##_free (*vec_);                                                \
-  *vec_ = NULL;                                                                  \
-}                                                                        \
-                                                                         \
-static inline size_t VEC_OP (TDEF,embedded_size)                         \
+static inline size_t VEC_OP (T,base,embedded_size)                       \
      (int alloc_)                                                        \
 {                                                                        \
-  return offsetof (VEC(TDEF),vec) + alloc_ * sizeof(TDEF);               \
+  return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);                \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,embedded_init)                           \
-     (VEC (TDEF) *vec_, int alloc_)                                      \
+static inline void VEC_OP (T,base,embedded_init)                         \
+     (VEC(T,base) *vec_, int alloc_)                                     \
 {                                                                        \
   vec_->num = 0;                                                         \
   vec_->alloc = alloc_;                                                          \
 }                                                                        \
                                                                          \
-static inline int VEC_OP (TDEF,space)                                    \
-     (VEC (TDEF) *vec_, int alloc_)                                      \
+static inline int VEC_OP (T,base,space)                                          \
+     (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)                      \
 {                                                                        \
-  return vec_ ? ((vec_)->alloc - (vec_)->num                             \
-                >= (unsigned)(alloc_ < 0 ? 1 : alloc_)) : !alloc_;       \
+  VEC_ASSERT (alloc_ >= 0, "space", T, base);                            \
+  return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;   \
 }                                                                        \
                                                                          \
-static inline int VEC_OP (TDEF,reserve)                                          \
-     (VEC (TDEF) **vec_, int alloc_ MEM_STAT_DECL)                       \
+static inline T *VEC_OP (T,base,quick_push)                              \
+     (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL)                   \
 {                                                                        \
-  int extend = !VEC_OP (TDEF,space) (*vec_, alloc_);                     \
-                                                                         \
-  if (extend)                                                            \
-    *vec_ = (VEC (TDEF) *) vec_##a##_o_reserve (*vec_, alloc_,           \
-                          offsetof (VEC(TDEF),vec), sizeof (TDEF)        \
-                          PASS_MEM_STAT);                                \
-                                                                         \
-  return extend;                                                         \
-}                                                                        \
-                                                                         \
-static inline TDEF *VEC_OP (TDEF,quick_push)                             \
-     (VEC (TDEF) *vec_, const TDEF *obj_ VEC_CHECK_DECL)                 \
-{                                                                        \
-  TDEF *slot_;                                                           \
+  T *slot_;                                                              \
                                                                          \
-  VEC_ASSERT (vec_->num < vec_->alloc, "push", TDEF);                    \
+  VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base);                 \
   slot_ = &vec_->vec[vec_->num++];                                       \
   if (obj_)                                                              \
     *slot_ = *obj_;                                                      \
@@ -712,35 +742,26 @@ static inline TDEF *VEC_OP (TDEF,quick_push)                                \
   return slot_;                                                                  \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,safe_push)                              \
-     (VEC (TDEF) **vec_, const TDEF *obj_ VEC_CHECK_DECL MEM_STAT_DECL)   \
-{                                                                        \
-  VEC_OP (TDEF,reserve) (vec_, -1 PASS_MEM_STAT);                        \
-                                                                         \
-  return VEC_OP (TDEF,quick_push) (*vec_, obj_ VEC_CHECK_PASS);                  \
-}                                                                        \
-                                                                         \
-static inline void VEC_OP (TDEF,pop)                                     \
-     (VEC (TDEF) *vec_ VEC_CHECK_DECL)                                   \
+static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
 {                                                                        \
-  VEC_ASSERT (vec_->num, "pop", TDEF);                                   \
+  VEC_ASSERT (vec_->num, "pop", T, base);                                \
   --vec_->num;                                                           \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,truncate)                                \
-     (VEC (TDEF) *vec_, unsigned size_ VEC_CHECK_DECL)                   \
+static inline void VEC_OP (T,base,truncate)                              \
+     (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)                          \
 {                                                                        \
-  VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", TDEF);     \
+  VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base);          \
   if (vec_)                                                              \
     vec_->num = size_;                                                   \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,replace)                                \
-     (VEC (TDEF) *vec_, unsigned ix_, const TDEF *obj_ VEC_CHECK_DECL)   \
+static inline T *VEC_OP (T,base,replace)                                 \
+     (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL)     \
 {                                                                        \
-  TDEF *slot_;                                                           \
+  T *slot_;                                                              \
                                                                          \
-  VEC_ASSERT (ix_ < vec_->num, "replace", TDEF);                         \
+  VEC_ASSERT (ix_ < vec_->num, "replace", T, base);                      \
   slot_ = &vec_->vec[ix_];                                               \
   if (obj_)                                                              \
     *slot_ = *obj_;                                                      \
@@ -748,76 +769,137 @@ static inline TDEF *VEC_OP (TDEF,replace)                                  \
   return slot_;                                                                  \
 }                                                                        \
                                                                          \
-static inline unsigned VEC_OP (TDEF,lower_bound)                       \
-     (VEC (TDEF) *vec_, const TDEF *obj_, bool (*lessthan_)(const TDEF *, const TDEF *) VEC_CHECK_DECL) \
-{                                                                      \
-   unsigned int len_ = VEC_OP (TDEF, length) (vec_);                   \
-   unsigned int half_, middle_;                                                \
-   unsigned int first_ = 0;                                            \
-   while (len_ > 0)                                                    \
-     {                                                                 \
-        TDEF *middle_elem_;                                            \
-        half_ = len_ >> 1;                                             \
-        middle_ = first_;                                              \
-        middle_ += half_;                                              \
-        middle_elem_ = VEC_OP (TDEF, index) (vec_, middle_ VEC_CHECK_PASS); \
-        if (lessthan_ (middle_elem_, obj_))                            \
-          {                                                            \
-             first_ = middle_;                                         \
-             ++first_;                                                 \
-             len_ = len_ - half_ - 1;                                  \
-          }                                                            \
-        else                                                           \
-          len_ = half_;                                                        \
-     }                                                                 \
-   return first_;                                                      \
-}                                                                      \
-                                                                       \
-static inline TDEF *VEC_OP (TDEF,quick_insert)                         \
-     (VEC (TDEF) *vec_, unsigned ix_, const TDEF *obj_ VEC_CHECK_DECL) \
-{                                                                        \
-  TDEF *slot_;                                                           \
-                                                                         \
-  VEC_ASSERT (vec_->num < vec_->alloc, "insert", TDEF);                          \
-  VEC_ASSERT (ix_ <= vec_->num, "insert", TDEF);                         \
+static inline T *VEC_OP (T,base,quick_insert)                            \
+     (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL)     \
+{                                                                        \
+  T *slot_;                                                              \
+                                                                         \
+  VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base);               \
+  VEC_ASSERT (ix_ <= vec_->num, "insert", T, base);                      \
   slot_ = &vec_->vec[ix_];                                               \
-  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (TDEF));       \
+  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));                  \
   if (obj_)                                                              \
     *slot_ = *obj_;                                                      \
                                                                          \
   return slot_;                                                                  \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,safe_insert)                            \
-     (VEC (TDEF) **vec_, unsigned ix_, const TDEF *obj_                          \
-               VEC_CHECK_DECL MEM_STAT_DECL)                             \
+static inline void VEC_OP (T,base,ordered_remove)                        \
+     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                    \
 {                                                                        \
-  VEC_OP (TDEF,reserve) (vec_, -1 PASS_MEM_STAT);                        \
+  T *slot_;                                                              \
                                                                          \
-  return VEC_OP (TDEF,quick_insert) (*vec_, ix_, obj_ VEC_CHECK_PASS);   \
+  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);                       \
+  slot_ = &vec_->vec[ix_];                                               \
+  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));                  \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,ordered_remove)                                  \
-     (VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
+static inline void VEC_OP (T,base,unordered_remove)                      \
+     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)                    \
 {                                                                        \
-  TDEF *slot_;                                                           \
+  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);                       \
+  vec_->vec[ix_] = vec_->vec[--vec_->num];                               \
+}                                                                        \
                                                                          \
-  VEC_ASSERT (ix_ < vec_->num, "remove", TDEF);                                  \
-  slot_ = &vec_->vec[ix_];                                               \
-  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (TDEF));       \
+static inline T *VEC_OP (T,base,address)                                 \
+     (VEC(T,base) *vec_)                                                 \
+{                                                                        \
+  return vec_ ? vec_->vec : 0;                                           \
 }                                                                        \
                                                                          \
-static inline void VEC_OP (TDEF,unordered_remove)                        \
-     (VEC (TDEF) *vec_, unsigned ix_ VEC_CHECK_DECL)                     \
+static inline unsigned VEC_OP (T,base,lower_bound)                       \
+     (VEC(T,base) *vec_, const T *obj_,                                          \
+      bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL)            \
+{                                                                        \
+   unsigned int len_ = VEC_OP (T, base, length) (vec_);                          \
+   unsigned int half_, middle_;                                                  \
+   unsigned int first_ = 0;                                              \
+   while (len_ > 0)                                                      \
+     {                                                                   \
+        T *middle_elem_;                                                 \
+        half_ = len_ >> 1;                                               \
+        middle_ = first_;                                                \
+        middle_ += half_;                                                \
+        middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
+        if (lessthan_ (middle_elem_, obj_))                              \
+          {                                                              \
+             first_ = middle_;                                           \
+             ++first_;                                                   \
+             len_ = len_ - half_ - 1;                                    \
+          }                                                              \
+        else                                                             \
+          len_ = half_;                                                          \
+     }                                                                   \
+   return first_;                                                        \
+}                                                                        \
+                                                                         \
+VEC_TA(T,base,none,)
+
+#define DEF_VEC_ALLOC_O(T,A)                                             \
+VEC_TA(T,base,A,);                                                       \
+                                                                         \
+static inline VEC(T,A) *VEC_OP (T,A,alloc)                               \
+     (int alloc_ MEM_STAT_DECL)                                                  \
 {                                                                        \
-  VEC_ASSERT (ix_ < vec_->num, "remove", TDEF);                                  \
-  vec_->vec[ix_] = vec_->vec[--vec_->num];                               \
+  /* We must request exact size allocation, hence the negation.  */      \
+  return (VEC(T,A) *) vec_##A##_o_reserve (NULL, -alloc_,                \
+                                           offsetof (VEC(T,A),base.vec),  \
+                                          sizeof (T)                     \
+                                           PASS_MEM_STAT);               \
 }                                                                        \
                                                                          \
-static inline TDEF *VEC_OP (TDEF,address)                                \
-     (VEC (TDEF) *vec_)                                                          \
+static inline void VEC_OP (T,A,free)                                     \
+     (VEC(T,A) **vec_)                                                   \
 {                                                                        \
-  return vec_ ? vec_->vec : 0;                                           \
+  if (*vec_)                                                             \
+    vec_##A##_free (*vec_);                                              \
+  *vec_ = NULL;                                                                  \
+}                                                                        \
+                                                                         \
+static inline int VEC_OP (T,A,reserve)                                   \
+     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)                  \
+{                                                                        \
+  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_),                          \
+                                      alloc_ < 0 ? -alloc_ : alloc_      \
+                                      VEC_CHECK_PASS);                   \
+                                                                         \
+  if (extend)                                                            \
+    *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_,             \
+                                             offsetof (VEC(T,A),base.vec),\
+                                             sizeof (T)                  \
+                                             PASS_MEM_STAT);             \
+                                                                         \
+  return extend;                                                         \
+}                                                                        \
+                                                                         \
+static inline void VEC_OP (T,A,safe_grow)                                \
+     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)           \
+{                                                                        \
+  VEC_ASSERT (size_ >= 0                                                 \
+             && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
+                                                "grow", T, A);           \
+  VEC_OP (T,A,reserve) (vec_, (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) - size_ \
+                       VEC_CHECK_PASS PASS_MEM_STAT);                    \
+  VEC_BASE (*vec_)->num = size_;                                         \
+}                                                                        \
+                                                                         \
+static inline T *VEC_OP (T,A,safe_push)                                          \
+     (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL)       \
+{                                                                        \
+  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);           \
+                                                                         \
+  return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS);  \
+}                                                                        \
+                                                                         \
+static inline T *VEC_OP (T,A,safe_insert)                                \
+     (VEC(T,A) **vec_, unsigned ix_, const T *obj_                       \
+               VEC_CHECK_DECL MEM_STAT_DECL)                             \
+{                                                                        \
+  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);           \
+                                                                         \
+  return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_        \
+                                      VEC_CHECK_PASS);                   \
 }                                                                        \
                                                                          \
 struct vec_swallow_trailing_semi
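For reference, a minimal usage sketch of the reworked two-parameter API (illustrative only, not part of the patch).  It assumes the usual wrapper macros (VEC_alloc, VEC_safe_push, VEC_index, VEC_length, VEC_free) expand to the VEC_OP functions generated above, and uses a hypothetical fieldoff-style element type; the struct layout and the example function are made up for illustration.

    /* Hypothetical element type; any complete object type works with
       DEF_VEC_O.  */
    typedef struct fieldoff { unsigned offset; unsigned size; } fieldoff_t;

    DEF_VEC_O (fieldoff_t);              /* allocation-neutral base ops   */
    DEF_VEC_ALLOC_O (fieldoff_t, heap);  /* heap-specific alloc/safe ops  */

    static unsigned
    example (void)
    {
      VEC (fieldoff_t, heap) *v = VEC_alloc (fieldoff_t, heap, 8);
      fieldoff_t f = { 0, 4 };
      unsigned i, total = 0;

      /* safe_push reserves space via vec_heap_o_reserve, then quick_push.  */
      VEC_safe_push (fieldoff_t, heap, v, &f);

      for (i = 0; i < VEC_length (fieldoff_t, v); i++)
        total += VEC_index (fieldoff_t, v, i)->size;

      /* free releases the storage and NULLs the caller's pointer.  */
      VEC_free (fieldoff_t, heap, v);
      return total;
    }

The point of the split is visible here: the element type is declared once with DEF_VEC_O, while each allocation strategy (heap, gc) only adds the alloc/free/safe_* layer on top of the shared base operations.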