+2014-11-20 Trevor Saunders <tsaunders@mozilla.com>
+
+ * doc/gty.texi: Document the new cache gty attribute.
+ * gengtype.c (finish_cache_funcs): New function.
+ (write_roots): Call gt_cleare_cache on global variables with the cache
+ gty attribute.
+ * ggc-common.c (ggc_mark_roots): Call gt_clear_caches.
+ * ggc.h (gt_clear_caches): New declaration.
+ * hash-table.h (struct ggc_cache_hasher): New hasher for caches in gc
+ memory.
+ (gt_cleare_cache): New function.
+ * emit-rtl.c, rtl.h, tree.c: Use hash_table instead of htab.
+
2014-11-20 Segher Boessenkool <segher@kernel.crashing.org>
* combine.c (try_combine): Prefer to delete dead SETs inside

structure. This is done by marking the pointer with the
@code{use_params} option.
+@findex cache
+@item cache
+
+When the @code{cache} option is applied to a global variable,
+@code{gt_cleare_cache} is called on that variable between the mark and
+sweep phases of garbage collection.  The @code{gt_cleare_cache} function
+is free to mark blocks as used, or to clear pointers in the variable.
+
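+For example, a cached table of RTL constants could be declared like this
+(the hasher and table names below are only illustrative):
+
+@smallexample
+struct my_rtx_hasher : ggc_cache_hasher<rtx>
+@{
+  static hashval_t hash (rtx x);
+  static bool equal (rtx a, rtx b);
+@};
+
+static GTY ((cache)) hash_table<my_rtx_hasher> *my_rtx_htab;
+@end smallexample
+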
@findex deletable
@item deletable
/* A hash table storing CONST_INTs whose absolute value is greater
than MAX_SAVED_CONST_INT. */
-static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
- htab_t const_int_htab;
+struct const_int_hasher : ggc_cache_hasher<rtx>
+{
+ typedef HOST_WIDE_INT compare_type;
+
+ static hashval_t hash (rtx i);
+ static bool equal (rtx i, HOST_WIDE_INT h);
+};
+
+static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

-static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
- htab_t const_wide_int_htab;
+struct const_wide_int_hasher : ggc_cache_hasher<rtx>
+{
+ static hashval_t hash (rtx x);
+ static bool equal (rtx x, rtx y);
+};
+
+static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
/* A hash table storing register attribute structures. */
-static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
- htab_t reg_attrs_htab;
+struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
+{
+ static hashval_t hash (reg_attrs *x);
+ static bool equal (reg_attrs *a, reg_attrs *b);
+};
+
+static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
/* A hash table storing all CONST_DOUBLEs. */
-static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
- htab_t const_double_htab;
+struct const_double_hasher : ggc_cache_hasher<rtx>
+{
+ static hashval_t hash (rtx x);
+ static bool equal (rtx x, rtx y);
+};
+
+static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
/* A hash table storing all CONST_FIXEDs. */
-static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
- htab_t const_fixed_htab;
+struct const_fixed_hasher : ggc_cache_hasher<rtx>
+{
+ static hashval_t hash (rtx x);
+ static bool equal (rtx x, rtx y);
+};
+
+static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
-static hashval_t const_int_htab_hash (const void *);
-static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
-static hashval_t const_wide_int_htab_hash (const void *);
-static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
-static hashval_t const_double_htab_hash (const void *);
-static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
-static hashval_t const_fixed_htab_hash (const void *);
-static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
-static hashval_t reg_attrs_htab_hash (const void *);
-static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
\f
/* Returns a hash code for X (which is a really a CONST_INT). */
-static hashval_t
-const_int_htab_hash (const void *x)
+hashval_t
+const_int_hasher::hash (rtx x)
{
- return (hashval_t) INTVAL ((const_rtx) x);
+ return (hashval_t) INTVAL (x);
}
/* Returns nonzero if the value represented by X (which is really a
CONST_INT) is the same as that given by Y (which is really a
HOST_WIDE_INT *). */
-static int
-const_int_htab_eq (const void *x, const void *y)
+bool
+const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
- return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
+ return (INTVAL (x) == y);
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is a really a CONST_WIDE_INT). */
-static hashval_t
-const_wide_int_htab_hash (const void *x)
+hashval_t
+const_wide_int_hasher::hash (rtx x)
{
int i;
HOST_WIDE_INT hash = 0;
- const_rtx xr = (const_rtx) x;
+ const_rtx xr = x;
for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
hash += CONST_WIDE_INT_ELT (xr, i);
CONST_WIDE_INT) is the same as that given by Y (which is really a
CONST_WIDE_INT). */
-static int
-const_wide_int_htab_eq (const void *x, const void *y)
+bool
+const_wide_int_hasher::equal (rtx x, rtx y)
{
int i;
- const_rtx xr = (const_rtx) x;
- const_rtx yr = (const_rtx) y;
+ const_rtx xr = x;
+ const_rtx yr = y;
if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
- return 0;
+ return false;
for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
- return 0;
+ return false;
- return 1;
+ return true;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE). */
-static hashval_t
-const_double_htab_hash (const void *x)
+hashval_t
+const_double_hasher::hash (rtx x)
{
- const_rtx const value = (const_rtx) x;
+ const_rtx const value = x;
hashval_t h;
if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
/* Returns nonzero if the value represented by X (really a ...)
is the same as that represented by Y (really a ...) */
-static int
-const_double_htab_eq (const void *x, const void *y)
+bool
+const_double_hasher::equal (rtx x, rtx y)
{
- const_rtx const a = (const_rtx)x, b = (const_rtx)y;
+ const_rtx const a = x, b = y;
if (GET_MODE (a) != GET_MODE (b))
return 0;
/* Returns a hash code for X (which is really a CONST_FIXED). */
-static hashval_t
-const_fixed_htab_hash (const void *x)
+hashval_t
+const_fixed_hasher::hash (rtx x)
{
- const_rtx const value = (const_rtx) x;
+ const_rtx const value = x;
hashval_t h;
h = fixed_hash (CONST_FIXED_VALUE (value));
return h;
}
-/* Returns nonzero if the value represented by X (really a ...)
- is the same as that represented by Y (really a ...). */
+/* Returns nonzero if the value represented by X is the same as that
+ represented by Y. */
-static int
-const_fixed_htab_eq (const void *x, const void *y)
+bool
+const_fixed_hasher::equal (rtx x, rtx y)
{
- const_rtx const a = (const_rtx) x, b = (const_rtx) y;
+ const_rtx const a = x, b = y;
if (GET_MODE (a) != GET_MODE (b))
return 0;
/* Returns a hash code for X (which is a really a reg_attrs *). */
-static hashval_t
-reg_attrs_htab_hash (const void *x)
+hashval_t
+reg_attr_hasher::hash (reg_attrs *x)
{
- const reg_attrs *const p = (const reg_attrs *) x;
+ const reg_attrs *const p = x;
return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
-/* Returns nonzero if the value represented by X (which is really a
- reg_attrs *) is the same as that given by Y (which is also really a
- reg_attrs *). */
+/* Returns nonzero if the value represented by X is the same as that given by
+ Y. */
-static int
-reg_attrs_htab_eq (const void *x, const void *y)
+bool
+reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
- const reg_attrs *const p = (const reg_attrs *) x;
- const reg_attrs *const q = (const reg_attrs *) y;
+ const reg_attrs *const p = x;
+ const reg_attrs *const q = y;
return (p->decl == q->decl && p->offset == q->offset);
}
get_reg_attrs (tree decl, int offset)
{
reg_attrs attrs;
- void **slot;
/* If everything is the default, we can just return zero. */
if (decl == 0 && offset == 0)
attrs.decl = decl;
attrs.offset = offset;
- slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
+ reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
if (*slot == 0)
{
*slot = ggc_alloc<reg_attrs> ();
memcpy (*slot, &attrs, sizeof (reg_attrs));
}
- return (reg_attrs *) *slot;
+ return *slot;
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
- void **slot;
-
if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
return const_int_rtx[arg + MAX_SAVED_CONST_INT];
#endif
/* Look up the CONST_INT in the hash table. */
- slot = htab_find_slot_with_hash (const_int_htab, &arg,
- (hashval_t) arg, INSERT);
+ rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
+ INSERT);
if (*slot == 0)
*slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
- return (rtx) *slot;
+ return *slot;
}
rtx
static rtx
lookup_const_double (rtx real)
{
- void **slot = htab_find_slot (const_double_htab, real, INSERT);
+ rtx *slot = const_double_htab->find_slot (real, INSERT);
if (*slot == 0)
*slot = real;
- return (rtx) *slot;
+ return *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
static rtx
lookup_const_fixed (rtx fixed)
{
- void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
+ rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
if (*slot == 0)
*slot = fixed;
- return (rtx) *slot;
+ return *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
static rtx
lookup_const_wide_int (rtx wint)
{
- void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
+ rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
if (*slot == 0)
*slot = wint;
- return (rtx) *slot;
+ return *slot;
}
#endif
mem_attrs *attrs;
/* Reset register attributes */
- htab_empty (reg_attrs_htab);
+ reg_attrs_htab->empty ();
/* We need reg_raw_mode, so initialize the modes now. */
init_reg_modes_target ();
/* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
CONST_FIXED, and memory attribute hash tables. */
- const_int_htab = htab_create_ggc (37, const_int_htab_hash,
- const_int_htab_eq, NULL);
+ const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
#if TARGET_SUPPORTS_WIDE_INT
- const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
- const_wide_int_htab_eq, NULL);
+ const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
- const_double_htab = htab_create_ggc (37, const_double_htab_hash,
- const_double_htab_eq, NULL);
+ const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
- const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
- const_fixed_htab_eq, NULL);
+ const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
- reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
- reg_attrs_htab_eq, NULL);
+ reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
#ifdef INIT_EXPANDERS
/* This is to initialize {init|mark|free}_machine_status before the first
}
}
+/* Finish off the created gt_clear_caches_file_c functions. */
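+
+/* For a cached root such as const_int_htab in emit-rtl.c, the generated
+   code looks roughly like this (the per-file suffix is the mangled source
+   file name):
+
+       void
+       gt_clear_caches_<mangled file name> ()
+       {
+         gt_cleare_cache (const_int_htab);
+       }
+
+   and the gt_clear_caches () definition emitted into each language base
+   file simply calls every such per-file function.  */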
+
+static void
+finish_cache_funcs (flist *flp)
+{
+ struct flist *fli2;
+
+ for (fli2 = flp; fli2; fli2 = fli2->next)
+ if (fli2->started_p)
+ {
+ oprintf (fli2->f, "}\n\n");
+ }
+
+ for (fli2 = flp; fli2 && base_files; fli2 = fli2->next)
+ if (fli2->started_p)
+ {
+ lang_bitmap bitmap = get_lang_bitmap (fli2->file);
+ int fnum;
+
+ for (fnum = 0; bitmap != 0; fnum++, bitmap >>= 1)
+ if (bitmap & 1)
+ {
+ oprintf (base_files[fnum], "extern void gt_clear_caches_");
+ put_mangled_filename (base_files[fnum], fli2->file);
+ oprintf (base_files[fnum], " ();\n");
+ }
+ }
+
+ for (size_t fnum = 0; base_files && fnum < num_lang_dirs; fnum++)
+ oprintf (base_files[fnum], "void\ngt_clear_caches ()\n{\n");
+
+ for (fli2 = flp; fli2; fli2 = fli2->next)
+ if (fli2->started_p)
+ {
+ lang_bitmap bitmap = get_lang_bitmap (fli2->file);
+ int fnum;
+
+ fli2->started_p = 0;
+
+ for (fnum = 0; base_files && bitmap != 0; fnum++, bitmap >>= 1)
+ if (bitmap & 1)
+ {
+ oprintf (base_files[fnum], " gt_clear_caches_");
+ put_mangled_filename (base_files[fnum], fli2->file);
+ oprintf (base_files[fnum], " ();\n");
+ }
+ }
+
+ for (size_t fnum = 0; base_files && fnum < num_lang_dirs; fnum++)
+ {
+ oprintf (base_files[fnum], "}\n");
+ }
+}
+
/* Write the first three fields (pointer, count and stride) for
root NAME to F. V and LINE are as for write_root.
;
else if (strcmp (o->name, "if_marked") == 0)
;
+ else if (strcmp (o->name, "cache") == 0)
+ ;
else
error_at_line (&v->line,
"global `%s' has unknown option `%s'",
finish_root_table (flp, "ggc_rc", "LAST_GGC_CACHE_TAB", "ggc_cache_tab",
"gt_ggc_cache_rtab");
+ for (v = variables; v; v = v->next)
+ {
+ outf_p f = get_output_file_with_visibility (CONST_CAST (input_file*,
+ v->line.file));
+ struct flist *fli;
+ bool cache = false;
+ options_p o;
+
+ for (o = v->opt; o; o = o->next)
+ if (strcmp (o->name, "cache") == 0)
+ cache = true;
+ if (!cache)
+ continue;
+
+ for (fli = flp; fli; fli = fli->next)
+ if (fli->f == f)
+ break;
+ if (!fli->started_p)
+ {
+ fli->started_p = 1;
+
+ oprintf (f, "void\ngt_clear_caches_");
+ put_mangled_filename (f, v->line.file);
+ oprintf (f, " ()\n{\n");
+ }
+
+ oprintf (f, " gt_cleare_cache (%s);\n", v->name);
+ }
+
+ finish_cache_funcs (flp);
+
if (!emit_pch)
return;
for (ct = gt_ggc_cache_rtab; *ct; ct++)
ggc_scan_cache_tab (*ct);
+ gt_clear_caches ();
+
FOR_EACH_VEC_ELT (extra_cache_vec, i, ctp)
ggc_scan_cache_tab (ctp);
function. */
extern void gt_pch_note_reorder (void *, void *, gt_handle_reorder);
+/* Generated function to clear caches in gc memory.  */
+extern void gt_clear_caches ();
+
/* Mark the object in the first parameter and anything it points to. */
typedef void (*gt_pointer_walker) (void *);
}
};
+/* Hasher for cache entries in gc memory.  */
+
+template<typename T>
+struct ggc_cache_hasher
+{
+ typedef T value_type;
+ typedef T compare_type;
+ typedef int store_values_directly;
+
+ static void remove (T &) {}
+
+ /* Entries are weakly held because this is for caches. */
+
+ static void ggc_mx (T &) {}
+
+ static void
+ pch_nx (T &p)
+ {
+ extern void gt_pch_nx (T &);
+ gt_pch_nx (p);
+ }
+
+ static void
+ pch_nx (T &p, gt_pointer_operator op, void *cookie)
+ {
+ op (&p, cookie);
+ }
+
+ /* Clear out entries if they are about to be gc'd. */
+
+ static void
+ handle_cache_entry (T &e)
+ {
+ if (e != HTAB_EMPTY_ENTRY && e != HTAB_DELETED_ENTRY && !ggc_marked_p (e))
+ e = static_cast<T> (HTAB_DELETED_ENTRY);
+ }
+};
+
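+/* A typical user derives a concrete hasher from ggc_cache_hasher and
+   declares the table with the "cache" GTY attribute, e.g. (illustrative
+   names; see emit-rtl.c and tree.c for real instances):
+
+       struct foo_hasher : ggc_cache_hasher<rtx>
+       {
+         static hashval_t hash (rtx x);
+         static bool equal (rtx a, rtx b);
+       };
+
+       static GTY ((cache)) hash_table<foo_hasher> *foo_htab;  */
+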
/* Table of primes and their inversion information. */
op (&h->m_entries, cookie);
}
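+
+/* Walk the entries of cache H and let its hasher's handle_cache_entry
+   hook drop those the collector no longer considers live; called from
+   gengtype-generated code for roots declared with the "cache" GTY
+   attribute.  */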
+template<typename H>
+inline void
+gt_cleare_cache (hash_table<H> *h)
+{
+ if (!h)
+ return;
+
+ for (typename hash_table<H>::iterator iter = h->begin (); iter != h->end ();
+ ++iter)
+ H::handle_cache_entry (*iter);
+}
+
#endif /* TYPED_HASHTAB_H */
object in the low part of a 4-byte register, the OFFSET field
will be -3 rather than 0. */
-struct GTY(()) reg_attrs {
+struct GTY((for_user)) reg_attrs {
tree decl; /* decl corresponding to REG. */
HOST_WIDE_INT offset; /* Offset from start of DECL. */
};
/* Hash table and temporary node for larger integer const values. */
static GTY (()) tree int_cst_node;
-static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
- htab_t int_cst_hash_table;
+
+struct int_cst_hasher : ggc_cache_hasher<tree>
+{
+ static hashval_t hash (tree t);
+ static bool equal (tree x, tree y);
+};
+
+static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
/* Hash table for optimization flags and target option flags. Use the same
hash table for both sets of options. Nodes for building the current
allocating and freeing up a node repeatably. */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;
-static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
- htab_t cl_option_hash_table;
+
+struct cl_option_hasher : ggc_cache_hasher<tree>
+{
+ static hashval_t hash (tree t);
+ static bool equal (tree x, tree y);
+};
+
+static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables. */
static void set_type_quals (tree, int);
static int type_hash_eq (const void *, const void *);
static hashval_t type_hash_hash (const void *);
-static hashval_t int_cst_hash_hash (const void *);
-static int int_cst_hash_eq (const void *, const void *);
-static hashval_t cl_option_hash_hash (const void *);
-static int cl_option_hash_eq (const void *, const void *);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
tree_decl_map_eq, 0);
- int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
- int_cst_hash_eq, NULL);
+ int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
int_cst_node = make_int_cst (1, 1);
- cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
- cl_option_hash_eq, NULL);
+ cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
cl_optimization_node = make_node (OPTIMIZATION_NODE);
cl_target_option_node = make_node (TARGET_OPTION_NODE);
/* Return the hash code code X, an INTEGER_CST. */
-static hashval_t
-int_cst_hash_hash (const void *x)
+hashval_t
+int_cst_hasher::hash (tree x)
{
- const_tree const t = (const_tree) x;
+ const_tree const t = x;
hashval_t code = htab_hash_pointer (TREE_TYPE (t));
int i;
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
is the same as that given by *Y, which is the same. */
-static int
-int_cst_hash_eq (const void *x, const void *y)
+bool
+int_cst_hasher::equal (tree x, tree y)
{
- const_tree const xt = (const_tree) x;
- const_tree const yt = (const_tree) y;
+ const_tree const xt = x;
+ const_tree const yt = y;
if (TREE_TYPE (xt) != TREE_TYPE (yt)
|| TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
{
/* Use the cache of larger shared ints, using int_cst_node as
a temporary. */
- void **slot;
TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
TREE_TYPE (int_cst_node) = type;
- slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
- t = (tree) *slot;
+ tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
+ t = *slot;
if (!t)
{
/* Insert this one into the hash table. */
/* The value either hashes properly or we drop it on the floor
for the gc to take care of. There will not be enough of them
to worry about. */
- void **slot;
tree nt = build_new_int_cst (type, cst);
- slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
- t = (tree) *slot;
+ tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
+ t = *slot;
if (!t)
{
/* Insert this one into the hash table. */
else
{
/* Use the cache of larger shared ints. */
- void **slot;
-
- slot = htab_find_slot (int_cst_hash_table, t, INSERT);
+ tree *slot = int_cst_hash_table->find_slot (t, INSERT);
/* If there is already an entry for the number verify it's the
same. */
if (*slot)
/* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
-static hashval_t
-cl_option_hash_hash (const void *x)
+hashval_t
+cl_option_hasher::hash (tree x)
{
- const_tree const t = (const_tree) x;
+ const_tree const t = x;
const char *p;
size_t i;
size_t len = 0;
TARGET_OPTION tree node) is the same as that given by *Y, which is the
same. */
-static int
-cl_option_hash_eq (const void *x, const void *y)
+bool
+cl_option_hasher::equal (tree x, tree y)
{
- const_tree const xt = (const_tree) x;
- const_tree const yt = (const_tree) y;
+ const_tree const xt = x;
+ const_tree const yt = y;
const char *xp;
const char *yp;
size_t len;
build_optimization_node (struct gcc_options *opts)
{
tree t;
- void **slot;
/* Use the cache of optimization nodes. */
cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
opts);
- slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
- t = (tree) *slot;
+ tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
+ t = *slot;
if (!t)
{
/* Insert this one into the hash table. */
build_target_option_node (struct gcc_options *opts)
{
tree t;
- void **slot;
/* Use the cache of optimization nodes. */
cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
opts);
- slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
- t = (tree) *slot;
+ tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
+ t = *slot;
if (!t)
{
/* Insert this one into the hash table. */
return t;
}
-/* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
- Called through htab_traverse. */
-
-static int
-prepare_target_option_node_for_pch (void **slot, void *)
-{
- tree node = (tree) *slot;
- if (TREE_CODE (node) == TARGET_OPTION_NODE)
- TREE_TARGET_GLOBALS (node) = NULL;
- return 1;
-}
-
/* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
so that they aren't saved during PCH writing. */
void
prepare_target_option_nodes_for_pch (void)
{
- htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
- NULL);
+ hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
+ for (; iter != cl_option_hash_table->end (); ++iter)
+ if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
+ TREE_TARGET_GLOBALS (*iter) = NULL;
}
/* Determine the "ultimate origin" of a block. The block may be an inlined