/* gcc/vec.h */
1 /* Vector API for GNU compiler.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Nathan Sidwell <nathan@codesourcery.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #ifndef GCC_VEC_H
23 #define GCC_VEC_H
24
25 #include "statistics.h" /* For MEM_STAT_DECL. */
26
27 /* The macros here implement a set of templated vector types and
28 associated interfaces. These templates are implemented with
29 macros, as we're not in C++ land. The interface functions are
30 typesafe and use static inline functions, sometimes backed by
31 out-of-line generic functions. The vectors are designed to
32 interoperate with the GTY machinery.
33
34 Because of the different behavior of structure objects, scalar
35 objects and of pointers, there are three flavors, one for each of
36 these variants. Both the structure object and pointer variants
37 pass pointers to objects around -- in the former case the pointers
38 are stored into the vector and in the latter case the pointers are
39 dereferenced and the objects copied into the vector. The scalar
40 object variant is suitable for int-like objects, and the vector
41 elements are returned by value.
42
43 There are both 'index' and 'iterate' accessors. The iterator
44 returns a boolean iteration condition and updates the iteration
45 variable passed by reference. Because the iterator will be
46 inlined, the address-of can be optimized away.
47
48 The vectors are implemented using the trailing array idiom, thus
49 they are not resizeable without changing the address of the vector
50 object itself. This means you cannot have variables or fields of
51 vector type -- always use a pointer to a vector. The one exception
52 is the final field of a structure, which could be a vector type.
53 You will have to use the embedded_size & embedded_init calls to
54 create such objects, and they will probably not be resizeable (so
55 don't use the 'safe' allocation variants). The trailing array
56 idiom is used (rather than a pointer to an array of data), because,
57 if we allow NULL to also represent an empty vector, empty vectors
58 occupy minimal space in the structure containing them.
59
60 Each operation that increases the number of active elements is
61 available in 'quick' and 'safe' variants. The former presumes that
62 there is sufficient allocated space for the operation to succeed
63 (it dies if there is not). The latter will reallocate the
64 vector, if needed. Reallocation causes an exponential increase in
65 vector size. If you know you will be adding N elements, it would
66 be more efficient to use the reserve operation before adding the
67 elements with the 'quick' operation. This will ensure there are at
68 least as many elements as you ask for, it will exponentially
69 increase if there are too few spare slots. If you want to reserve a
70 specific number of slots, but do not want the exponential increase
71 (for instance, you know this is the last allocation), use the
72 reserve_exact operation. You can also create a vector of a
73 specific size from the get go.
74
75 You should prefer the push and pop operations, as they append and
76 remove from the end of the vector. If you need to remove several
77 items in one go, use the truncate operation. The insert and remove
78 operations allow you to change elements in the middle of the
79 vector. There are two remove operations, one which preserves the
80 element ordering 'ordered_remove', and one which does not
81 'unordered_remove'. The latter function copies the end element
82 into the removed slot, rather than invoke a memmove operation. The
83 'lower_bound' function will determine where to place an item in the
84 array using insert that will maintain sorted order.
85
86 When a vector type is defined, first a non-memory managed version
87 is created. You can then define either or both garbage collected
88 and heap allocated versions. The allocation mechanism is specified
89 when the type is defined, and is therefore part of the type. If
90 you need both gc'd and heap allocated versions, you still must have
91 *exactly* one definition of the common non-memory managed base vector.
92
93 If you need to directly manipulate a vector, then the 'address'
94 accessor will return the address of the start of the vector. Also
95 the 'space' predicate will tell you whether there is spare capacity
96 in the vector. You will not normally need to use these two functions.
97
98 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
99 get the non-memory allocation version, and then a
100 DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
101 vectors. Variables of vector type are declared using a
102 VEC(TYPEDEF,ALLOC) macro. The ALLOC argument specifies the
103 allocation strategy, and can be either 'gc' or 'heap' for garbage
104 collected and heap allocated respectively. It can be 'none' to get
105 a vector that must be explicitly allocated (for instance as a
106 trailing array of another structure). The characters O, P and I
107 indicate whether TYPEDEF is a pointer (P), object (O) or integral
108 (I) type. Be careful to pick the correct one, as you'll get an
109 awkward and inefficient API if you use the wrong one. There is a
110 check, which results in a compile-time warning, for the P and I
111 versions, but there is no check for the O versions, as that is not
112 possible in plain C. Due to the way GTY works, you must annotate
113 any structures you wish to insert or reference from a vector with a
114 GTY(()) tag. You need to do this even if you never declare the GC
115 allocated variants.
116
117 An example of their use would be,
118
119 DEF_VEC_P(tree); // non-managed tree vector.
120 DEF_VEC_ALLOC_P(tree,gc); // gc'd vector of tree pointers. This must
121 // appear at file scope.
122
123 struct my_struct {
124 VEC(tree,gc) *v; // A (pointer to) a vector of tree pointers.
125 };
126
127 struct my_struct *s;
128
129 if (VEC_length(tree,s->v)) { we have some contents }
130 VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
131 for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
132 { do something with elt }
133
134 */
135
/* Macros to invoke API calls.  A single macro works for both pointer
   and object vectors, but the argument and return types might well be
   different.  In each macro, T is the typedef of the vector elements,
   and A is the allocation strategy.  The allocation strategy is only
   present when it is required.  Some of these macros pass the vector,
   V, by reference (by taking its address), this is noted in the
   descriptions.  */

/* Length of vector
   unsigned VEC_T_length(const VEC(T) *v);

   Return the number of active elements in V.  V can be NULL, in which
   case zero is returned.  */

#define VEC_length(T,V)	(VEC_OP(T,base,length)(VEC_BASE(V)))


/* Check if vector is empty
   int VEC_T_empty(const VEC(T) *v);

   Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */

#define VEC_empty(T,V)	(VEC_length (T,V) == 0)


/* Get the final element of the vector.
   T VEC_T_last(VEC(T) *v); // Integer
   T VEC_T_last(VEC(T) *v); // Pointer
   T *VEC_T_last(VEC(T) *v); // Object

   Return the final element.  V must not be empty.  */

#define VEC_last(T,V)	(VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))

/* Index into vector
   T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
   T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
   T *VEC_T_index(VEC(T) *v, unsigned ix); // Object

   Return the IX'th element.  IX must be in the domain of V.  */

#define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Iterate over vector
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object

   Return iteration condition and update PTR to point to the IX'th
   element.  At the end of iteration, sets PTR to NULL (or (T)0 for
   integral vectors).  Use this to iterate over the elements of a
   vector as follows,

     for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
       continue;  */

#define VEC_iterate(T,V,I,P)	(VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))

/* Convenience macro for forward iteration.  */

#define FOR_EACH_VEC_ELT(T, V, I, P)		\
  for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))

/* Likewise, but start from FROM rather than 0.  */

#define FOR_EACH_VEC_ELT_FROM(T, V, I, P, FROM)		\
  for (I = (FROM); VEC_iterate (T, (V), (I), (P)); ++(I))

/* Convenience macro for reverse iteration.  I must be an unsigned
   variable; on an empty vector the initializer wraps to UINT_MAX,
   which VEC_iterate rejects, so the loop body never runs.  */

#define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P)	\
  for (I = VEC_length (T, (V)) - 1;		\
       VEC_iterate (T, (V), (I), (P));		\
       (I)--)
209
/* Allocate new vector.
   VEC(T,A) *VEC_T_A_alloc(int reserve);

   Allocate a new vector with space for RESERVE objects.  If RESERVE
   is zero, NO vector is created (NULL is the canonical empty
   vector).  */

#define VEC_alloc(T,A,N)	(VEC_OP(T,A,alloc)(N MEM_STAT_INFO))

/* Free a vector.
   void VEC_T_A_free(VEC(T,A) *&);

   Free a vector and set it to NULL.  Passes V by reference so the
   caller's pointer is nulled out.  */

#define VEC_free(T,A,V)	(VEC_OP(T,A,free)(&V))

/* Use these to determine the required size and initialization of a
   vector embedded within another structure (as the final member).

   size_t VEC_T_embedded_size(int reserve);
   void VEC_T_embedded_init(VEC(T) *v, int reserve);

   These allow the caller to perform the memory allocation.  Embedded
   vectors cannot be safely reallocated, so avoid the 'safe'
   operations on them.  */

#define VEC_embedded_size(T,N)	 (VEC_OP(T,base,embedded_size)(N))
#define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))

/* Copy a vector.
   VEC(T,A) *VEC_T_A_copy(VEC(T) *);

   Copy the live elements of a vector into a new vector.  The new and
   old vectors need not be allocated by the same mechanism.  */

#define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))

/* Determine if a vector has additional capacity.

   int VEC_T_space (VEC(T) *v,int reserve)

   If V has space for RESERVE additional entries, return nonzero.  You
   usually only need to use this if you are doing your own vector
   reallocation, for instance on an embedded vector.  This returns
   nonzero in exactly the same circumstances that VEC_T_reserve
   will.  */

#define VEC_space(T,V,R) \
	(VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))
256
/* Reserve space.
   int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will
   create additional headroom (exponential growth policy).  Note this
   can cause V to be reallocated.  Returns nonzero iff reallocation
   actually occurred.  */

#define VEC_reserve(T,A,V,R)	\
	(VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))

/* Reserve space exactly.
   int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will not
   create additional headroom.  Note this can cause V to be
   reallocated.  Returns nonzero iff reallocation actually
   occurred.  */

#define VEC_reserve_exact(T,A,V,R)	\
	(VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))

/* Copy elements with no reallocation
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Integer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Pointer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST is assumed to have sufficient headroom
   available.  */

#define VEC_splice(T,DST,SRC)			\
  (VEC_OP(T,base,splice)(VEC_BASE(DST), VEC_BASE(SRC) VEC_CHECK_INFO))

/* Copy elements with reallocation
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Integer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Pointer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST need not have sufficient headroom and will be
   reallocated if needed.  */

#define VEC_safe_splice(T,A,DST,SRC)					\
  (VEC_OP(T,A,safe_splice)(&(DST), VEC_BASE(SRC) VEC_CHECK_INFO MEM_STAT_INFO))

/* Push object with no reallocation
   T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
   T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
   T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object

   Push a new element onto the end, returns a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  There must
   be sufficient space in the vector.  */

#define VEC_quick_push(T,V,O)	\
	(VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))

/* Push object with reallocation
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object

   Push a new element onto the end, returns a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  Reallocates V, if needed.  */

#define VEC_safe_push(T,A,V,O)		\
	(VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
329
/* Pop element off end
   T VEC_T_pop (VEC(T) *v);		// Integer
   T VEC_T_pop (VEC(T) *v);		// Pointer
   void VEC_T_pop (VEC(T) *v);		// Object

   Pop the last element off the end.  Returns the element popped, for
   pointer vectors.  V must not be empty.  */

#define VEC_pop(T,V)	(VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))

/* Truncate to specific length
   void VEC_T_truncate (VEC(T) *v, unsigned len);

   Set the length as specified.  The new length must be less than or
   equal to the current length.  This is an O(1) operation; no storage
   is released.  */

#define VEC_truncate(T,V,I)		\
	(VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  The LEN must be as
   long or longer than the current length.  The new elements are
   uninitialized.  */

#define VEC_safe_grow(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  The LEN must be as
   long or longer than the current length.  The new elements are
   initialized to zero.  */

#define VEC_safe_grow_cleared(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
368
/* Replace element
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
   T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object

   Replace the IXth element of V with a new value, VAL.  For pointer
   vectors returns the original value.  For object vectors returns a
   pointer to the new value.  For object vectors the new value can be
   NULL, in which case no overwriting of the slot is actually
   performed.  */

#define VEC_replace(T,V,I,O)		\
	(VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with no reallocation
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For vectors of object, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  There must be sufficient space.  */

#define VEC_quick_insert(T,V,I,O)	\
	(VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with reallocation
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For vectors of object, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  Reallocate V, if necessary.  */

#define VEC_safe_insert(T,A,V,I,O)	\
	(VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))

/* Remove element retaining order
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
   void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object

   Remove an element from the IXth position of V.  Ordering of
   remaining elements is preserved.  For pointer vectors returns the
   removed object.  This is an O(N) operation due to a memmove.  */

#define VEC_ordered_remove(T,V,I)	\
	(VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove element destroying order
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
   void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object

   Remove an element from the IXth position of V by moving the last
   element into the hole.  Ordering of remaining elements is
   destroyed.  For pointer vectors returns the removed object.  This
   is an O(1) operation.  */

#define VEC_unordered_remove(T,V,I)	\
	(VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove a block of elements
   void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);

   Remove LEN elements starting at the IXth.  Ordering is retained.
   This is an O(N) operation due to memmove.  */

#define VEC_block_remove(T,V,I,L)	\
	(VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))
441
/* Get the address of the array of elements
   T *VEC_T_address (VEC(T) v)

   If you need to directly manipulate the array (for instance, you
   want to feed it to qsort), use this accessor.  Returns NULL for an
   empty (NULL) vector.  */

#define VEC_address(T,V)		(VEC_OP(T,base,address)(VEC_BASE(V)))

/* Conveniently sort the contents of the vector with qsort.
   void VEC_qsort (VEC(T) *v, int (*cmp_func)(const void *, const void *)) */

#define VEC_qsort(T,V,CMP) qsort(VEC_address (T,V), VEC_length(T,V),	\
				 sizeof (T), CMP)

/* Find the first index in the vector not less than the object.
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T)); // Integer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T)); // Pointer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
                               bool (*lessthan) (const T*, const T*)); // Object

   Find the first position in which VAL could be inserted without
   changing the ordering of V.  V must already be sorted with respect
   to LESSTHAN.  LESSTHAN is a function that returns true if the first
   argument is strictly less than the second.  */

#define VEC_lower_bound(T,V,O,LT)    \
       (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
470
/* Reallocate an array of elements with prefix.  These are the
   out-of-line allocator entry points (defined in vec.c) backing the
   'gc' and 'heap' allocation strategies; the _p_ variants handle
   pointer/integral vectors, the _o_ variants take the element size
   and the offset of the trailing array for object vectors.  */
extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
				     MEM_STAT_DECL);
extern void ggc_free (void *);
#define vec_gc_free(V) ggc_free (V)
extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
				       MEM_STAT_DECL);
extern void dump_vec_loc_statistics (void);
extern void vec_heap_free (void *);
486
#if ENABLE_CHECKING
/* With checking enabled, every checked operation carries the source
   location of its invocation so vec_assert_fail can report where the
   violation occurred.  INFO supplies the location at a call site,
   DECL declares the extra parameters, PASS forwards them.  */
#define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
#define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
#define VEC_CHECK_PASS ,file_,line_,function_

#define VEC_ASSERT(EXPR,OP,T,A) \
  (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))

extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
     ATTRIBUTE_NORETURN;
#define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
#else
/* Checking disabled: the location parameters vanish and VEC_ASSERT
   merely evaluates its expression (for any side effects).  */
#define VEC_CHECK_INFO
#define VEC_CHECK_DECL
#define VEC_CHECK_PASS
#define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
#endif
504
/* Note: gengtype has hardwired knowledge of the expansions of the
   VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros.  If you change the
   expansions of these macros you may need to change gengtype too.  */

/* Bookkeeping common to all vector flavors; it immediately precedes
   the trailing element array of every vector.  */
typedef struct GTY(()) vec_prefix
{
  unsigned num;		/* Number of active (live) elements.  */
  unsigned alloc;	/* Number of allocated slots.  */
} vec_prefix;
514
/* Name of the vector type for element type T and allocation strategy
   A, and of one of its operations OP.  */
#define VEC(T,A) VEC_##T##_##A
#define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP

/* Base of vector type, not user visible.  The elements live in a
   trailing array (declared [1] and over-allocated).  */
#define VEC_T(T,B)							  \
typedef struct VEC(T,B) 				 		  \
{									  \
  struct vec_prefix prefix;						  \
  T vec[1];								  \
} VEC(T,B)

#define VEC_T_GTY(T,B)							  \
typedef struct GTY(()) VEC(T,B)				 		  \
{									  \
  struct vec_prefix prefix;						  \
  T GTY ((length ("%h.prefix.num"))) vec[1];				  \
} VEC(T,B)

/* Derived vector type, user visible.  It simply wraps the base type,
   so the allocation strategy becomes part of the type.  */
#define VEC_TA_GTY(T,B,A,GTY)						  \
typedef struct GTY VEC(T,A)						  \
{									  \
  VEC(T,B) base;							  \
} VEC(T,A)

#define VEC_TA(T,B,A)							  \
typedef struct VEC(T,A)							  \
{									  \
  VEC(T,B) base;							  \
} VEC(T,A)

/* Convert to base type.  NULL maps to NULL.  With GCC >= 4.0 the
   offsetof test lets the compiler drop the NULL check entirely when
   'base' is the first member (the common case), since the pointer
   values are then identical.  */
#if GCC_VERSION >= 4000
#define VEC_BASE(P) \
  ((offsetof (__typeof (*P), base) == 0 || (P)) ? &(P)->base : 0)
#else
#define VEC_BASE(P)  ((P) ? &(P)->base : 0)
#endif
553
/* Vector of integer-like object.  The must_be function is a
   compile-time check that T really is integral (~ is only valid on
   integer types).  */
#define DEF_VEC_I(T)							  \
static inline void VEC_OP (T,must_be,integral_type) (void) 		  \
{									  \
  (void)~(T)0;								  \
}									  \
									  \
VEC_T(T,base);								  \
VEC_TA(T,base,none);							  \
DEF_VEC_FUNC_P(T)							  \
struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_I(T,A)						  \
VEC_TA(T,base,A);							  \
DEF_VEC_ALLOC_FUNC_I(T,A)						  \
DEF_VEC_NONALLOC_FUNCS_I(T,A)						  \
struct vec_swallow_trailing_semi

/* Vector of pointer to object.  The must_be function is a
   compile-time check (a warning on mismatch) that T is a pointer
   type.  */
#define DEF_VEC_P(T) 							  \
static inline void VEC_OP (T,must_be,pointer_type) (void) 		  \
{									  \
  (void)((T)1 == (void *)1);						  \
}									  \
									  \
VEC_T_GTY(T,base);							  \
VEC_TA(T,base,none);							  \
DEF_VEC_FUNC_P(T)							  \
struct vec_swallow_trailing_semi
#define DEF_VEC_ALLOC_P(T,A)						  \
VEC_TA(T,base,A);							  \
DEF_VEC_ALLOC_FUNC_P(T,A)						  \
DEF_VEC_NONALLOC_FUNCS_P(T,A)						  \
struct vec_swallow_trailing_semi
587
/* Define the allocator-independent operations shared by pointer and
   integral vectors (elements are passed and returned by value).
   These are the "base" inline functions behind the VEC_* invocation
   macros above; the VEC_ASSERT calls are active only when
   ENABLE_CHECKING is on.  */
#define DEF_VEC_FUNC_P(T)						  \
static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_)   \
{									  \
  return vec_ ? vec_->prefix.num : 0;					  \
}									  \
									  \
static inline T VEC_OP (T,base,last)					  \
     (const VEC(T,base) *vec_ VEC_CHECK_DECL)				  \
{									  \
  VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base);		  \
									  \
  return vec_->vec[vec_->prefix.num - 1];				  \
}									  \
									  \
static inline T VEC_OP (T,base,index)					  \
     (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)		  \
{									  \
  VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base);	  \
									  \
  return vec_->vec[ix_];						  \
}									  \
									  \
static inline int VEC_OP (T,base,iterate)				  \
     (const VEC(T,base) *vec_, unsigned ix_, T *ptr)			  \
{									  \
  if (vec_ && ix_ < vec_->prefix.num)					  \
    {									  \
      *ptr = vec_->vec[ix_];						  \
      return 1;								  \
    }									  \
  else									  \
    {									  \
      *ptr = (T) 0;							  \
      return 0;								  \
    }									  \
}									  \
									  \
static inline size_t VEC_OP (T,base,embedded_size)			  \
     (int alloc_)							  \
{									  \
  return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);		  \
}									  \
									  \
static inline void VEC_OP (T,base,embedded_init)			  \
     (VEC(T,base) *vec_, int alloc_)					  \
{									  \
  vec_->prefix.num = 0;							  \
  vec_->prefix.alloc = alloc_;						  \
}									  \
									  \
static inline int VEC_OP (T,base,space)					  \
     (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)			  \
{									  \
  VEC_ASSERT (alloc_ >= 0, "space", T, base);				  \
  return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
}									  \
									  \
static inline void VEC_OP(T,base,splice)				  \
     (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL)		  \
{									  \
  if (src_)								  \
    {									  \
      unsigned len_ = src_->prefix.num;					  \
      VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
									  \
      memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
      dst_->prefix.num += len_;						  \
    }									  \
}									  \
									  \
static inline T *VEC_OP (T,base,quick_push)				  \
     (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL)				  \
{									  \
  T *slot_;								  \
									  \
  VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base);	  \
  slot_ = &vec_->vec[vec_->prefix.num++];				  \
  *slot_ = obj_;							  \
									  \
  return slot_;								  \
}									  \
									  \
static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL)	  \
{									  \
  T obj_;								  \
									  \
  VEC_ASSERT (vec_->prefix.num, "pop", T, base);			  \
  obj_ = vec_->vec[--vec_->prefix.num];					  \
									  \
  return obj_;								  \
}									  \
									  \
static inline void VEC_OP (T,base,truncate)				  \
     (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)			  \
{									  \
  VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
  if (vec_)								  \
    vec_->prefix.num = size_;						  \
}									  \
									  \
static inline T VEC_OP (T,base,replace)					  \
     (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)		  \
{									  \
  T old_obj_;								  \
									  \
  VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base);		  \
  old_obj_ = vec_->vec[ix_];						  \
  vec_->vec[ix_] = obj_;						  \
									  \
  return old_obj_;							  \
}									  \
									  \
static inline T *VEC_OP (T,base,quick_insert)				  \
     (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL)		  \
{									  \
  T *slot_;								  \
									  \
  VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base);  \
  VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base);		  \
  slot_ = &vec_->vec[ix_];						  \
  memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T));	  \
  *slot_ = obj_;							  \
									  \
  return slot_;								  \
}									  \
									  \
static inline T VEC_OP (T,base,ordered_remove)				  \
     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)			  \
{									  \
  T *slot_;								  \
  T obj_;								  \
									  \
  VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);		  \
  slot_ = &vec_->vec[ix_];						  \
  obj_ = *slot_;							  \
  memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T));	  \
									  \
  return obj_;								  \
}									  \
									  \
static inline T VEC_OP (T,base,unordered_remove)			  \
     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)			  \
{									  \
  T *slot_;								  \
  T obj_;								  \
									  \
  VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base);		  \
  slot_ = &vec_->vec[ix_];						  \
  obj_ = *slot_;							  \
  *slot_ = vec_->vec[--vec_->prefix.num];				  \
									  \
  return obj_;								  \
}									  \
									  \
static inline void VEC_OP (T,base,block_remove)				  \
     (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL)	  \
{									  \
  T *slot_;								  \
									  \
  VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base);	  \
  slot_ = &vec_->vec[ix_];						  \
  vec_->prefix.num -= len_;						  \
  memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T));	  \
}									  \
									  \
static inline T *VEC_OP (T,base,address)				  \
     (VEC(T,base) *vec_)						  \
{									  \
  return vec_ ? vec_->vec : 0;						  \
}									  \
									  \
static inline unsigned VEC_OP (T,base,lower_bound)			  \
     (VEC(T,base) *vec_, const T obj_,					  \
      bool (*lessthan_)(const T, const T) VEC_CHECK_DECL)		  \
{									  \
   unsigned int len_ = VEC_OP (T,base, length) (vec_);			  \
   unsigned int half_, middle_;						  \
   unsigned int first_ = 0;						  \
   while (len_ > 0)							  \
     {									  \
        T middle_elem_;							  \
        half_ = len_ >> 1;						  \
        middle_ = first_;						  \
        middle_ += half_;						  \
        middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
        if (lessthan_ (middle_elem_, obj_))				  \
          {								  \
             first_ = middle_;						  \
             ++first_;							  \
             len_ = len_ - half_ - 1;					  \
          }								  \
        else								  \
          len_ = half_;							  \
     }									  \
   return first_;							  \
}
784
/* Define the allocation function for pointer/integral vectors using
   allocation strategy A; it delegates to the vec_##A##_p_* allocator
   entry points declared above.  */
#define DEF_VEC_ALLOC_FUNC_P(T,A)					  \
static inline VEC(T,A) *VEC_OP (T,A,alloc)				  \
     (int alloc_ MEM_STAT_DECL)						  \
{									  \
  return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_		  \
						 PASS_MEM_STAT);	  \
}
792
793
/* Define the remaining functions for pointer/integral vectors with
   allocation strategy A: free, copy, the reserve operations and the
   reallocating ('safe') mutators.  Fix vs. the previous revision:
   safe_splice now forwards the caller's memory-statistics location
   with PASS_MEM_STAT, like every other 'safe' operation here, instead
   of recording this macro's own location via MEM_STAT_INFO (the two
   are equivalent no-ops when memory statistics are disabled).  */
#define DEF_VEC_NONALLOC_FUNCS_P(T,A)					  \
static inline void VEC_OP (T,A,free)					  \
     (VEC(T,A) **vec_)							  \
{									  \
  if (*vec_)								  \
    vec_##A##_free (*vec_);						  \
  *vec_ = NULL;								  \
}									  \
									  \
static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
{									  \
  size_t len_ = vec_ ? vec_->prefix.num : 0;				  \
  VEC (T,A) *new_vec_ = NULL;						  \
									  \
  if (len_)								  \
    {									  \
      new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact		  \
			       (NULL, len_ PASS_MEM_STAT));		  \
									  \
      new_vec_->base.prefix.num = len_;					  \
      memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_);	  \
    }									  \
  return new_vec_;							  \
}									  \
									  \
static inline int VEC_OP (T,A,reserve)					  \
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)		  \
{									  \
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_		  \
				       VEC_CHECK_PASS);			  \
									  \
  if (extend)								  \
    *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
									  \
  return extend;							  \
}									  \
									  \
static inline int VEC_OP (T,A,reserve_exact)				  \
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)		  \
{									  \
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_		  \
				       VEC_CHECK_PASS);			  \
									  \
  if (extend)								  \
    *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_	  \
						    PASS_MEM_STAT);	  \
									  \
  return extend;							  \
}									  \
									  \
static inline void VEC_OP (T,A,safe_grow)				  \
     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)		  \
{									  \
  VEC_ASSERT (size_ >= 0						  \
	      && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
						 "grow", T, A);		  \
  VEC_OP (T,A,reserve_exact) (vec_,					  \
			      size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
			      VEC_CHECK_PASS PASS_MEM_STAT);		  \
  VEC_BASE (*vec_)->prefix.num = size_;					  \
}									  \
									  \
static inline void VEC_OP (T,A,safe_grow_cleared)			  \
     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)		  \
{									  \
  int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_);			  \
  VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT);	  \
  memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0,	  \
	  sizeof (T) * (size_ - oldsize));				  \
}									  \
									  \
static inline void VEC_OP(T,A,safe_splice)				  \
     (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL)	  \
{									  \
  if (src_)								  \
    {									  \
      VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num		  \
				  VEC_CHECK_PASS PASS_MEM_STAT);	  \
									  \
      VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_			  \
			      VEC_CHECK_PASS);				  \
    }									  \
}									  \
									  \
static inline T *VEC_OP (T,A,safe_push)					  \
     (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL)		  \
{									  \
  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);		  \
									  \
  return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
}									  \
									  \
static inline T *VEC_OP (T,A,safe_insert)				  \
     (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
{									  \
  VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT);		  \
									  \
  return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_	  \
				       VEC_CHECK_PASS);			  \
}
894
895 /* Vector of object. */
896 #define DEF_VEC_O(T) \
897 VEC_T_GTY(T,base); \
898 VEC_TA(T,base,none); \
899 DEF_VEC_FUNC_O(T) \
900 struct vec_swallow_trailing_semi
901 #define DEF_VEC_ALLOC_O(T,A) \
902 VEC_TA(T,base,A); \
903 DEF_VEC_ALLOC_FUNC_O(T,A) \
904 DEF_VEC_NONALLOC_FUNCS_O(T,A) \
905 struct vec_swallow_trailing_semi
906
907 #define DEF_VEC_FUNC_O(T) \
908 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
909 { \
910 return vec_ ? vec_->prefix.num : 0; \
911 } \
912 \
913 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
914 { \
915 VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base); \
916 \
917 return &vec_->vec[vec_->prefix.num - 1]; \
918 } \
919 \
920 static inline T *VEC_OP (T,base,index) \
921 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
922 { \
923 VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base); \
924 \
925 return &vec_->vec[ix_]; \
926 } \
927 \
928 static inline int VEC_OP (T,base,iterate) \
929 (VEC(T,base) *vec_, unsigned ix_, T **ptr) \
930 { \
931 if (vec_ && ix_ < vec_->prefix.num) \
932 { \
933 *ptr = &vec_->vec[ix_]; \
934 return 1; \
935 } \
936 else \
937 { \
938 *ptr = 0; \
939 return 0; \
940 } \
941 } \
942 \
943 static inline size_t VEC_OP (T,base,embedded_size) \
944 (int alloc_) \
945 { \
946 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
947 } \
948 \
949 static inline void VEC_OP (T,base,embedded_init) \
950 (VEC(T,base) *vec_, int alloc_) \
951 { \
952 vec_->prefix.num = 0; \
953 vec_->prefix.alloc = alloc_; \
954 } \
955 \
956 static inline int VEC_OP (T,base,space) \
957 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
958 { \
959 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
960 return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
961 } \
962 \
963 static inline void VEC_OP(T,base,splice) \
964 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \
965 { \
966 if (src_) \
967 { \
968 unsigned len_ = src_->prefix.num; \
969 VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
970 \
971 memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
972 dst_->prefix.num += len_; \
973 } \
974 } \
975 \
976 static inline T *VEC_OP (T,base,quick_push) \
977 (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL) \
978 { \
979 T *slot_; \
980 \
981 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base); \
982 slot_ = &vec_->vec[vec_->prefix.num++]; \
983 if (obj_) \
984 *slot_ = *obj_; \
985 \
986 return slot_; \
987 } \
988 \
989 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
990 { \
991 VEC_ASSERT (vec_->prefix.num, "pop", T, base); \
992 --vec_->prefix.num; \
993 } \
994 \
995 static inline void VEC_OP (T,base,truncate) \
996 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
997 { \
998 VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
999 if (vec_) \
1000 vec_->prefix.num = size_; \
1001 } \
1002 \
1003 static inline T *VEC_OP (T,base,replace) \
1004 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1005 { \
1006 T *slot_; \
1007 \
1008 VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base); \
1009 slot_ = &vec_->vec[ix_]; \
1010 if (obj_) \
1011 *slot_ = *obj_; \
1012 \
1013 return slot_; \
1014 } \
1015 \
1016 static inline T *VEC_OP (T,base,quick_insert) \
1017 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1018 { \
1019 T *slot_; \
1020 \
1021 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base); \
1022 VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base); \
1023 slot_ = &vec_->vec[ix_]; \
1024 memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T)); \
1025 if (obj_) \
1026 *slot_ = *obj_; \
1027 \
1028 return slot_; \
1029 } \
1030 \
1031 static inline void VEC_OP (T,base,ordered_remove) \
1032 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1033 { \
1034 T *slot_; \
1035 \
1036 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1037 slot_ = &vec_->vec[ix_]; \
1038 memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T)); \
1039 } \
1040 \
1041 static inline void VEC_OP (T,base,unordered_remove) \
1042 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1043 { \
1044 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1045 vec_->vec[ix_] = vec_->vec[--vec_->prefix.num]; \
1046 } \
1047 \
1048 static inline void VEC_OP (T,base,block_remove) \
1049 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
1050 { \
1051 T *slot_; \
1052 \
1053 VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base); \
1054 slot_ = &vec_->vec[ix_]; \
1055 vec_->prefix.num -= len_; \
1056 memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T)); \
1057 } \
1058 \
1059 static inline T *VEC_OP (T,base,address) \
1060 (VEC(T,base) *vec_) \
1061 { \
1062 return vec_ ? vec_->vec : 0; \
1063 } \
1064 \
1065 static inline unsigned VEC_OP (T,base,lower_bound) \
1066 (VEC(T,base) *vec_, const T *obj_, \
1067 bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL) \
1068 { \
1069 unsigned int len_ = VEC_OP (T, base, length) (vec_); \
1070 unsigned int half_, middle_; \
1071 unsigned int first_ = 0; \
1072 while (len_ > 0) \
1073 { \
1074 T *middle_elem_; \
1075 half_ = len_ >> 1; \
1076 middle_ = first_; \
1077 middle_ += half_; \
1078 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
1079 if (lessthan_ (middle_elem_, obj_)) \
1080 { \
1081 first_ = middle_; \
1082 ++first_; \
1083 len_ = len_ - half_ - 1; \
1084 } \
1085 else \
1086 len_ = half_; \
1087 } \
1088 return first_; \
1089 }
1090
1091 #define DEF_VEC_ALLOC_FUNC_O(T,A) \
1092 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1093 (int alloc_ MEM_STAT_DECL) \
1094 { \
1095 return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_, \
1096 offsetof (VEC(T,A),base.vec), \
1097 sizeof (T) \
1098 PASS_MEM_STAT); \
1099 }
1100
1101 #define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
1102 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1103 { \
1104 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1105 VEC (T,A) *new_vec_ = NULL; \
1106 \
1107 if (len_) \
1108 { \
1109 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1110 (NULL, len_, \
1111 offsetof (VEC(T,A),base.vec), sizeof (T) \
1112 PASS_MEM_STAT)); \
1113 \
1114 new_vec_->base.prefix.num = len_; \
1115 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1116 } \
1117 return new_vec_; \
1118 } \
1119 \
1120 static inline void VEC_OP (T,A,free) \
1121 (VEC(T,A) **vec_) \
1122 { \
1123 if (*vec_) \
1124 vec_##A##_free (*vec_); \
1125 *vec_ = NULL; \
1126 } \
1127 \
1128 static inline int VEC_OP (T,A,reserve) \
1129 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1130 { \
1131 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1132 VEC_CHECK_PASS); \
1133 \
1134 if (extend) \
1135 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1136 offsetof (VEC(T,A),base.vec),\
1137 sizeof (T) \
1138 PASS_MEM_STAT); \
1139 \
1140 return extend; \
1141 } \
1142 \
1143 static inline int VEC_OP (T,A,reserve_exact) \
1144 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1145 { \
1146 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1147 VEC_CHECK_PASS); \
1148 \
1149 if (extend) \
1150 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1151 (*vec_, alloc_, \
1152 offsetof (VEC(T,A),base.vec), \
1153 sizeof (T) PASS_MEM_STAT); \
1154 \
1155 return extend; \
1156 } \
1157 \
1158 static inline void VEC_OP (T,A,safe_grow) \
1159 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1160 { \
1161 VEC_ASSERT (size_ >= 0 \
1162 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1163 "grow", T, A); \
1164 VEC_OP (T,A,reserve_exact) (vec_, \
1165 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1166 VEC_CHECK_PASS PASS_MEM_STAT); \
1167 VEC_BASE (*vec_)->prefix.num = size_; \
1168 } \
1169 \
1170 static inline void VEC_OP (T,A,safe_grow_cleared) \
1171 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1172 { \
1173 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1174 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1175 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1176 sizeof (T) * (size_ - oldsize)); \
1177 } \
1178 \
1179 static inline void VEC_OP(T,A,safe_splice) \
1180 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1181 { \
1182 if (src_) \
1183 { \
1184 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1185 VEC_CHECK_PASS MEM_STAT_INFO); \
1186 \
1187 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1188 VEC_CHECK_PASS); \
1189 } \
1190 } \
1191 \
1192 static inline T *VEC_OP (T,A,safe_push) \
1193 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1194 { \
1195 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1196 \
1197 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1198 } \
1199 \
1200 static inline T *VEC_OP (T,A,safe_insert) \
1201 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \
1202 VEC_CHECK_DECL MEM_STAT_DECL) \
1203 { \
1204 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1205 \
1206 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1207 VEC_CHECK_PASS); \
1208 }
1209
1210 #define DEF_VEC_ALLOC_FUNC_I(T,A) \
1211 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1212 (int alloc_ MEM_STAT_DECL) \
1213 { \
1214 return (VEC(T,A) *) vec_##A##_o_reserve_exact \
1215 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \
1216 sizeof (T) PASS_MEM_STAT); \
1217 }
1218
1219 #define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
1220 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1221 { \
1222 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1223 VEC (T,A) *new_vec_ = NULL; \
1224 \
1225 if (len_) \
1226 { \
1227 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1228 (NULL, len_, \
1229 offsetof (VEC(T,A),base.vec), sizeof (T) \
1230 PASS_MEM_STAT)); \
1231 \
1232 new_vec_->base.prefix.num = len_; \
1233 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1234 } \
1235 return new_vec_; \
1236 } \
1237 \
1238 static inline void VEC_OP (T,A,free) \
1239 (VEC(T,A) **vec_) \
1240 { \
1241 if (*vec_) \
1242 vec_##A##_free (*vec_); \
1243 *vec_ = NULL; \
1244 } \
1245 \
1246 static inline int VEC_OP (T,A,reserve) \
1247 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1248 { \
1249 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1250 VEC_CHECK_PASS); \
1251 \
1252 if (extend) \
1253 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1254 offsetof (VEC(T,A),base.vec),\
1255 sizeof (T) \
1256 PASS_MEM_STAT); \
1257 \
1258 return extend; \
1259 } \
1260 \
1261 static inline int VEC_OP (T,A,reserve_exact) \
1262 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1263 { \
1264 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1265 VEC_CHECK_PASS); \
1266 \
1267 if (extend) \
1268 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1269 (*vec_, alloc_, offsetof (VEC(T,A),base.vec), \
1270 sizeof (T) PASS_MEM_STAT); \
1271 \
1272 return extend; \
1273 } \
1274 \
1275 static inline void VEC_OP (T,A,safe_grow) \
1276 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1277 { \
1278 VEC_ASSERT (size_ >= 0 \
1279 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1280 "grow", T, A); \
1281 VEC_OP (T,A,reserve_exact) (vec_, \
1282 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1283 VEC_CHECK_PASS PASS_MEM_STAT); \
1284 VEC_BASE (*vec_)->prefix.num = size_; \
1285 } \
1286 \
1287 static inline void VEC_OP (T,A,safe_grow_cleared) \
1288 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1289 { \
1290 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1291 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1292 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1293 sizeof (T) * (size_ - oldsize)); \
1294 } \
1295 \
1296 static inline void VEC_OP(T,A,safe_splice) \
1297 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1298 { \
1299 if (src_) \
1300 { \
1301 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1302 VEC_CHECK_PASS MEM_STAT_INFO); \
1303 \
1304 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1305 VEC_CHECK_PASS); \
1306 } \
1307 } \
1308 \
1309 static inline T *VEC_OP (T,A,safe_push) \
1310 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1311 { \
1312 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1313 \
1314 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1315 } \
1316 \
1317 static inline T *VEC_OP (T,A,safe_insert) \
1318 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \
1319 VEC_CHECK_DECL MEM_STAT_DECL) \
1320 { \
1321 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1322 \
1323 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1324 VEC_CHECK_PASS); \
1325 }
1326
1327 /* We support a vector which starts out with space on the stack and
1328 switches to heap space when forced to reallocate. This works a
1329 little differently. Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
1330 DEF_VEC_ALLOC_P_STACK(TYPE). This uses alloca to get the initial
1331 space; because alloca can not be usefully called in an inline
1332 function, and because a macro can not define a macro, you must then
1333 write a #define for each type:
1334
1335 #define VEC_{TYPE}_stack_alloc(alloc) \
1336 VEC_stack_alloc({TYPE}, alloc)
1337
1338 This is really a hack and perhaps can be made better. Note that
1339 this macro will wind up evaluating the ALLOC parameter twice.
1340
1341 Only the initial allocation will be made using alloca, so pass a
1342 reasonable estimate that doesn't use too much stack space; don't
1343 pass zero. Don't return a VEC(TYPE,stack) vector from the function
1344 which allocated it. */
1345
1346 extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
1347 extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
1348 extern void *vec_stack_p_reserve_exact_1 (int, void *);
1349 extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
1350 extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
1351 MEM_STAT_DECL);
1352 extern void vec_stack_free (void *);
1353
1354 /* Unfortunately, we cannot use MEM_STAT_DECL here. */
1355 #if GATHER_STATISTICS
1356 #define VEC_stack_alloc(T,alloc,name,line,function) \
1357 (VEC_OP (T,stack,alloc1) \
1358 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1359 #else
1360 #define VEC_stack_alloc(T,alloc) \
1361 (VEC_OP (T,stack,alloc1) \
1362 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1363 #endif
1364
1365 #define DEF_VEC_ALLOC_P_STACK(T) \
1366 VEC_TA(T,base,stack); \
1367 DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1368 DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
1369 struct vec_swallow_trailing_semi
1370
1371 #define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1372 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1373 (int alloc_, VEC(T,stack)* space) \
1374 { \
1375 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1376 }
1377
1378 #define DEF_VEC_ALLOC_O_STACK(T) \
1379 VEC_TA(T,base,stack); \
1380 DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1381 DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
1382 struct vec_swallow_trailing_semi
1383
1384 #define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1385 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1386 (int alloc_, VEC(T,stack)* space) \
1387 { \
1388 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1389 }
1390
1391 #define DEF_VEC_ALLOC_I_STACK(T) \
1392 VEC_TA(T,base,stack); \
1393 DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1394 DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
1395 struct vec_swallow_trailing_semi
1396
1397 #define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1398 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1399 (int alloc_, VEC(T,stack)* space) \
1400 { \
1401 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1402 }
1403
1404 #endif /* GCC_VEC_H */