1 /* Vector API for GNU compiler.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_VEC_H
22 #define GCC_VEC_H
23
24 /* The macros here implement a set of templated vector types and
25 associated interfaces. These templates are implemented with
26 macros, as we're not in C++ land. The interface functions are
27 typesafe and use static inline functions, sometimes backed by
28 out-of-line generic functions. The vectors are designed to
29 interoperate with the GTY machinery.
30
31 Because of the different behavior of structure objects, scalar
32 objects and of pointers, there are three flavors, one for each of
33 these variants. Both the structure object and pointer variants
34 pass pointers to objects around -- in the former case the pointers
35 are stored into the vector and in the latter case the pointers are
36 dereferenced and the objects copied into the vector. The scalar
37 object variant is suitable for int-like objects, and the vector
38 elements are returned by value.
39
40 There are both 'index' and 'iterate' accessors. The iterator
41 returns a boolean iteration condition and updates the iteration
42 variable passed by reference. Because the iterator will be
43 inlined, the address-of can be optimized away.
44
45 The vectors are implemented using the trailing array idiom, thus
46 they are not resizeable without changing the address of the vector
47 object itself. This means you cannot have variables or fields of
48 vector type -- always use a pointer to a vector. The one exception
49 is the final field of a structure, which could be a vector type.
50 You will have to use the embedded_size & embedded_init calls to
51 create such objects, and they will probably not be resizeable (so
52 don't use the 'safe' allocation variants). The trailing array
53 idiom is used (rather than a pointer to an array of data), because,
54 if we allow NULL to also represent an empty vector, empty vectors
55 occupy minimal space in the structure containing them.
56
57 Each operation that increases the number of active elements is
58 available in 'quick' and 'safe' variants. The former presumes that
59 there is sufficient allocated space for the operation to succeed
60 (it dies if there is not). The latter will reallocate the
61 vector, if needed. Reallocation causes an exponential increase in
62 vector size. If you know you will be adding N elements, it would
63 be more efficient to use the reserve operation before adding the
64 elements with the 'quick' operation. This will ensure there is space
65 for at least as many elements as you ask for; the allocation will grow
66 exponentially if there are too few spare slots. If you want to reserve a
67 specific number of slots, but do not want the exponential increase
68 (for instance, you know this is the last allocation), use the
69 reserve_exact operation. You can also create a vector of a
70 specific size from the get go.
71
72 You should prefer the push and pop operations, as they append and
73 remove from the end of the vector. If you need to remove several
74 items in one go, use the truncate operation. The insert and remove
75 operations allow you to change elements in the middle of the
76 vector. There are two remove operations, one which preserves the
77 element ordering 'ordered_remove', and one which does not
78 'unordered_remove'. The latter function copies the end element
79 into the removed slot, rather than invoke a memmove operation. The
80 'lower_bound' function will determine the index at which an item
81 should be inserted to maintain sorted order.
82
83 When a vector type is defined, first a non-memory managed version
84 is created. You can then define either or both garbage collected
85 and heap allocated versions. The allocation mechanism is specified
86 when the type is defined, and is therefore part of the type. If
87 you need both gc'd and heap allocated versions, you still must have
88 *exactly* one definition of the common non-memory managed base vector.
89
90 If you need to directly manipulate a vector, then the 'address'
91 accessor will return the address of the start of the vector. Also
92 the 'space' predicate will tell you whether there is spare capacity
93 in the vector. You will not normally need to use these two functions.
94
95 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
96 get the non-memory allocation version, and then a
97 DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
98 vectors. Variables of vector type are declared using a
99 VEC(TYPEDEF,ALLOC) macro. The ALLOC argument specifies the
100 allocation strategy, and can be either 'gc' or 'heap' for garbage
101 collected and heap allocated respectively. It can be 'none' to get
102 a vector that must be explicitly allocated (for instance as a
103 trailing array of another structure). The characters O, P and I
104 indicate whether TYPEDEF is a pointer (P), object (O) or integral
105 (I) type. Be careful to pick the correct one, as you'll get an
106 awkward and inefficient API if you use the wrong one. There is a
107 check, which results in a compile-time warning, for the P and I
108 versions, but there is no check for the O versions, as that is not
109 possible in plain C. Due to the way GTY works, you must annotate
110 any structures you wish to insert or reference from a vector with a
111 GTY(()) tag. You need to do this even if you never declare the GC
112 allocated variants.
113
114 An example of their use would be,
115
116 DEF_VEC_P(tree); // non-managed tree vector.
117 DEF_VEC_ALLOC_P(tree,gc); // gc'd vector of tree pointers. This must
118 // appear at file scope.
119
120 struct my_struct {
121 VEC(tree,gc) *v; // A (pointer to) a vector of tree pointers.
122 };
123
124 struct my_struct *s;
125
126 if (VEC_length(tree,s->v)) { we have some contents }
127 VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
128 for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
129 { do something with elt }
130
131 */
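
/* A corresponding sketch for the object (O) flavor, following the same
   conventions as the example above.  The typedef 'my_range' and the
   variable names are hypothetical; note the GTY(()) annotation that any
   structure stored in a vector requires.

   typedef struct GTY(()) my_range_d { unsigned start, len; } my_range;

   DEF_VEC_O(my_range);             // non-managed my_range vector.
   DEF_VEC_ALLOC_O(my_range,heap);  // heap allocated my_range vector.

   VEC(my_range,heap) *ranges = NULL;
   my_range r = { 0, 16 };
   my_range *slot;

   VEC_safe_push (my_range, heap, ranges, &r);  // the object is copied in
   slot = VEC_index (my_range, ranges, 0);      // elements come back as
   slot->len = 32;                              // pointers into the vector
   VEC_free (my_range, heap, ranges);
*/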
132
133 /* Macros to invoke API calls. A single macro works for both pointer
134 and object vectors, but the argument and return types might well be
135 different. In each macro, T is the typedef of the vector elements,
136 and A is the allocation strategy. The allocation strategy is only
137 present when it is required. Some of these macros pass the vector,
138 V, by reference (by taking its address); this is noted in the
139 descriptions. */
140
141 /* Length of vector
142 unsigned VEC_T_length(const VEC(T) *v);
143
144 Return the number of active elements in V. V can be NULL, in which
145 case zero is returned. */
146
147 #define VEC_length(T,V) (VEC_OP(T,base,length)(VEC_BASE(V)))
148
149
150 /* Check if vector is empty
151 int VEC_T_empty(const VEC(T) *v);
152
153 Return nonzero if V is an empty vector (or V is NULL), zero otherwise. */
154
155 #define VEC_empty(T,V) (VEC_length (T,V) == 0)
156
157
158 /* Get the final element of the vector.
159 T VEC_T_last(VEC(T) *v); // Integer
160 T VEC_T_last(VEC(T) *v); // Pointer
161 T *VEC_T_last(VEC(T) *v); // Object
162
163 Return the final element. V must not be empty. */
164
165 #define VEC_last(T,V) (VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))
166
167 /* Index into vector
168 T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
169 T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
170 T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
171
172 Return the IX'th element. IX must be in the domain of V. */
173
174 #define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))
175
176 /* Iterate over vector
177 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
178 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
179 int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
180
181 Return iteration condition and update PTR to point to the IX'th
182 element. At the end of iteration, sets PTR to NULL. Use this to
183 iterate over the elements of a vector as follows,
184
185 for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
186 continue; */
187
188 #define VEC_iterate(T,V,I,P) (VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))
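
/* For example (an illustrative sketch: 'v' is the VEC(tree,gc) from the
   example near the top of this file, 'ranges' is a hypothetical object
   vector, and 'examine' is a hypothetical function):

   unsigned ix;
   tree elt;
   my_range *rp;

   for (ix = 0; VEC_iterate (tree, v, ix, elt); ix++)
     examine (elt);      // pointer/integer elements are returned by value

   for (ix = 0; VEC_iterate (my_range, ranges, ix, rp); ix++)
     rp->len++;          // object elements are returned by pointer
*/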
189
190 /* Allocate new vector.
191 VEC(T,A) *VEC_T_A_alloc(int reserve);
192
193 Allocate a new vector with space for RESERVE objects. If RESERVE
194 is zero, NO vector is created. */
195
196 #define VEC_alloc(T,A,N) (VEC_OP(T,A,alloc)(N MEM_STAT_INFO))
197
198 /* Free a vector.
199 void VEC_T_A_free(VEC(T,A) *&);
200
201 Free a vector and set it to NULL. */
202
203 #define VEC_free(T,A,V) (VEC_OP(T,A,free)(&V))
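
/* A minimal allocation sketch, assuming DEF_VEC_ALLOC_P(tree,heap) has
   been defined; 'decl' is a hypothetical tree:

   VEC(tree,heap) *work = VEC_alloc (tree, heap, 8);  // space for 8 pushes

   VEC_quick_push (tree, work, decl);  // no reallocation needed yet
   ...
   VEC_free (tree, heap, work);        // releases storage, sets work to NULL
*/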
204
205 /* Use these to determine the required size and initialization of a
206 vector embedded within another structure (as the final member).
207
208 size_t VEC_T_embedded_size(int reserve);
209 void VEC_T_embedded_init(VEC(T) *v, int reserve);
210
211 These allow the caller to perform the memory allocation. */
212
213 #define VEC_embedded_size(T,N) (VEC_OP(T,base,embedded_size)(N))
214 #define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))
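
/* An illustrative sketch of an embedded vector, assuming DEF_VEC_P(tree)
   is in scope; 'my_node' is a hypothetical structure and 'nelems' a
   hypothetical element count.  The embedded vector must be the final
   field and uses the 'none' allocation strategy:

   struct my_node
   {
     int kind;
     VEC(tree,none) vals;     // trailing vector, explicitly allocated
   };

   size_t sz = offsetof (struct my_node, vals)
               + VEC_embedded_size (tree, nelems);
   struct my_node *n = (struct my_node *) xmalloc (sz);
   n->kind = 0;
   VEC_embedded_init (tree, &n->vals, nelems);
*/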
215
216 /* Copy a vector.
217 VEC(T,A) *VEC_T_A_copy(VEC(T) *);
218
219 Copy the live elements of a vector into a new vector. The new and
220 old vectors need not be allocated by the same mechanism. */
221
222 #define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))
223
224 /* Determine if a vector has additional capacity.
225
226 int VEC_T_space (VEC(T) *v,int reserve)
227
228 If V has space for RESERVE additional entries, return nonzero. You
229 usually only need to use this if you are doing your own vector
230 reallocation, for instance on an embedded vector. This returns
231 nonzero exactly when VEC_T_reserve would not need to reallocate,
232 i.e. when VEC_T_reserve would return zero. */
233
234 #define VEC_space(T,V,R) \
235 (VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))
236
237 /* Reserve space.
238 int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);
239
240 Ensure that V has at least RESERVE slots available. This will
241 create additional headroom. Note this can cause V to be
242 reallocated. Returns nonzero iff reallocation actually
243 occurred. */
244
245 #define VEC_reserve(T,A,V,R) \
246 (VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
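
/* As the overview suggests, when the number of additions is known in
   advance, a single reserve followed by 'quick' pushes avoids repeated
   reallocation checks.  A sketch, assuming a heap vector of trees;
   'n' and 'make_entry' are hypothetical:

   VEC(tree,heap) *v = NULL;
   unsigned i;

   VEC_reserve (tree, heap, v, n);             // at most one reallocation
   for (i = 0; i < n; i++)
     VEC_quick_push (tree, v, make_entry (i)); // no per-push checks
*/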
247
248 /* Reserve space exactly.
249 int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);
250
251 Ensure that V has at least RESERVE slots available. This will not
252 create additional headroom. Note this can cause V to be
253 reallocated. Returns nonzero iff reallocation actually
254 occurred. */
255
256 #define VEC_reserve_exact(T,A,V,R) \
257 (VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
258
259 /* Push object with no reallocation
260 T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
261 T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
262 T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
263
264 Push a new element onto the end, returns a pointer to the slot
265 filled in. For object vectors, the new value can be NULL, in which
266 case NO initialization is performed. There must
267 be sufficient space in the vector. */
268
269 #define VEC_quick_push(T,V,O) \
270 (VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))
271
272 /* Push object with reallocation
273 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer
274 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
275 T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object
276
277 Push a new element onto the end, returns a pointer to the slot
278 filled in. For object vectors, the new value can be NULL, in which
279 case NO initialization is performed. Reallocates V, if needed. */
280
281 #define VEC_safe_push(T,A,V,O) \
282 (VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
283
284 /* Pop element off end
285 T VEC_T_pop (VEC(T) *v); // Integer
286 T VEC_T_pop (VEC(T) *v); // Pointer
287 void VEC_T_pop (VEC(T) *v); // Object
288
289 Pop the last element off the end. Returns the element popped, for
290 integer and pointer vectors. */
291
292 #define VEC_pop(T,V) (VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))
293
294 /* Truncate to specific length
295 void VEC_T_truncate (VEC(T) *v, unsigned len);
296
297 Set the length as specified. The new length must be less than or
298 equal to the current length. This is an O(1) operation. */
299
300 #define VEC_truncate(T,V,I) \
301 (VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))
302
303 /* Grow to a specific length.
304 void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);
305
306 Grow the vector to a specific length. LEN must be greater than or
307 equal to the current length. The new elements are
308 uninitialized. */
309
310 #define VEC_safe_grow(T,A,V,I) \
311 (VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
312
313 /* Grow to a specific length.
314 void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);
315
316 Grow the vector to a specific length. LEN must be greater than or
317 equal to the current length. The new elements are
318 initialized to zero. */
319
320 #define VEC_safe_grow_cleared(T,A,V,I) \
321 (VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
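
/* For example, to size a vector up front and then fill it by index (a
   sketch, assuming a heap vector of trees; 'nregs' and 'decl' are
   hypothetical, with nregs > 3):

   VEC(tree,heap) *map = NULL;

   VEC_safe_grow_cleared (tree, heap, map, nregs);  // slots start as zero
   VEC_replace (tree, map, 3, decl);                // write the 4th slot
*/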
322
323 /* Replace element
324 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
325 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
326 T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object
327
328 Replace the IXth element of V with a new value, VAL. For integer and
329 pointer vectors this returns the original value. For object vectors returns a
330 pointer to the new value. For object vectors the new value can be
331 NULL, in which case no overwriting of the slot is actually
332 performed. */
333
334 #define VEC_replace(T,V,I,O) \
335 (VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))
336
337 /* Insert object with no reallocation
338 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
339 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
340 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
341
342 Insert an element, VAL, at the IXth position of V. Return a pointer
343 to the slot created. For object vectors, the new value can be
344 NULL, in which case no initialization of the inserted slot takes
345 place. There must be sufficient space. */
346
347 #define VEC_quick_insert(T,V,I,O) \
348 (VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))
349
350 /* Insert object with reallocation
351 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
352 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
353 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
354
355 Insert an element, VAL, at the IXth position of V. Return a pointer
356 to the slot created. For object vectors, the new value can be
357 NULL, in which case no initialization of the inserted slot takes
358 place. Reallocate V, if necessary. */
359
360 #define VEC_safe_insert(T,A,V,I,O) \
361 (VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))
362
363 /* Remove element retaining order
364 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
365 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
366 void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
367
368 Remove an element from the IXth position of V. Ordering of
369 remaining elements is preserved. For integer and pointer vectors this
370 returns the removed object. This is an O(N) operation due to a memmove. */
371
372 #define VEC_ordered_remove(T,V,I) \
373 (VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
374
375 /* Remove element destroying order
376 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
377 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
378 void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
379
380 Remove an element from the IXth position of V. Ordering of
381 remaining elements is destroyed. For integer and pointer vectors this
382 returns the removed object. This is an O(1) operation. */
383
384 #define VEC_unordered_remove(T,V,I) \
385 (VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
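
/* For instance, when the element order is irrelevant, prefer the O(1)
   variant (a sketch; 'v' and 'ix' are as in the earlier examples):

   tree removed;

   removed = VEC_ordered_remove (tree, v, ix);    // keeps order, O(N)
   removed = VEC_unordered_remove (tree, v, ix);  // O(1); the last element
                                                  // moves into slot IX
*/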
386
387 /* Remove a block of elements
388 void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
389
390 Remove LEN elements starting at the IXth. Ordering is retained.
391 This is an O(N) operation due to memmove. */
392
393 #define VEC_block_remove(T,V,I,L) \
394 (VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))
395
396 /* Get the address of the array of elements
397 T *VEC_T_address (VEC(T) *v)
398
399 If you need to directly manipulate the array (for instance, you
400 want to feed it to qsort), use this accessor. */
401
402 #define VEC_address(T,V) (VEC_OP(T,base,address)(VEC_BASE(V)))
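
/* For example, to sort the live elements with qsort (a sketch; the
   comparison function 'cmp_trees' is hypothetical):

   qsort (VEC_address (tree, v), VEC_length (tree, v),
          sizeof (tree), cmp_trees);
*/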
403
404 /* Find the first index in the vector not less than the object.
405 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
406 bool (*lessthan) (const T, const T)); // Integer
407 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
408 bool (*lessthan) (const T, const T)); // Pointer
409 unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
410 bool (*lessthan) (const T*, const T*)); // Object
411
412 Find the first position in which VAL could be inserted without
413 changing the ordering of V. LESSTHAN is a function that returns
414 true if the first argument is strictly less than the second. */
415
416 #define VEC_lower_bound(T,V,O,LT) \
417 (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
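
/* For example, to insert into a vector kept in sorted order (a sketch;
   'v' is a gc-allocated tree vector and 'tree_lt' is a hypothetical
   strict less-than predicate):

   unsigned pos = VEC_lower_bound (tree, v, elt, tree_lt);
   VEC_safe_insert (tree, gc, v, pos, elt);
*/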
418
419 /* Reallocate an array of elements with prefix. */
420 extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
421 extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
422 extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
423 extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
424 MEM_STAT_DECL);
425 extern void ggc_free (void *);
426 #define vec_gc_free(V) ggc_free (V)
427 extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
428 extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
429 extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
430 extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
431 MEM_STAT_DECL);
432 extern void dump_vec_loc_statistics (void);
433 #ifdef GATHER_STATISTICS
434 void vec_heap_free (void *);
435 #else
436 #define vec_heap_free(V) free (V)
437 #endif
438
439 #if ENABLE_CHECKING
440 #define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
441 #define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
442 #define VEC_CHECK_PASS ,file_,line_,function_
443
444 #define VEC_ASSERT(EXPR,OP,T,A) \
445 (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
446
447 extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
448 ATTRIBUTE_NORETURN;
449 #define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
450 #else
451 #define VEC_CHECK_INFO
452 #define VEC_CHECK_DECL
453 #define VEC_CHECK_PASS
454 #define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
455 #endif
456
457 /* Note: gengtype has hardwired knowledge of the expansions of the
458 VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros. If you change the
459 expansions of these macros you may need to change gengtype too. */
460
461 #define VEC(T,A) VEC_##T##_##A
462 #define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP
463
464 /* Base of vector type, not user visible. */
465 #define VEC_T(T,B) \
466 typedef struct VEC(T,B) \
467 { \
468 unsigned num; \
469 unsigned alloc; \
470 T vec[1]; \
471 } VEC(T,B)
472
473 #define VEC_T_GTY(T,B) \
474 typedef struct GTY(()) VEC(T,B) \
475 { \
476 unsigned num; \
477 unsigned alloc; \
478 T GTY ((length ("%h.num"))) vec[1]; \
479 } VEC(T,B)
480
481 /* Derived vector type, user visible. */
482 #define VEC_TA_GTY(T,B,A,GTY) \
483 typedef struct GTY VEC(T,A) \
484 { \
485 VEC(T,B) base; \
486 } VEC(T,A)
487
488 #define VEC_TA(T,B,A) \
489 typedef struct VEC(T,A) \
490 { \
491 VEC(T,B) base; \
492 } VEC(T,A)
493
494 /* Convert to base type. */
495 #define VEC_BASE(P) ((P) ? &(P)->base : 0)
496
497 /* Vector of integer-like object. */
498 #define DEF_VEC_I(T) \
499 static inline void VEC_OP (T,must_be,integral_type) (void) \
500 { \
501 (void)~(T)0; \
502 } \
503 \
504 VEC_T(T,base); \
505 VEC_TA(T,base,none); \
506 DEF_VEC_FUNC_P(T) \
507 struct vec_swallow_trailing_semi
508 #define DEF_VEC_ALLOC_I(T,A) \
509 VEC_TA(T,base,A); \
510 DEF_VEC_ALLOC_FUNC_I(T,A) \
511 DEF_VEC_NONALLOC_FUNCS_I(T,A) \
512 struct vec_swallow_trailing_semi
513
514 /* Vector of pointer to object. */
515 #define DEF_VEC_P(T) \
516 static inline void VEC_OP (T,must_be,pointer_type) (void) \
517 { \
518 (void)((T)1 == (void *)1); \
519 } \
520 \
521 VEC_T_GTY(T,base); \
522 VEC_TA(T,base,none); \
523 DEF_VEC_FUNC_P(T) \
524 struct vec_swallow_trailing_semi
525 #define DEF_VEC_ALLOC_P(T,A) \
526 VEC_TA(T,base,A); \
527 DEF_VEC_ALLOC_FUNC_P(T,A) \
528 DEF_VEC_NONALLOC_FUNCS_P(T,A) \
529 struct vec_swallow_trailing_semi
530
531 #define DEF_VEC_FUNC_P(T) \
532 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
533 { \
534 return vec_ ? vec_->num : 0; \
535 } \
536 \
537 static inline T VEC_OP (T,base,last) \
538 (const VEC(T,base) *vec_ VEC_CHECK_DECL) \
539 { \
540 VEC_ASSERT (vec_ && vec_->num, "last", T, base); \
541 \
542 return vec_->vec[vec_->num - 1]; \
543 } \
544 \
545 static inline T VEC_OP (T,base,index) \
546 (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
547 { \
548 VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base); \
549 \
550 return vec_->vec[ix_]; \
551 } \
552 \
553 static inline int VEC_OP (T,base,iterate) \
554 (const VEC(T,base) *vec_, unsigned ix_, T *ptr) \
555 { \
556 if (vec_ && ix_ < vec_->num) \
557 { \
558 *ptr = vec_->vec[ix_]; \
559 return 1; \
560 } \
561 else \
562 { \
563 *ptr = (T) 0; \
564 return 0; \
565 } \
566 } \
567 \
568 static inline size_t VEC_OP (T,base,embedded_size) \
569 (int alloc_) \
570 { \
571 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
572 } \
573 \
574 static inline void VEC_OP (T,base,embedded_init) \
575 (VEC(T,base) *vec_, int alloc_) \
576 { \
577 vec_->num = 0; \
578 vec_->alloc = alloc_; \
579 } \
580 \
581 static inline int VEC_OP (T,base,space) \
582 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
583 { \
584 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
585 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
586 } \
587 \
588 static inline T *VEC_OP (T,base,quick_push) \
589 (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL) \
590 { \
591 T *slot_; \
592 \
593 VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base); \
594 slot_ = &vec_->vec[vec_->num++]; \
595 *slot_ = obj_; \
596 \
597 return slot_; \
598 } \
599 \
600 static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
601 { \
602 T obj_; \
603 \
604 VEC_ASSERT (vec_->num, "pop", T, base); \
605 obj_ = vec_->vec[--vec_->num]; \
606 \
607 return obj_; \
608 } \
609 \
610 static inline void VEC_OP (T,base,truncate) \
611 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
612 { \
613 VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base); \
614 if (vec_) \
615 vec_->num = size_; \
616 } \
617 \
618 static inline T VEC_OP (T,base,replace) \
619 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
620 { \
621 T old_obj_; \
622 \
623 VEC_ASSERT (ix_ < vec_->num, "replace", T, base); \
624 old_obj_ = vec_->vec[ix_]; \
625 vec_->vec[ix_] = obj_; \
626 \
627 return old_obj_; \
628 } \
629 \
630 static inline T *VEC_OP (T,base,quick_insert) \
631 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
632 { \
633 T *slot_; \
634 \
635 VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base); \
636 VEC_ASSERT (ix_ <= vec_->num, "insert", T, base); \
637 slot_ = &vec_->vec[ix_]; \
638 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
639 *slot_ = obj_; \
640 \
641 return slot_; \
642 } \
643 \
644 static inline T VEC_OP (T,base,ordered_remove) \
645 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
646 { \
647 T *slot_; \
648 T obj_; \
649 \
650 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
651 slot_ = &vec_->vec[ix_]; \
652 obj_ = *slot_; \
653 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
654 \
655 return obj_; \
656 } \
657 \
658 static inline T VEC_OP (T,base,unordered_remove) \
659 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
660 { \
661 T *slot_; \
662 T obj_; \
663 \
664 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
665 slot_ = &vec_->vec[ix_]; \
666 obj_ = *slot_; \
667 *slot_ = vec_->vec[--vec_->num]; \
668 \
669 return obj_; \
670 } \
671 \
672 static inline void VEC_OP (T,base,block_remove) \
673 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
674 { \
675 T *slot_; \
676 \
677 VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \
678 slot_ = &vec_->vec[ix_]; \
679 vec_->num -= len_; \
680 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
681 } \
682 \
683 static inline T *VEC_OP (T,base,address) \
684 (VEC(T,base) *vec_) \
685 { \
686 return vec_ ? vec_->vec : 0; \
687 } \
688 \
689 static inline unsigned VEC_OP (T,base,lower_bound) \
690 (VEC(T,base) *vec_, const T obj_, \
691 bool (*lessthan_)(const T, const T) VEC_CHECK_DECL) \
692 { \
693 unsigned int len_ = VEC_OP (T,base, length) (vec_); \
694 unsigned int half_, middle_; \
695 unsigned int first_ = 0; \
696 while (len_ > 0) \
697 { \
698 T middle_elem_; \
699 half_ = len_ >> 1; \
700 middle_ = first_; \
701 middle_ += half_; \
702 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
703 if (lessthan_ (middle_elem_, obj_)) \
704 { \
705 first_ = middle_; \
706 ++first_; \
707 len_ = len_ - half_ - 1; \
708 } \
709 else \
710 len_ = half_; \
711 } \
712 return first_; \
713 }
714
715 #define DEF_VEC_ALLOC_FUNC_P(T,A) \
716 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
717 (int alloc_ MEM_STAT_DECL) \
718 { \
719 return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \
720 PASS_MEM_STAT); \
721 }
722
723
724 #define DEF_VEC_NONALLOC_FUNCS_P(T,A) \
725 static inline void VEC_OP (T,A,free) \
726 (VEC(T,A) **vec_) \
727 { \
728 if (*vec_) \
729 vec_##A##_free (*vec_); \
730 *vec_ = NULL; \
731 } \
732 \
733 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
734 { \
735 size_t len_ = vec_ ? vec_->num : 0; \
736 VEC (T,A) *new_vec_ = NULL; \
737 \
738 if (len_) \
739 { \
740 new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact \
741 (NULL, len_ PASS_MEM_STAT)); \
742 \
743 new_vec_->base.num = len_; \
744 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
745 } \
746 return new_vec_; \
747 } \
748 \
749 static inline int VEC_OP (T,A,reserve) \
750 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
751 { \
752 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
753 VEC_CHECK_PASS); \
754 \
755 if (extend) \
756 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
757 \
758 return extend; \
759 } \
760 \
761 static inline int VEC_OP (T,A,reserve_exact) \
762 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
763 { \
764 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
765 VEC_CHECK_PASS); \
766 \
767 if (extend) \
768 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_ \
769 PASS_MEM_STAT); \
770 \
771 return extend; \
772 } \
773 \
774 static inline void VEC_OP (T,A,safe_grow) \
775 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
776 { \
777 VEC_ASSERT (size_ >= 0 \
778 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
779 "grow", T, A); \
780 VEC_OP (T,A,reserve_exact) (vec_, \
781 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
782 VEC_CHECK_PASS PASS_MEM_STAT); \
783 VEC_BASE (*vec_)->num = size_; \
784 } \
785 \
786 static inline void VEC_OP (T,A,safe_grow_cleared) \
787 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
788 { \
789 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
790 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
791 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
792 sizeof (T) * (size_ - oldsize)); \
793 } \
794 \
795 static inline T *VEC_OP (T,A,safe_push) \
796 (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
797 { \
798 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
799 \
800 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
801 } \
802 \
803 static inline T *VEC_OP (T,A,safe_insert) \
804 (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
805 { \
806 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
807 \
808 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
809 VEC_CHECK_PASS); \
810 }
811
812 /* Vector of object. */
813 #define DEF_VEC_O(T) \
814 VEC_T_GTY(T,base); \
815 VEC_TA(T,base,none); \
816 DEF_VEC_FUNC_O(T) \
817 struct vec_swallow_trailing_semi
818 #define DEF_VEC_ALLOC_O(T,A) \
819 VEC_TA(T,base,A); \
820 DEF_VEC_ALLOC_FUNC_O(T,A) \
821 DEF_VEC_NONALLOC_FUNCS_O(T,A) \
822 struct vec_swallow_trailing_semi
823
824 #define DEF_VEC_FUNC_O(T) \
825 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
826 { \
827 return vec_ ? vec_->num : 0; \
828 } \
829 \
830 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
831 { \
832 VEC_ASSERT (vec_ && vec_->num, "last", T, base); \
833 \
834 return &vec_->vec[vec_->num - 1]; \
835 } \
836 \
837 static inline T *VEC_OP (T,base,index) \
838 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
839 { \
840 VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base); \
841 \
842 return &vec_->vec[ix_]; \
843 } \
844 \
845 static inline int VEC_OP (T,base,iterate) \
846 (VEC(T,base) *vec_, unsigned ix_, T **ptr) \
847 { \
848 if (vec_ && ix_ < vec_->num) \
849 { \
850 *ptr = &vec_->vec[ix_]; \
851 return 1; \
852 } \
853 else \
854 { \
855 *ptr = 0; \
856 return 0; \
857 } \
858 } \
859 \
860 static inline size_t VEC_OP (T,base,embedded_size) \
861 (int alloc_) \
862 { \
863 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
864 } \
865 \
866 static inline void VEC_OP (T,base,embedded_init) \
867 (VEC(T,base) *vec_, int alloc_) \
868 { \
869 vec_->num = 0; \
870 vec_->alloc = alloc_; \
871 } \
872 \
873 static inline int VEC_OP (T,base,space) \
874 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
875 { \
876 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
877 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
878 } \
879 \
880 static inline T *VEC_OP (T,base,quick_push) \
881 (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL) \
882 { \
883 T *slot_; \
884 \
885 VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base); \
886 slot_ = &vec_->vec[vec_->num++]; \
887 if (obj_) \
888 *slot_ = *obj_; \
889 \
890 return slot_; \
891 } \
892 \
893 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
894 { \
895 VEC_ASSERT (vec_->num, "pop", T, base); \
896 --vec_->num; \
897 } \
898 \
899 static inline void VEC_OP (T,base,truncate) \
900 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
901 { \
902 VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base); \
903 if (vec_) \
904 vec_->num = size_; \
905 } \
906 \
907 static inline T *VEC_OP (T,base,replace) \
908 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
909 { \
910 T *slot_; \
911 \
912 VEC_ASSERT (ix_ < vec_->num, "replace", T, base); \
913 slot_ = &vec_->vec[ix_]; \
914 if (obj_) \
915 *slot_ = *obj_; \
916 \
917 return slot_; \
918 } \
919 \
920 static inline T *VEC_OP (T,base,quick_insert) \
921 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
922 { \
923 T *slot_; \
924 \
925 VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base); \
926 VEC_ASSERT (ix_ <= vec_->num, "insert", T, base); \
927 slot_ = &vec_->vec[ix_]; \
928 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
929 if (obj_) \
930 *slot_ = *obj_; \
931 \
932 return slot_; \
933 } \
934 \
935 static inline void VEC_OP (T,base,ordered_remove) \
936 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
937 { \
938 T *slot_; \
939 \
940 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
941 slot_ = &vec_->vec[ix_]; \
942 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
943 } \
944 \
945 static inline void VEC_OP (T,base,unordered_remove) \
946 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
947 { \
948 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
949 vec_->vec[ix_] = vec_->vec[--vec_->num]; \
950 } \
951 \
952 static inline void VEC_OP (T,base,block_remove) \
953 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
954 { \
955 T *slot_; \
956 \
957 VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \
958 slot_ = &vec_->vec[ix_]; \
959 vec_->num -= len_; \
960 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
961 } \
962 \
963 static inline T *VEC_OP (T,base,address) \
964 (VEC(T,base) *vec_) \
965 { \
966 return vec_ ? vec_->vec : 0; \
967 } \
968 \
969 static inline unsigned VEC_OP (T,base,lower_bound) \
970 (VEC(T,base) *vec_, const T *obj_, \
971 bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL) \
972 { \
973 unsigned int len_ = VEC_OP (T, base, length) (vec_); \
974 unsigned int half_, middle_; \
975 unsigned int first_ = 0; \
976 while (len_ > 0) \
977 { \
978 T *middle_elem_; \
979 half_ = len_ >> 1; \
980 middle_ = first_; \
981 middle_ += half_; \
982 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
983 if (lessthan_ (middle_elem_, obj_)) \
984 { \
985 first_ = middle_; \
986 ++first_; \
987 len_ = len_ - half_ - 1; \
988 } \
989 else \
990 len_ = half_; \
991 } \
992 return first_; \
993 }
994
995 #define DEF_VEC_ALLOC_FUNC_O(T,A) \
996 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
997 (int alloc_ MEM_STAT_DECL) \
998 { \
999 return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_, \
1000 offsetof (VEC(T,A),base.vec), \
1001 sizeof (T) \
1002 PASS_MEM_STAT); \
1003 }
1004
1005 #define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
1006 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1007 { \
1008 size_t len_ = vec_ ? vec_->num : 0; \
1009 VEC (T,A) *new_vec_ = NULL; \
1010 \
1011 if (len_) \
1012 { \
1013 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1014 (NULL, len_, \
1015 offsetof (VEC(T,A),base.vec), sizeof (T) \
1016 PASS_MEM_STAT)); \
1017 \
1018 new_vec_->base.num = len_; \
1019 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1020 } \
1021 return new_vec_; \
1022 } \
1023 \
1024 static inline void VEC_OP (T,A,free) \
1025 (VEC(T,A) **vec_) \
1026 { \
1027 if (*vec_) \
1028 vec_##A##_free (*vec_); \
1029 *vec_ = NULL; \
1030 } \
1031 \
1032 static inline int VEC_OP (T,A,reserve) \
1033 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1034 { \
1035 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1036 VEC_CHECK_PASS); \
1037 \
1038 if (extend) \
1039 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1040 offsetof (VEC(T,A),base.vec),\
1041 sizeof (T) \
1042 PASS_MEM_STAT); \
1043 \
1044 return extend; \
1045 } \
1046 \
1047 static inline int VEC_OP (T,A,reserve_exact) \
1048 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1049 { \
1050 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1051 VEC_CHECK_PASS); \
1052 \
1053 if (extend) \
1054 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1055 (*vec_, alloc_, \
1056 offsetof (VEC(T,A),base.vec), \
1057 sizeof (T) PASS_MEM_STAT); \
1058 \
1059 return extend; \
1060 } \
1061 \
1062 static inline void VEC_OP (T,A,safe_grow) \
1063 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1064 { \
1065 VEC_ASSERT (size_ >= 0 \
1066 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1067 "grow", T, A); \
1068 VEC_OP (T,A,reserve_exact) (vec_, \
1069 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
1070 VEC_CHECK_PASS PASS_MEM_STAT); \
1071 VEC_BASE (*vec_)->num = size_; \
1072 } \
1073 \
1074 static inline void VEC_OP (T,A,safe_grow_cleared) \
1075 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1076 { \
1077 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1078 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1079 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1080 sizeof (T) * (size_ - oldsize)); \
1081 } \
1082 \
1083 static inline T *VEC_OP (T,A,safe_push) \
1084 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1085 { \
1086 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1087 \
1088 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1089 } \
1090 \
1091 static inline T *VEC_OP (T,A,safe_insert) \
1092 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \
1093 VEC_CHECK_DECL MEM_STAT_DECL) \
1094 { \
1095 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1096 \
1097 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1098 VEC_CHECK_PASS); \
1099 }
1100
1101 #define DEF_VEC_ALLOC_FUNC_I(T,A) \
1102 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1103 (int alloc_ MEM_STAT_DECL) \
1104 { \
1105 return (VEC(T,A) *) vec_##A##_o_reserve_exact \
1106 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \
1107 sizeof (T) PASS_MEM_STAT); \
1108 }
1109
1110 #define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
1111 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1112 { \
1113 size_t len_ = vec_ ? vec_->num : 0; \
1114 VEC (T,A) *new_vec_ = NULL; \
1115 \
1116 if (len_) \
1117 { \
1118 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1119 (NULL, len_, \
1120 offsetof (VEC(T,A),base.vec), sizeof (T) \
1121 PASS_MEM_STAT)); \
1122 \
1123 new_vec_->base.num = len_; \
1124 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1125 } \
1126 return new_vec_; \
1127 } \
1128 \
1129 static inline void VEC_OP (T,A,free) \
1130 (VEC(T,A) **vec_) \
1131 { \
1132 if (*vec_) \
1133 vec_##A##_free (*vec_); \
1134 *vec_ = NULL; \
1135 } \
1136 \
1137 static inline int VEC_OP (T,A,reserve) \
1138 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1139 { \
1140 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1141 VEC_CHECK_PASS); \
1142 \
1143 if (extend) \
1144 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1145 offsetof (VEC(T,A),base.vec),\
1146 sizeof (T) \
1147 PASS_MEM_STAT); \
1148 \
1149 return extend; \
1150 } \
1151 \
1152 static inline int VEC_OP (T,A,reserve_exact) \
1153 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1154 { \
1155 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1156 VEC_CHECK_PASS); \
1157 \
1158 if (extend) \
1159 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1160 (*vec_, alloc_, offsetof (VEC(T,A),base.vec), \
1161 sizeof (T) PASS_MEM_STAT); \
1162 \
1163 return extend; \
1164 } \
1165 \
1166 static inline void VEC_OP (T,A,safe_grow) \
1167 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1168 { \
1169 VEC_ASSERT (size_ >= 0 \
1170 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1171 "grow", T, A); \
1172 VEC_OP (T,A,reserve_exact) (vec_, \
1173 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
1174 VEC_CHECK_PASS PASS_MEM_STAT); \
1175 VEC_BASE (*vec_)->num = size_; \
1176 } \
1177 \
1178 static inline void VEC_OP (T,A,safe_grow_cleared) \
1179 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1180 { \
1181 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1182 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1183 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1184 sizeof (T) * (size_ - oldsize)); \
1185 } \
1186 \
1187 static inline T *VEC_OP (T,A,safe_push) \
1188 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1189 { \
1190 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1191 \
1192 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1193 } \
1194 \
1195 static inline T *VEC_OP (T,A,safe_insert) \
1196 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \
1197 VEC_CHECK_DECL MEM_STAT_DECL) \
1198 { \
1199 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1200 \
1201 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1202 VEC_CHECK_PASS); \
1203 }
1204
1205 /* We support a vector which starts out with space on the stack and
1206 switches to heap space when forced to reallocate. This works a
1207 little differently. Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
1208 DEF_VEC_ALLOC_P_STACK(TYPE). This uses alloca to get the initial
1209 space; because alloca can not be usefully called in an inline
1210 function, and because a macro can not define a macro, you must then
1211 write a #define for each type:
1212
1213 #define VEC_{TYPE}_stack_alloc(alloc) \
1214 VEC_stack_alloc({TYPE}, alloc)
1215
1216 This is really a hack and perhaps can be made better. Note that
1217 this macro will wind up evaluating the ALLOC parameter twice.
1218
1219 Only the initial allocation will be made using alloca, so pass a
1220 reasonable estimate that doesn't use too much stack space; don't
1221 pass zero. Don't return a VEC(TYPE,stack) vector from the function
1222 which allocated it. */
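
/* An illustrative sketch of the pattern described above, assuming
   DEF_VEC_P(tree) is already in scope; 'decl' is a hypothetical tree and
   the initial estimate of 16 slots is arbitrary:

   DEF_VEC_ALLOC_P_STACK(tree);
   #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

   void
   walk_some_trees (void)
   {
     VEC(tree,stack) *work = VEC_tree_stack_alloc (16); // alloca'd space

     VEC_safe_push (tree, stack, work, decl); // switches to the heap if
                                              // the 16 slots are outgrown
     ...
     VEC_free (tree, stack, work);
   }
*/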
1223
1224 extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
1225 extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
1226 extern void *vec_stack_p_reserve_exact_1 (int, void *);
1227 extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
1228 extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
1229 MEM_STAT_DECL);
1230 extern void vec_stack_free (void *);
1231
1232 #ifdef GATHER_STATISTICS
1233 #define VEC_stack_alloc(T,alloc,name,line,function) \
1234 (VEC_OP (T,stack,alloc1) \
1235 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1236 #else
1237 #define VEC_stack_alloc(T,alloc) \
1238 (VEC_OP (T,stack,alloc1) \
1239 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1240 #endif
1241
1242 #define DEF_VEC_ALLOC_P_STACK(T) \
1243 VEC_TA(T,base,stack); \
1244 DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1245 DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
1246 struct vec_swallow_trailing_semi
1247
1248 #define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1249 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1250 (int alloc_, VEC(T,stack)* space) \
1251 { \
1252 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1253 }
1254
1255 #define DEF_VEC_ALLOC_O_STACK(T) \
1256 VEC_TA(T,base,stack); \
1257 DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1258 DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
1259 struct vec_swallow_trailing_semi
1260
1261 #define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1262 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1263 (int alloc_, VEC(T,stack)* space) \
1264 { \
1265 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1266 }
1267
1268 #define DEF_VEC_ALLOC_I_STACK(T) \
1269 VEC_TA(T,base,stack); \
1270 DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1271 DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
1272 struct vec_swallow_trailing_semi
1273
1274 #define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1275 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1276 (int alloc_, VEC(T,stack)* space) \
1277 { \
1278 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1279 }
1280
1281 #endif /* GCC_VEC_H */