1 /* Vector API for GNU compiler.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Nathan Sidwell <nathan@codesourcery.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #ifndef GCC_VEC_H
23 #define GCC_VEC_H
24
25 #include "statistics.h" /* For MEM_STAT_DECL. */
26
27 /* The macros here implement a set of templated vector types and
28 associated interfaces. These templates are implemented with
29 macros, as we're not in C++ land. The interface functions are
30 typesafe and use static inline functions, sometimes backed by
31 out-of-line generic functions. The vectors are designed to
32 interoperate with the GTY machinery.
33
34 Because of the different behavior of structure objects, scalar
35 objects and of pointers, there are three flavors, one for each of
36 these variants. Both the structure object and pointer variants
  37    pass pointers to objects around -- in the pointer case the pointers
  38    themselves are stored into the vector, while in the structure object
  39    case the pointers are dereferenced and the objects copied into the
  40    vector.  The scalar object variant is suitable for int-like objects,
  41    and the vector elements are returned by value.
42
43 There are both 'index' and 'iterate' accessors. The iterator
44 returns a boolean iteration condition and updates the iteration
45 variable passed by reference. Because the iterator will be
46 inlined, the address-of can be optimized away.
47
48 The vectors are implemented using the trailing array idiom, thus
49 they are not resizeable without changing the address of the vector
50 object itself. This means you cannot have variables or fields of
51 vector type -- always use a pointer to a vector. The one exception
52 is the final field of a structure, which could be a vector type.
53 You will have to use the embedded_size & embedded_init calls to
54 create such objects, and they will probably not be resizeable (so
55 don't use the 'safe' allocation variants). The trailing array
56 idiom is used (rather than a pointer to an array of data), because,
57 if we allow NULL to also represent an empty vector, empty vectors
58 occupy minimal space in the structure containing them.
59
60 Each operation that increases the number of active elements is
61 available in 'quick' and 'safe' variants. The former presumes that
62 there is sufficient allocated space for the operation to succeed
63 (it dies if there is not). The latter will reallocate the
64 vector, if needed. Reallocation causes an exponential increase in
65 vector size. If you know you will be adding N elements, it would
66 be more efficient to use the reserve operation before adding the
  67    elements with the 'quick' operation.  This will ensure there is
  68    space for at least as many elements as you ask for; the allocation
  69    grows exponentially if there are too few spare slots.  If you want
  70    to reserve a specific number of slots but do not want the exponential
  71    increase (for instance, you know this is the last allocation), use the
72 reserve_exact operation. You can also create a vector of a
73 specific size from the get go.
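
   For instance, to append N new elements with at most one
   reallocation, reserve first and then use the 'quick' push (a
   sketch; 'v' is a heap-allocated vector of tree pointers as in the
   example further below, and 'n' and 'elts' are illustrative names):

     VEC_reserve (tree, heap, v, n);
     for (i = 0; i < n; i++)
       VEC_quick_push (tree, v, elts[i]);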
74
75 You should prefer the push and pop operations, as they append and
76 remove from the end of the vector. If you need to remove several
77 items in one go, use the truncate operation. The insert and remove
78 operations allow you to change elements in the middle of the
79 vector. There are two remove operations, one which preserves the
80 element ordering 'ordered_remove', and one which does not
81 'unordered_remove'. The latter function copies the end element
  82    into the removed slot, rather than invoking a memmove operation.  The
  83    'lower_bound' function will determine the index at which an item
  84    should be inserted so as to maintain sorted order.
85
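   For instance (a sketch reusing the tree vector from the example
   below; the index 3 is arbitrary):

     tree t = VEC_pop (tree, v);          // take the last element
     VEC_ordered_remove (tree, v, 3);     // O(N), preserves order
     VEC_unordered_remove (tree, v, 3);   // O(1), fills slot from the end
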
86 When a vector type is defined, first a non-memory managed version
87 is created. You can then define either or both garbage collected
88 and heap allocated versions. The allocation mechanism is specified
89 when the type is defined, and is therefore part of the type. If
90 you need both gc'd and heap allocated versions, you still must have
91 *exactly* one definition of the common non-memory managed base vector.
92
93 If you need to directly manipulate a vector, then the 'address'
94 accessor will return the address of the start of the vector. Also
95 the 'space' predicate will tell you whether there is spare capacity
96 in the vector. You will not normally need to use these two functions.
97
98 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
99 get the non-memory allocation version, and then a
100 DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
101 vectors. Variables of vector type are declared using a
102 VEC(TYPEDEF,ALLOC) macro. The ALLOC argument specifies the
103 allocation strategy, and can be either 'gc' or 'heap' for garbage
104 collected and heap allocated respectively. It can be 'none' to get
105 a vector that must be explicitly allocated (for instance as a
106 trailing array of another structure). The characters O, P and I
107 indicate whether TYPEDEF is a pointer (P), object (O) or integral
108 (I) type. Be careful to pick the correct one, as you'll get an
109 awkward and inefficient API if you use the wrong one. There is a
110 check, which results in a compile-time warning, for the P and I
111 versions, but there is no check for the O versions, as that is not
112 possible in plain C. Due to the way GTY works, you must annotate
113 any structures you wish to insert or reference from a vector with a
114 GTY(()) tag. You need to do this even if you never declare the GC
115 allocated variants.
116
117 An example of their use would be,
118
119 DEF_VEC_P(tree); // non-managed tree vector.
120 DEF_VEC_ALLOC_P(tree,gc); // gc'd vector of tree pointers. This must
121 // appear at file scope.
122
123 struct my_struct {
124 VEC(tree,gc) *v; // A (pointer to) a vector of tree pointers.
125 };
126
127 struct my_struct *s;
128
129 if (VEC_length(tree,s->v)) { we have some contents }
130 VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
131 for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
132 { do something with elt }
133
134 */
135
136 /* Macros to invoke API calls. A single macro works for both pointer
137 and object vectors, but the argument and return types might well be
138 different. In each macro, T is the typedef of the vector elements,
139 and A is the allocation strategy. The allocation strategy is only
140 present when it is required. Some of these macros pass the vector,
141 V, by reference (by taking its address), this is noted in the
142 descriptions. */
143
144 /* Length of vector
145 unsigned VEC_T_length(const VEC(T) *v);
146
147 Return the number of active elements in V. V can be NULL, in which
148 case zero is returned. */
149
150 #define VEC_length(T,V) (VEC_OP(T,base,length)(VEC_BASE(V)))
151
152
153 /* Check if vector is empty
154 int VEC_T_empty(const VEC(T) *v);
155
156 Return nonzero if V is an empty vector (or V is NULL), zero otherwise. */
157
158 #define VEC_empty(T,V) (VEC_length (T,V) == 0)
159
160
161 /* Get the final element of the vector.
162 T VEC_T_last(VEC(T) *v); // Integer
163 T VEC_T_last(VEC(T) *v); // Pointer
164 T *VEC_T_last(VEC(T) *v); // Object
165
166 Return the final element. V must not be empty. */
167
168 #define VEC_last(T,V) (VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))
169
170 /* Index into vector
171 T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
172 T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
173 T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
174
  175    Return the IX'th element.  IX must be in the domain of V.  */
176
177 #define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))
178
179 /* Iterate over vector
180 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
181 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
182 int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
183
184 Return iteration condition and update PTR to point to the IX'th
185 element. At the end of iteration, sets PTR to NULL. Use this to
186 iterate over the elements of a vector as follows,
187
188 for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
189 continue; */
190
191 #define VEC_iterate(T,V,I,P) (VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))
192
193 /* Allocate new vector.
194 VEC(T,A) *VEC_T_A_alloc(int reserve);
195
196 Allocate a new vector with space for RESERVE objects. If RESERVE
197 is zero, NO vector is created. */
198
199 #define VEC_alloc(T,A,N) (VEC_OP(T,A,alloc)(N MEM_STAT_INFO))
200
201 /* Free a vector.
202 void VEC_T_A_free(VEC(T,A) *&);
203
204 Free a vector and set it to NULL. */
205
206 #define VEC_free(T,A,V) (VEC_OP(T,A,free)(&V))
207
208 /* Use these to determine the required size and initialization of a
209 vector embedded within another structure (as the final member).
210
211 size_t VEC_T_embedded_size(int reserve);
212 void VEC_T_embedded_init(VEC(T) *v, int reserve);
213
214 These allow the caller to perform the memory allocation. */
215
216 #define VEC_embedded_size(T,N) (VEC_OP(T,base,embedded_size)(N))
217 #define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))
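
/* For instance, to carve an explicitly managed vector out of raw
   storage (a sketch; xmalloc, the 'tree' element type and 'decl' are
   purely illustrative -- any storage and any DEF_VEC'd type work):

     size_t bytes = VEC_embedded_size (tree, 16);
     VEC(tree,none) *v = (VEC(tree,none) *) xmalloc (bytes);
     VEC_embedded_init (tree, v, 16);
     VEC_quick_push (tree, v, decl);   // 'quick' operations only, never 'safe'.  */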
218
219 /* Copy a vector.
220 VEC(T,A) *VEC_T_A_copy(VEC(T) *);
221
222 Copy the live elements of a vector into a new vector. The new and
223 old vectors need not be allocated by the same mechanism. */
224
225 #define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))
226
227 /* Determine if a vector has additional capacity.
228
229 int VEC_T_space (VEC(T) *v,int reserve)
230
231 If V has space for RESERVE additional entries, return nonzero. You
232 usually only need to use this if you are doing your own vector
233 reallocation, for instance on an embedded vector. This returns
234 nonzero in exactly the same circumstances that VEC_T_reserve
235 will. */
236
237 #define VEC_space(T,V,R) \
238 (VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))
239
240 /* Reserve space.
241 int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);
242
243 Ensure that V has at least RESERVE slots available. This will
244 create additional headroom. Note this can cause V to be
245 reallocated. Returns nonzero iff reallocation actually
246 occurred. */
247
248 #define VEC_reserve(T,A,V,R) \
249 (VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
250
251 /* Reserve space exactly.
252 int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);
253
254 Ensure that V has at least RESERVE slots available. This will not
255 create additional headroom. Note this can cause V to be
256 reallocated. Returns nonzero iff reallocation actually
257 occurred. */
258
259 #define VEC_reserve_exact(T,A,V,R) \
260 (VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
261
262 /* Push object with no reallocation
263 T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
264 T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
265 T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
266
267 Push a new element onto the end, returns a pointer to the slot
268 filled in. For object vectors, the new value can be NULL, in which
269 case NO initialization is performed. There must
270 be sufficient space in the vector. */
271
272 #define VEC_quick_push(T,V,O) \
273 (VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))
274
275 /* Push object with reallocation
276 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer
277 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
278 T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object
279
280 Push a new element onto the end, returns a pointer to the slot
281 filled in. For object vectors, the new value can be NULL, in which
282 case NO initialization is performed. Reallocates V, if needed. */
283
284 #define VEC_safe_push(T,A,V,O) \
285 (VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))
286
287 /* Pop element off end
288 T VEC_T_pop (VEC(T) *v); // Integer
289 T VEC_T_pop (VEC(T) *v); // Pointer
290 void VEC_T_pop (VEC(T) *v); // Object
291
  292    Pop the last element off the end.  Returns the element popped, for
  293    integer and pointer vectors.  */
294
295 #define VEC_pop(T,V) (VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))
296
297 /* Truncate to specific length
298 void VEC_T_truncate (VEC(T) *v, unsigned len);
299
300 Set the length as specified. The new length must be less than or
301 equal to the current length. This is an O(1) operation. */
302
303 #define VEC_truncate(T,V,I) \
304 (VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))
305
306 /* Grow to a specific length.
307 void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);
308
  309    Grow the vector to a specific length.  LEN must be greater than or
  310    equal to the current length.  The new elements are
311 uninitialized. */
312
313 #define VEC_safe_grow(T,A,V,I) \
314 (VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
315
316 /* Grow to a specific length.
317 void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);
318
  319    Grow the vector to a specific length.  LEN must be greater than or
  320    equal to the current length.  The new elements are
321 initialized to zero. */
322
323 #define VEC_safe_grow_cleared(T,A,V,I) \
324 (VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
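
/* For instance, a common idiom is to use a vector as a map from index
   to element, growing and zero-filling it on demand (a sketch; 'map',
   'ix' and 'val' are illustrative names):

     if (ix >= VEC_length (tree, map))
       VEC_safe_grow_cleared (tree, heap, map, ix + 1);
     VEC_replace (tree, map, ix, val);  */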
325
326 /* Replace element
327 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
328 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
329 T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object
330
  331    Replace the IXth element of V with a new value, VAL.  For integer
  332    and pointer vectors returns the original value.  For object vectors
  333    returns a pointer to the new value.  For object vectors the new value
  334    can be NULL, in which case no overwriting of the slot is actually
335 performed. */
336
337 #define VEC_replace(T,V,I,O) \
338 (VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))
339
340 /* Insert object with no reallocation
341 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
342 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
343 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
344
345 Insert an element, VAL, at the IXth position of V. Return a pointer
346 to the slot created. For vectors of object, the new value can be
347 NULL, in which case no initialization of the inserted slot takes
348 place. There must be sufficient space. */
349
350 #define VEC_quick_insert(T,V,I,O) \
351 (VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))
352
353 /* Insert object with reallocation
354 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
355 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
356 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
357
358 Insert an element, VAL, at the IXth position of V. Return a pointer
359 to the slot created. For vectors of object, the new value can be
360 NULL, in which case no initialization of the inserted slot takes
361 place. Reallocate V, if necessary. */
362
363 #define VEC_safe_insert(T,A,V,I,O) \
364 (VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))
365
366 /* Remove element retaining order
367 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
368 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
369 void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
370
371 Remove an element from the IXth position of V. Ordering of
  372    remaining elements is preserved.  For integer and pointer vectors
  373    returns the removed object.  This is an O(N) operation due to a memmove.  */
374
375 #define VEC_ordered_remove(T,V,I) \
376 (VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
377
378 /* Remove element destroying order
379 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
380 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
381 void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
382
383 Remove an element from the IXth position of V. Ordering of
  384    remaining elements is destroyed.  For integer and pointer vectors
  385    returns the removed object.  This is an O(1) operation.  */
386
387 #define VEC_unordered_remove(T,V,I) \
388 (VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))
389
390 /* Remove a block of elements
391 void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
392
393 Remove LEN elements starting at the IXth. Ordering is retained.
  394    This is an O(N) operation due to a memmove.  */
395
396 #define VEC_block_remove(T,V,I,L) \
397 (VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))
398
399 /* Get the address of the array of elements
  400    T *VEC_T_address (VEC(T) *v)
401
402 If you need to directly manipulate the array (for instance, you
403 want to feed it to qsort), use this accessor. */
404
405 #define VEC_address(T,V) (VEC_OP(T,base,address)(VEC_BASE(V)))
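
/* For instance, to sort the live elements with qsort (a sketch; 'cmp'
   stands for a user-supplied qsort comparison function and is
   illustrative):

     qsort (VEC_address (tree, v), VEC_length (tree, v),
            sizeof (tree), cmp);  */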
406
407 /* Find the first index in the vector not less than the object.
408 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
409 bool (*lessthan) (const T, const T)); // Integer
410 unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
411 bool (*lessthan) (const T, const T)); // Pointer
412 unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
413 bool (*lessthan) (const T*, const T*)); // Object
414
415 Find the first position in which VAL could be inserted without
416 changing the ordering of V. LESSTHAN is a function that returns
417 true if the first argument is strictly less than the second. */
418
419 #define VEC_lower_bound(T,V,O,LT) \
420 (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
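
/* For instance, to insert X into a vector kept sorted by LESSTHAN
   (a sketch; 'tree_lessthan' stands for an illustrative comparison
   function of type bool (*) (const tree, const tree)):

     unsigned ix = VEC_lower_bound (tree, v, x, tree_lessthan);
     VEC_safe_insert (tree, heap, v, ix, x);  */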
421
422 /* Reallocate an array of elements with prefix. */
423 extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
424 extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
425 extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
426 extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
427 MEM_STAT_DECL);
428 extern void ggc_free (void *);
429 #define vec_gc_free(V) ggc_free (V)
430 extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
431 extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
432 extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
433 extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
434 MEM_STAT_DECL);
435 extern void dump_vec_loc_statistics (void);
436 #ifdef GATHER_STATISTICS
437 void vec_heap_free (void *);
438 #else
439 #define vec_heap_free(V) free (V)
440 #endif
441
442 #if ENABLE_CHECKING
443 #define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
444 #define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
445 #define VEC_CHECK_PASS ,file_,line_,function_
446
447 #define VEC_ASSERT(EXPR,OP,T,A) \
448 (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
449
450 extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
451 ATTRIBUTE_NORETURN;
452 #define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
453 #else
454 #define VEC_CHECK_INFO
455 #define VEC_CHECK_DECL
456 #define VEC_CHECK_PASS
457 #define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
458 #endif
459
460 /* Note: gengtype has hardwired knowledge of the expansions of the
461 VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros. If you change the
462 expansions of these macros you may need to change gengtype too. */
463
464 #define VEC(T,A) VEC_##T##_##A
465 #define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP
466
467 /* Base of vector type, not user visible. */
468 #define VEC_T(T,B) \
469 typedef struct VEC(T,B) \
470 { \
471 unsigned num; \
472 unsigned alloc; \
473 T vec[1]; \
474 } VEC(T,B)
475
476 #define VEC_T_GTY(T,B) \
477 typedef struct GTY(()) VEC(T,B) \
478 { \
479 unsigned num; \
480 unsigned alloc; \
481 T GTY ((length ("%h.num"))) vec[1]; \
482 } VEC(T,B)
483
484 /* Derived vector type, user visible. */
485 #define VEC_TA_GTY(T,B,A,GTY) \
486 typedef struct GTY VEC(T,A) \
487 { \
488 VEC(T,B) base; \
489 } VEC(T,A)
490
491 #define VEC_TA(T,B,A) \
492 typedef struct VEC(T,A) \
493 { \
494 VEC(T,B) base; \
495 } VEC(T,A)
496
497 /* Convert to base type. */
498 #define VEC_BASE(P) ((P) ? &(P)->base : 0)
499
500 /* Vector of integer-like object. */
501 #define DEF_VEC_I(T) \
502 static inline void VEC_OP (T,must_be,integral_type) (void) \
503 { \
504 (void)~(T)0; \
505 } \
506 \
507 VEC_T(T,base); \
508 VEC_TA(T,base,none); \
509 DEF_VEC_FUNC_P(T) \
510 struct vec_swallow_trailing_semi
511 #define DEF_VEC_ALLOC_I(T,A) \
512 VEC_TA(T,base,A); \
513 DEF_VEC_ALLOC_FUNC_I(T,A) \
514 DEF_VEC_NONALLOC_FUNCS_I(T,A) \
515 struct vec_swallow_trailing_semi
516
517 /* Vector of pointer to object. */
518 #define DEF_VEC_P(T) \
519 static inline void VEC_OP (T,must_be,pointer_type) (void) \
520 { \
521 (void)((T)1 == (void *)1); \
522 } \
523 \
524 VEC_T_GTY(T,base); \
525 VEC_TA(T,base,none); \
526 DEF_VEC_FUNC_P(T) \
527 struct vec_swallow_trailing_semi
528 #define DEF_VEC_ALLOC_P(T,A) \
529 VEC_TA(T,base,A); \
530 DEF_VEC_ALLOC_FUNC_P(T,A) \
531 DEF_VEC_NONALLOC_FUNCS_P(T,A) \
532 struct vec_swallow_trailing_semi
533
534 #define DEF_VEC_FUNC_P(T) \
535 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
536 { \
537 return vec_ ? vec_->num : 0; \
538 } \
539 \
540 static inline T VEC_OP (T,base,last) \
541 (const VEC(T,base) *vec_ VEC_CHECK_DECL) \
542 { \
543 VEC_ASSERT (vec_ && vec_->num, "last", T, base); \
544 \
545 return vec_->vec[vec_->num - 1]; \
546 } \
547 \
548 static inline T VEC_OP (T,base,index) \
549 (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
550 { \
551 VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base); \
552 \
553 return vec_->vec[ix_]; \
554 } \
555 \
556 static inline int VEC_OP (T,base,iterate) \
557 (const VEC(T,base) *vec_, unsigned ix_, T *ptr) \
558 { \
559 if (vec_ && ix_ < vec_->num) \
560 { \
561 *ptr = vec_->vec[ix_]; \
562 return 1; \
563 } \
564 else \
565 { \
566 *ptr = (T) 0; \
567 return 0; \
568 } \
569 } \
570 \
571 static inline size_t VEC_OP (T,base,embedded_size) \
572 (int alloc_) \
573 { \
574 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
575 } \
576 \
577 static inline void VEC_OP (T,base,embedded_init) \
578 (VEC(T,base) *vec_, int alloc_) \
579 { \
580 vec_->num = 0; \
581 vec_->alloc = alloc_; \
582 } \
583 \
584 static inline int VEC_OP (T,base,space) \
585 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
586 { \
587 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
588 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
589 } \
590 \
591 static inline T *VEC_OP (T,base,quick_push) \
592 (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL) \
593 { \
594 T *slot_; \
595 \
596 VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base); \
597 slot_ = &vec_->vec[vec_->num++]; \
598 *slot_ = obj_; \
599 \
600 return slot_; \
601 } \
602 \
603 static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
604 { \
605 T obj_; \
606 \
607 VEC_ASSERT (vec_->num, "pop", T, base); \
608 obj_ = vec_->vec[--vec_->num]; \
609 \
610 return obj_; \
611 } \
612 \
613 static inline void VEC_OP (T,base,truncate) \
614 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
615 { \
616 VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base); \
617 if (vec_) \
618 vec_->num = size_; \
619 } \
620 \
621 static inline T VEC_OP (T,base,replace) \
622 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
623 { \
624 T old_obj_; \
625 \
626 VEC_ASSERT (ix_ < vec_->num, "replace", T, base); \
627 old_obj_ = vec_->vec[ix_]; \
628 vec_->vec[ix_] = obj_; \
629 \
630 return old_obj_; \
631 } \
632 \
633 static inline T *VEC_OP (T,base,quick_insert) \
634 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
635 { \
636 T *slot_; \
637 \
638 VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base); \
639 VEC_ASSERT (ix_ <= vec_->num, "insert", T, base); \
640 slot_ = &vec_->vec[ix_]; \
641 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
642 *slot_ = obj_; \
643 \
644 return slot_; \
645 } \
646 \
647 static inline T VEC_OP (T,base,ordered_remove) \
648 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
649 { \
650 T *slot_; \
651 T obj_; \
652 \
653 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
654 slot_ = &vec_->vec[ix_]; \
655 obj_ = *slot_; \
656 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
657 \
658 return obj_; \
659 } \
660 \
661 static inline T VEC_OP (T,base,unordered_remove) \
662 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
663 { \
664 T *slot_; \
665 T obj_; \
666 \
667 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
668 slot_ = &vec_->vec[ix_]; \
669 obj_ = *slot_; \
670 *slot_ = vec_->vec[--vec_->num]; \
671 \
672 return obj_; \
673 } \
674 \
675 static inline void VEC_OP (T,base,block_remove) \
676 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
677 { \
678 T *slot_; \
679 \
680 VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \
681 slot_ = &vec_->vec[ix_]; \
682 vec_->num -= len_; \
683 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
684 } \
685 \
686 static inline T *VEC_OP (T,base,address) \
687 (VEC(T,base) *vec_) \
688 { \
689 return vec_ ? vec_->vec : 0; \
690 } \
691 \
692 static inline unsigned VEC_OP (T,base,lower_bound) \
693 (VEC(T,base) *vec_, const T obj_, \
694 bool (*lessthan_)(const T, const T) VEC_CHECK_DECL) \
695 { \
696 unsigned int len_ = VEC_OP (T,base, length) (vec_); \
697 unsigned int half_, middle_; \
698 unsigned int first_ = 0; \
699 while (len_ > 0) \
700 { \
701 T middle_elem_; \
702 half_ = len_ >> 1; \
703 middle_ = first_; \
704 middle_ += half_; \
705 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
706 if (lessthan_ (middle_elem_, obj_)) \
707 { \
708 first_ = middle_; \
709 ++first_; \
710 len_ = len_ - half_ - 1; \
711 } \
712 else \
713 len_ = half_; \
714 } \
715 return first_; \
716 }
717
718 #define DEF_VEC_ALLOC_FUNC_P(T,A) \
719 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
720 (int alloc_ MEM_STAT_DECL) \
721 { \
722 return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \
723 PASS_MEM_STAT); \
724 }
725
726
727 #define DEF_VEC_NONALLOC_FUNCS_P(T,A) \
728 static inline void VEC_OP (T,A,free) \
729 (VEC(T,A) **vec_) \
730 { \
731 if (*vec_) \
732 vec_##A##_free (*vec_); \
733 *vec_ = NULL; \
734 } \
735 \
736 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
737 { \
738 size_t len_ = vec_ ? vec_->num : 0; \
739 VEC (T,A) *new_vec_ = NULL; \
740 \
741 if (len_) \
742 { \
743 new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact \
744 (NULL, len_ PASS_MEM_STAT)); \
745 \
746 new_vec_->base.num = len_; \
747 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
748 } \
749 return new_vec_; \
750 } \
751 \
752 static inline int VEC_OP (T,A,reserve) \
753 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
754 { \
755 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
756 VEC_CHECK_PASS); \
757 \
758 if (extend) \
759 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
760 \
761 return extend; \
762 } \
763 \
764 static inline int VEC_OP (T,A,reserve_exact) \
765 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
766 { \
767 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
768 VEC_CHECK_PASS); \
769 \
770 if (extend) \
771 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_ \
772 PASS_MEM_STAT); \
773 \
774 return extend; \
775 } \
776 \
777 static inline void VEC_OP (T,A,safe_grow) \
778 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
779 { \
780 VEC_ASSERT (size_ >= 0 \
781 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
782 "grow", T, A); \
783 VEC_OP (T,A,reserve_exact) (vec_, \
784 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
785 VEC_CHECK_PASS PASS_MEM_STAT); \
786 VEC_BASE (*vec_)->num = size_; \
787 } \
788 \
789 static inline void VEC_OP (T,A,safe_grow_cleared) \
790 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
791 { \
792 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
793 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
794 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
795 sizeof (T) * (size_ - oldsize)); \
796 } \
797 \
798 static inline T *VEC_OP (T,A,safe_push) \
799 (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
800 { \
801 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
802 \
803 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
804 } \
805 \
806 static inline T *VEC_OP (T,A,safe_insert) \
807 (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
808 { \
809 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
810 \
811 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
812 VEC_CHECK_PASS); \
813 }
814
815 /* Vector of object. */
816 #define DEF_VEC_O(T) \
817 VEC_T_GTY(T,base); \
818 VEC_TA(T,base,none); \
819 DEF_VEC_FUNC_O(T) \
820 struct vec_swallow_trailing_semi
821 #define DEF_VEC_ALLOC_O(T,A) \
822 VEC_TA(T,base,A); \
823 DEF_VEC_ALLOC_FUNC_O(T,A) \
824 DEF_VEC_NONALLOC_FUNCS_O(T,A) \
825 struct vec_swallow_trailing_semi
826
827 #define DEF_VEC_FUNC_O(T) \
828 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
829 { \
830 return vec_ ? vec_->num : 0; \
831 } \
832 \
833 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
834 { \
835 VEC_ASSERT (vec_ && vec_->num, "last", T, base); \
836 \
837 return &vec_->vec[vec_->num - 1]; \
838 } \
839 \
840 static inline T *VEC_OP (T,base,index) \
841 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
842 { \
843 VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base); \
844 \
845 return &vec_->vec[ix_]; \
846 } \
847 \
848 static inline int VEC_OP (T,base,iterate) \
849 (VEC(T,base) *vec_, unsigned ix_, T **ptr) \
850 { \
851 if (vec_ && ix_ < vec_->num) \
852 { \
853 *ptr = &vec_->vec[ix_]; \
854 return 1; \
855 } \
856 else \
857 { \
858 *ptr = 0; \
859 return 0; \
860 } \
861 } \
862 \
863 static inline size_t VEC_OP (T,base,embedded_size) \
864 (int alloc_) \
865 { \
866 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
867 } \
868 \
869 static inline void VEC_OP (T,base,embedded_init) \
870 (VEC(T,base) *vec_, int alloc_) \
871 { \
872 vec_->num = 0; \
873 vec_->alloc = alloc_; \
874 } \
875 \
876 static inline int VEC_OP (T,base,space) \
877 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
878 { \
879 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
880 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \
881 } \
882 \
883 static inline T *VEC_OP (T,base,quick_push) \
884 (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL) \
885 { \
886 T *slot_; \
887 \
888 VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base); \
889 slot_ = &vec_->vec[vec_->num++]; \
890 if (obj_) \
891 *slot_ = *obj_; \
892 \
893 return slot_; \
894 } \
895 \
896 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
897 { \
898 VEC_ASSERT (vec_->num, "pop", T, base); \
899 --vec_->num; \
900 } \
901 \
902 static inline void VEC_OP (T,base,truncate) \
903 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
904 { \
905 VEC_ASSERT (vec_ ? vec_->num >= size_ : !size_, "truncate", T, base); \
906 if (vec_) \
907 vec_->num = size_; \
908 } \
909 \
910 static inline T *VEC_OP (T,base,replace) \
911 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
912 { \
913 T *slot_; \
914 \
915 VEC_ASSERT (ix_ < vec_->num, "replace", T, base); \
916 slot_ = &vec_->vec[ix_]; \
917 if (obj_) \
918 *slot_ = *obj_; \
919 \
920 return slot_; \
921 } \
922 \
923 static inline T *VEC_OP (T,base,quick_insert) \
924 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
925 { \
926 T *slot_; \
927 \
928 VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base); \
929 VEC_ASSERT (ix_ <= vec_->num, "insert", T, base); \
930 slot_ = &vec_->vec[ix_]; \
931 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \
932 if (obj_) \
933 *slot_ = *obj_; \
934 \
935 return slot_; \
936 } \
937 \
938 static inline void VEC_OP (T,base,ordered_remove) \
939 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
940 { \
941 T *slot_; \
942 \
943 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
944 slot_ = &vec_->vec[ix_]; \
945 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \
946 } \
947 \
948 static inline void VEC_OP (T,base,unordered_remove) \
949 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
950 { \
951 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \
952 vec_->vec[ix_] = vec_->vec[--vec_->num]; \
953 } \
954 \
955 static inline void VEC_OP (T,base,block_remove) \
956 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
957 { \
958 T *slot_; \
959 \
960 VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \
961 slot_ = &vec_->vec[ix_]; \
962 vec_->num -= len_; \
963 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
964 } \
965 \
966 static inline T *VEC_OP (T,base,address) \
967 (VEC(T,base) *vec_) \
968 { \
969 return vec_ ? vec_->vec : 0; \
970 } \
971 \
972 static inline unsigned VEC_OP (T,base,lower_bound) \
973 (VEC(T,base) *vec_, const T *obj_, \
974 bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL) \
975 { \
976 unsigned int len_ = VEC_OP (T, base, length) (vec_); \
977 unsigned int half_, middle_; \
978 unsigned int first_ = 0; \
979 while (len_ > 0) \
980 { \
981 T *middle_elem_; \
982 half_ = len_ >> 1; \
983 middle_ = first_; \
984 middle_ += half_; \
985 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
986 if (lessthan_ (middle_elem_, obj_)) \
987 { \
988 first_ = middle_; \
989 ++first_; \
990 len_ = len_ - half_ - 1; \
991 } \
992 else \
993 len_ = half_; \
994 } \
995 return first_; \
996 }
997
998 #define DEF_VEC_ALLOC_FUNC_O(T,A) \
999 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1000 (int alloc_ MEM_STAT_DECL) \
1001 { \
1002 return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_, \
1003 offsetof (VEC(T,A),base.vec), \
1004 sizeof (T) \
1005 PASS_MEM_STAT); \
1006 }
1007
1008 #define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
1009 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1010 { \
1011 size_t len_ = vec_ ? vec_->num : 0; \
1012 VEC (T,A) *new_vec_ = NULL; \
1013 \
1014 if (len_) \
1015 { \
1016 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1017 (NULL, len_, \
1018 offsetof (VEC(T,A),base.vec), sizeof (T) \
1019 PASS_MEM_STAT)); \
1020 \
1021 new_vec_->base.num = len_; \
1022 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1023 } \
1024 return new_vec_; \
1025 } \
1026 \
1027 static inline void VEC_OP (T,A,free) \
1028 (VEC(T,A) **vec_) \
1029 { \
1030 if (*vec_) \
1031 vec_##A##_free (*vec_); \
1032 *vec_ = NULL; \
1033 } \
1034 \
1035 static inline int VEC_OP (T,A,reserve) \
1036 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1037 { \
1038 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1039 VEC_CHECK_PASS); \
1040 \
1041 if (extend) \
1042 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1043 offsetof (VEC(T,A),base.vec),\
1044 sizeof (T) \
1045 PASS_MEM_STAT); \
1046 \
1047 return extend; \
1048 } \
1049 \
1050 static inline int VEC_OP (T,A,reserve_exact) \
1051 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1052 { \
1053 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1054 VEC_CHECK_PASS); \
1055 \
1056 if (extend) \
1057 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1058 (*vec_, alloc_, \
1059 offsetof (VEC(T,A),base.vec), \
1060 sizeof (T) PASS_MEM_STAT); \
1061 \
1062 return extend; \
1063 } \
1064 \
1065 static inline void VEC_OP (T,A,safe_grow) \
1066 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1067 { \
1068 VEC_ASSERT (size_ >= 0 \
1069 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1070 "grow", T, A); \
1071 VEC_OP (T,A,reserve_exact) (vec_, \
1072 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
1073 VEC_CHECK_PASS PASS_MEM_STAT); \
1074 VEC_BASE (*vec_)->num = size_; \
1075 } \
1076 \
1077 static inline void VEC_OP (T,A,safe_grow_cleared) \
1078 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1079 { \
1080 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1081 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1082 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1083 sizeof (T) * (size_ - oldsize)); \
1084 } \
1085 \
1086 static inline T *VEC_OP (T,A,safe_push) \
1087 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1088 { \
1089 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1090 \
1091 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1092 } \
1093 \
1094 static inline T *VEC_OP (T,A,safe_insert) \
1095 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \
1096 VEC_CHECK_DECL MEM_STAT_DECL) \
1097 { \
1098 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1099 \
1100 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1101 VEC_CHECK_PASS); \
1102 }
1103
1104 #define DEF_VEC_ALLOC_FUNC_I(T,A) \
1105 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1106 (int alloc_ MEM_STAT_DECL) \
1107 { \
1108 return (VEC(T,A) *) vec_##A##_o_reserve_exact \
1109 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \
1110 sizeof (T) PASS_MEM_STAT); \
1111 }
1112
1113 #define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
1114 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1115 { \
1116 size_t len_ = vec_ ? vec_->num : 0; \
1117 VEC (T,A) *new_vec_ = NULL; \
1118 \
1119 if (len_) \
1120 { \
1121 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1122 (NULL, len_, \
1123 offsetof (VEC(T,A),base.vec), sizeof (T) \
1124 PASS_MEM_STAT)); \
1125 \
1126 new_vec_->base.num = len_; \
1127 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1128 } \
1129 return new_vec_; \
1130 } \
1131 \
1132 static inline void VEC_OP (T,A,free) \
1133 (VEC(T,A) **vec_) \
1134 { \
1135 if (*vec_) \
1136 vec_##A##_free (*vec_); \
1137 *vec_ = NULL; \
1138 } \
1139 \
1140 static inline int VEC_OP (T,A,reserve) \
1141 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1142 { \
1143 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1144 VEC_CHECK_PASS); \
1145 \
1146 if (extend) \
1147 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1148 offsetof (VEC(T,A),base.vec),\
1149 sizeof (T) \
1150 PASS_MEM_STAT); \
1151 \
1152 return extend; \
1153 } \
1154 \
1155 static inline int VEC_OP (T,A,reserve_exact) \
1156 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1157 { \
1158 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1159 VEC_CHECK_PASS); \
1160 \
1161 if (extend) \
1162 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1163 (*vec_, alloc_, offsetof (VEC(T,A),base.vec), \
1164 sizeof (T) PASS_MEM_STAT); \
1165 \
1166 return extend; \
1167 } \
1168 \
1169 static inline void VEC_OP (T,A,safe_grow) \
1170 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1171 { \
1172 VEC_ASSERT (size_ >= 0 \
1173 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1174 "grow", T, A); \
1175 VEC_OP (T,A,reserve_exact) (vec_, \
1176 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->num : 0) \
1177 VEC_CHECK_PASS PASS_MEM_STAT); \
1178 VEC_BASE (*vec_)->num = size_; \
1179 } \
1180 \
1181 static inline void VEC_OP (T,A,safe_grow_cleared) \
1182 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1183 { \
1184 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1185 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1186 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1187 sizeof (T) * (size_ - oldsize)); \
1188 } \
1189 \
1190 static inline T *VEC_OP (T,A,safe_push) \
1191 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1192 { \
1193 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1194 \
1195 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1196 } \
1197 \
1198 static inline T *VEC_OP (T,A,safe_insert) \
1199 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \
1200 VEC_CHECK_DECL MEM_STAT_DECL) \
1201 { \
1202 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1203 \
1204 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1205 VEC_CHECK_PASS); \
1206 }
1207
1208 /* We support a vector which starts out with space on the stack and
1209 switches to heap space when forced to reallocate. This works a
1210 little differently. Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
1211 DEF_VEC_ALLOC_P_STACK(TYPE). This uses alloca to get the initial
1212 space; because alloca can not be usefully called in an inline
1213 function, and because a macro can not define a macro, you must then
1214 write a #define for each type:
1215
1216 #define VEC_{TYPE}_stack_alloc(alloc) \
1217 VEC_stack_alloc({TYPE}, alloc)
1218
1219 This is really a hack and perhaps can be made better. Note that
1220 this macro will wind up evaluating the ALLOC parameter twice.
1221
1222 Only the initial allocation will be made using alloca, so pass a
1223 reasonable estimate that doesn't use too much stack space; don't
1224 pass zero. Don't return a VEC(TYPE,stack) vector from the function
1225 which allocated it. */
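
/* For instance (a sketch; 'tree' is illustrative, and DEF_VEC_P(tree)
   is assumed to have been issued already):

     DEF_VEC_ALLOC_P_STACK(tree);
     #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

     static void
     walk_some_trees (void)
     {
       VEC(tree,stack) *work = VEC_tree_stack_alloc (16);
       ...
       VEC_free (tree, stack, work);
     }  */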
1226
1227 extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
1228 extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
1229 extern void *vec_stack_p_reserve_exact_1 (int, void *);
1230 extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
1231 extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
1232 MEM_STAT_DECL);
1233 extern void vec_stack_free (void *);
1234
1235 #ifdef GATHER_STATISTICS
1236 #define VEC_stack_alloc(T,alloc,name,line,function) \
1237 (VEC_OP (T,stack,alloc1) \
1238 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1239 #else
1240 #define VEC_stack_alloc(T,alloc) \
1241 (VEC_OP (T,stack,alloc1) \
1242 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1243 #endif
1244
1245 #define DEF_VEC_ALLOC_P_STACK(T) \
1246 VEC_TA(T,base,stack); \
1247 DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1248 DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
1249 struct vec_swallow_trailing_semi
1250
1251 #define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1252 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1253 (int alloc_, VEC(T,stack)* space) \
1254 { \
1255 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1256 }
1257
1258 #define DEF_VEC_ALLOC_O_STACK(T) \
1259 VEC_TA(T,base,stack); \
1260 DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1261 DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
1262 struct vec_swallow_trailing_semi
1263
1264 #define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1265 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1266 (int alloc_, VEC(T,stack)* space) \
1267 { \
1268 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1269 }
1270
1271 #define DEF_VEC_ALLOC_I_STACK(T) \
1272 VEC_TA(T,base,stack); \
1273 DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1274 DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
1275 struct vec_swallow_trailing_semi
1276
1277 #define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1278 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1279 (int alloc_, VEC(T,stack)* space) \
1280 { \
1281 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1282 }
1283
1284 #endif /* GCC_VEC_H */