gcc/alloc-pool.h
/* Functions to support a pool of allocatable objects
   Copyright (C) 1997-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@cgsoftware.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef ALLOC_POOL_H
#define ALLOC_POOL_H

#include "memory-block.h"
#include "options.h"     // for flag_checking

extern void dump_alloc_pool_statistics (void);

typedef unsigned long ALLOC_POOL_ID_TYPE;

/* Last used ID.  */
extern ALLOC_POOL_ID_TYPE last_id;

/* Pool allocator memory usage.  */
struct pool_usage: public mem_usage
{
  /* Default constructor.  */
  pool_usage (): m_element_size (0), m_pool_name ("") {}
  /* Constructor.  */
  pool_usage (size_t allocated, size_t times, size_t peak,
	      size_t instances, size_t element_size,
	      const char *pool_name)
    : mem_usage (allocated, times, peak, instances),
      m_element_size (element_size),
      m_pool_name (pool_name) {}

  /* Sum the usage with SECOND usage.  */
  pool_usage
  operator+ (const pool_usage &second)
  {
    return pool_usage (m_allocated + second.m_allocated,
		       m_times + second.m_times,
		       m_peak + second.m_peak,
		       m_instances + second.m_instances,
		       m_element_size, m_pool_name);
  }

  /* Dump usage coupled to LOC location, where TOTAL is sum of all rows.  */
  inline void
  dump (mem_location *loc, mem_usage &total) const
  {
    char *location_string = loc->to_string ();

    fprintf (stderr, "%-32s%-48s %6li%10li:%5.1f%%%10li%10li:%5.1f%%%12li\n",
	     m_pool_name, location_string, (long)m_instances,
	     (long)m_allocated, get_percent (m_allocated, total.m_allocated),
	     (long)m_peak, (long)m_times,
	     get_percent (m_times, total.m_times),
	     (long)m_element_size);

    free (location_string);
  }

  /* Dump header with NAME.  */
  static inline void
  dump_header (const char *name)
  {
    fprintf (stderr, "%-32s%-48s %6s%11s%16s%17s%12s\n", "Pool name", name,
	     "Pools", "Leak", "Peak", "Times", "Elt size");
    print_dash_line ();
  }

  /* Dump footer.  */
  inline void
  dump_footer ()
  {
    print_dash_line ();
    fprintf (stderr, "%s%82li%10li\n", "Total", (long)m_instances,
	     (long)m_allocated);
    print_dash_line ();
  }

  /* Element size.  */
  size_t m_element_size;
  /* Pool name.  */
  const char *m_pool_name;
};

extern mem_alloc_description<pool_usage> pool_allocator_usage;

#if 0
/* If a pool with custom block size is needed, one might use the following
   template.  An instance of this template can be used as a parameter for
   instantiating the base_pool_allocator template:

	typedef custom_block_allocator <128*1024> huge_block_allocator;
	...
	static base_pool_allocator <huge_block_allocator>
	  value_pool ("value", 16384);

   Right now it's not used anywhere in the code, and is given here as an
   example.  */

template <size_t BlockSize>
class custom_block_allocator
{
public:
  static const size_t block_size = BlockSize;

  static inline void *
  allocate () ATTRIBUTE_MALLOC
  {
    return XNEWVEC (char, BlockSize);
  }

  static inline void
  release (void *block)
  {
    XDELETEVEC (block);
  }
};
#endif

/* Generic pool allocator.  */

template <typename TBlockAllocator>
class base_pool_allocator
{
public:
  /* Default constructor for pool allocator called NAME.  */
  base_pool_allocator (const char *name, size_t size CXX_MEM_STAT_INFO);
  ~base_pool_allocator ();
  void release ();
  void release_if_empty ();
  void *allocate () ATTRIBUTE_MALLOC;
  void remove (void *object);
  size_t num_elts_current ();

private:
  struct allocation_pool_list
  {
    allocation_pool_list *next;
  };

  /* Initialize a pool allocator.  */
  void initialize ();

  struct allocation_object
  {
    /* The ID of alloc pool which the object was allocated from.  */
    ALLOC_POOL_ID_TYPE id;

    union
      {
	/* The data of the object.  */
	char data[1];

	/* Because we want any type of data to be well aligned after the ID,
	   the following elements are here.  They are never accessed so
	   the allocated object may be even smaller than this structure.
	   We do not care about alignment for floating-point types.  */
	char *align_p;
	int64_t align_i;
      } u;

    static inline allocation_object*
    get_instance (void *data_ptr)
    {
      return (allocation_object *)(((char *)(data_ptr))
				   - offsetof (allocation_object,
					       u.data));
    }

    static inline void*
    get_data (void *instance_ptr)
    {
      return (void*)(((allocation_object *) instance_ptr)->u.data);
    }
  };
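
  /* For illustration (assuming an LP64 host, where ALLOC_POOL_ID_TYPE and
     the union members are 8 bytes wide): the id occupies the first 8 bytes
     and u.data starts at offset 8, so for any element pointer P handed out
     by allocate (), get_instance (P) points 8 bytes before P and
     get_data (get_instance (P)) == P.  The exact offset is whatever
     offsetof (allocation_object, u.data) evaluates to on the host.  */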

  /* Align X to 8.  */
  static inline size_t
  align_eight (size_t x)
  {
    return (((x+7) >> 3) << 3);
  }

  const char *m_name;
  ALLOC_POOL_ID_TYPE m_id;
  size_t m_elts_per_block;

  /* These are the elements that have been allocated at least once
     and freed.  */
  allocation_pool_list *m_returned_free_list;

  /* These are the elements that have not yet been allocated out of
     the last block obtained from the block allocator.  */
  char* m_virgin_free_list;

  /* The number of elements in the virgin_free_list that can be
     allocated before needing another block.  */
  size_t m_virgin_elts_remaining;
  /* The number of elements that are allocated.  */
  size_t m_elts_allocated;
  /* The number of elements that are released.  */
  size_t m_elts_free;
  /* The number of allocated blocks.  */
  size_t m_blocks_allocated;
  /* List of blocks that are used to allocate new objects.  */
  allocation_pool_list *m_block_list;
  /* Size of a pool element in bytes.  */
  size_t m_elt_size;
  /* Size in bytes that should be allocated for each element.  */
  size_t m_size;
  /* Flag if a pool allocator is initialized.  */
  bool m_initialized;
  /* Memory allocation location.  */
  mem_location m_location;
};

template <typename TBlockAllocator>
inline
base_pool_allocator <TBlockAllocator>::base_pool_allocator (
  const char *name, size_t size MEM_STAT_DECL):
  m_name (name), m_id (0), m_elts_per_block (0), m_returned_free_list (NULL),
  m_virgin_free_list (NULL), m_virgin_elts_remaining (0), m_elts_allocated (0),
  m_elts_free (0), m_blocks_allocated (0), m_block_list (NULL), m_size (size),
  m_initialized (false), m_location (ALLOC_POOL_ORIGIN, false PASS_MEM_STAT) {}

/* Initialize a pool allocator.  */

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::initialize ()
{
  gcc_checking_assert (!m_initialized);
  m_initialized = true;

  size_t size = m_size;

  gcc_checking_assert (m_name);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (allocation_pool_list*))
    size = sizeof (allocation_pool_list*);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object, u.data);

  m_elt_size = size;

  if (GATHER_STATISTICS)
    {
      pool_usage *u = pool_allocator_usage.register_descriptor
	(this, new mem_location (m_location));

      u->m_element_size = m_elt_size;
      u->m_pool_name = m_name;
    }

  /* List header size should be a multiple of 8.  */
  size_t header_size = align_eight (sizeof (allocation_pool_list));

  m_elts_per_block = (TBlockAllocator::block_size - header_size) / size;
  gcc_checking_assert (m_elts_per_block != 0);

  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of pool so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  m_id = last_id;
}

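/* A worked example of the size computation above, assuming an LP64 host
   where offsetof (allocation_object, u.data) is 8 and the block allocator
   is memory_block_pool with a 64 KB block size: for a 12-byte element,
   size is first raised to at least sizeof (allocation_pool_list *) = 8
   (already satisfied), rounded up to 16 by align_eight, and then 8 bytes
   are added for the id header, giving m_elt_size == 24.  With
   header_size == 8, m_elts_per_block is (65536 - 8) / 24 = 2730 elements
   per block.  */
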
/* Free all memory allocated for the given memory pool.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release ()
{
  if (!m_initialized)
    return;

  allocation_pool_list *block, *next_block;

  /* Free each block allocated to the pool.  */
  for (block = m_block_list; block != NULL; block = next_block)
    {
      next_block = block->next;
      TBlockAllocator::release (block);
    }

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead
	(this, (m_elts_allocated - m_elts_free) * m_elt_size);
    }

  m_returned_free_list = NULL;
  m_virgin_free_list = NULL;
  m_virgin_elts_remaining = 0;
  m_elts_allocated = 0;
  m_elts_free = 0;
  m_blocks_allocated = 0;
  m_block_list = NULL;
}

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release_if_empty ()
{
  if (m_elts_free == m_elts_allocated)
    release ();
}

template <typename TBlockAllocator>
inline base_pool_allocator <TBlockAllocator>::~base_pool_allocator ()
{
  release ();
}

/* Allocates one element from the pool specified.  */
template <typename TBlockAllocator>
inline void*
base_pool_allocator <TBlockAllocator>::allocate ()
{
  if (!m_initialized)
    initialize ();

  allocation_pool_list *header;
#ifdef ENABLE_VALGRIND_ANNOTATIONS
  int size;
#endif

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.register_instance_overhead (m_elt_size, this);
    }

#ifdef ENABLE_VALGRIND_ANNOTATIONS
  size = m_elt_size - offsetof (allocation_object, u.data);
#endif

  /* If there are no more free elements, make some more!  */
  if (!m_returned_free_list)
    {
      char *block;
      if (!m_virgin_elts_remaining)
	{
	  allocation_pool_list *block_header;

	  /* Make the block.  */
	  block = reinterpret_cast<char *> (TBlockAllocator::allocate ());
	  block_header = new (block) allocation_pool_list;
	  block += align_eight (sizeof (allocation_pool_list));

	  /* Throw it on the block list.  */
	  block_header->next = m_block_list;
	  m_block_list = block_header;

	  /* Make the block available for allocation.  */
	  m_virgin_free_list = block;
	  m_virgin_elts_remaining = m_elts_per_block;

	  /* Also update the number of elements we have free/allocated, and
	     increment the allocated block count.  */
	  m_elts_allocated += m_elts_per_block;
	  m_elts_free += m_elts_per_block;
	  m_blocks_allocated += 1;
	}

      /* We now know that we can take the first elt off the virgin list and
	 put it on the returned list.  */
      block = m_virgin_free_list;
      header = (allocation_pool_list*) allocation_object::get_data (block);
      header->next = NULL;

      /* Mark the element to be free.  */
      ((allocation_object*) block)->id = 0;
      VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (header, size));
      m_returned_free_list = header;
      m_virgin_free_list += m_elt_size;
      m_virgin_elts_remaining--;
    }

  /* Pull the first free element from the free list, and return it.  */
  header = m_returned_free_list;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_DEFINED (header, sizeof (*header)));
  m_returned_free_list = header->next;
  m_elts_free--;

  /* Set the ID for element.  */
  allocation_object::get_instance (header)->id = m_id;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_UNDEFINED (header, size));

  return (void *)(header);
}

/* Puts OBJECT back on the pool's free list.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::remove (void *object)
{
  int size = m_elt_size - offsetof (allocation_object, u.data);

  if (flag_checking)
    {
      gcc_assert (m_initialized);
      gcc_assert (object
		  /* Check if we free more than we allocated, which is Bad (TM).  */
		  && m_elts_free < m_elts_allocated
		  /* Check whether OBJECT was allocated from this pool.  */
		  && m_id == allocation_object::get_instance (object)->id);

      memset (object, 0xaf, size);
    }

  /* Mark the element to be free.  */
  allocation_object::get_instance (object)->id = 0;

  allocation_pool_list *header = new (object) allocation_pool_list;
  header->next = m_returned_free_list;
  m_returned_free_list = header;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (object, size));
  m_elts_free++;

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead (this, m_elt_size);
    }
}

/* Number of elements currently active (not returned to pool).  Used for cheap
   consistency checks.  */
template <typename TBlockAllocator>
inline size_t
base_pool_allocator <TBlockAllocator>::num_elts_current ()
{
  return m_elts_allocated - m_elts_free;
}

/* Specialization of base_pool_allocator which should be used in most cases.
   Another specialization may be needed if the object size is greater than
   memory_block_pool::block_size (64 KB).  */
typedef base_pool_allocator <memory_block_pool> pool_allocator;

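/* A minimal usage sketch of pool_allocator (the pool and element type names
   are hypothetical, only for illustration):

     static pool_allocator occ_pool ("occurrences", sizeof (occ));
     ...
     occ *o = (occ *) occ_pool.allocate ();
     ...
     occ_pool.remove (o);

   allocate () lazily initializes the pool on first use; remove () returns
   the element to the pool's free list rather than to the system.  */
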
/* Type based memory pool allocator.  */
template <typename T>
class object_allocator
{
public:
  /* Default constructor for pool allocator called NAME.  */
  object_allocator (const char *name CXX_MEM_STAT_INFO):
    m_allocator (name, sizeof (T) PASS_MEM_STAT) {}

  inline void
  release ()
  {
    m_allocator.release ();
  }

  inline void release_if_empty ()
  {
    m_allocator.release_if_empty ();
  }

  /* Allocate memory for an instance of type T and call its default
     constructor.  */

  inline T *
  allocate () ATTRIBUTE_MALLOC
  {
    return ::new (m_allocator.allocate ()) T;
  }

  /* Allocate memory for an instance of type T and return a void * that
     can be used in situations where class T does not provide a default
     constructor.  */

  inline void *
  allocate_raw () ATTRIBUTE_MALLOC
  {
    return m_allocator.allocate ();
  }

  inline void
  remove (T *object)
  {
    /* Call destructor.  */
    object->~T ();

    m_allocator.remove (object);
  }

  inline size_t
  num_elts_current ()
  {
    return m_allocator.num_elts_current ();
  }

private:
  pool_allocator m_allocator;
};

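/* A minimal usage sketch of the type-based allocator (the type and variable
   names are hypothetical, only for illustration):

     struct edge_info { int src, dst; edge_info (): src (0), dst (0) {} };
     static object_allocator<edge_info> edge_info_pool ("edge info");

     edge_info *e = edge_info_pool.allocate ();  // placement-new, default ctor
     ...
     edge_info_pool.remove (e);                  // runs ~edge_info, recycles slot

   For types without a default constructor use allocate_raw () together with
   the placement operator new declared below.  */
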
/* Store information about each particular alloc_pool.  Note that this
   will underestimate the amount of storage used by a small amount:
   1) The overhead in a pool is not accounted for.
   2) The unallocated elements in a block are not accounted for.  Note
      that in the worst case this can be one element smaller than the
      block size for that pool.  */
struct alloc_pool_descriptor
{
  /* Number of pools allocated.  */
  unsigned long created;
  /* Gross allocated storage.  */
  unsigned long allocated;
  /* Amount of currently active storage.  */
  unsigned long current;
  /* Peak amount of storage used.  */
  unsigned long peak;
  /* Size of element in the pool.  */
  int elt_size;
};

/* Helper for classes that do not provide a default ctor.  */

template <typename T>
inline void *
operator new (size_t, object_allocator<T> &a)
{
  return a.allocate_raw ();
}

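/* A minimal usage sketch of the helper above (the type name is hypothetical,
   only for illustration):

     struct range { int lo, hi; range (int l, int h): lo (l), hi (h) {} };
     static object_allocator<range> range_pool ("ranges");

     range *r = new (range_pool) range (1, 10);  // allocate_raw + ctor
     ...
     range_pool.remove (r);                      // dtor, back to free list

   Note that remove () is still called on the object_allocator itself; this
   file declares no matching placement operator delete.  */
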
/* Hashtable mapping alloc_pool names to descriptors.  */
extern hash_map<const char *, alloc_pool_descriptor> *alloc_pool_hash;

#endif