+ if (aligning_type)
+ {
+ /* Latch malloc's return value and get a pointer to the aligning field
+ first. */
+ tree storage_ptr = save_expr (malloc_ptr);
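+ /* STORAGE_PTR is referenced twice in the expression built below, so the
+ SAVE_EXPR makes sure malloc is called only once. */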
+
+ tree aligning_record_addr
+ = convert (build_pointer_type (aligning_type), storage_ptr);
+
+ tree aligning_record
+ = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
+
+ tree aligning_field
+ = build_component_ref (aligning_record, NULL_TREE,
+ TYPE_FIELDS (aligning_type), 0);
+
+ tree aligning_field_addr
+ = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
+
+ /* Then arrange to store the allocator's return value just in front of
+ the aligned field and return the latter's address. */
+ tree storage_ptr_slot_addr
+ = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
+ convert (ptr_void_type_node, aligning_field_addr),
+ size_int (-POINTER_SIZE / BITS_PER_UNIT));
+
+ tree storage_ptr_slot
+ = build_unary_op (INDIRECT_REF, NULL_TREE,
+ convert (build_pointer_type (ptr_void_type_node),
+ storage_ptr_slot_addr));
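+
+ /* In C terms, what is returned below roughly amounts to
+
+ STORAGE_PTR (void *) = malloc (...);
+ FIELD_ADDR (void *) = &((ALIGNING_TYPE *)STORAGE_PTR)->FIELD;
+ *(void **)(FIELD_ADDR - sizeof (void *)) = STORAGE_PTR;
+ return FIELD_ADDR;
+
+ with FIELD the first field of ALIGNING_TYPE. */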
+
+ return
+ build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
+ build_binary_op (MODIFY_EXPR, NULL_TREE,
+ storage_ptr_slot, storage_ptr),
+ aligning_field_addr);
+ }
+ else
+ return malloc_ptr;
+}
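+
+ /* So, when ALIGNING_TYPE is used, the pointer handed back to the program
+ points within the malloc'ed block, and the original malloc result sits
+ in the pointer-sized slot just in front of it. maybe_wrap_free below
+ relies on this layout to release the block. */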
+
+/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
+ designated by DATA_PTR using the __gnat_free entry point. */
+
+static inline tree
+maybe_wrap_free (tree data_ptr, tree data_type)
+{
+ /* In the regular alignment case, we pass the data pointer straight to free.
+ In the superaligned case, we need to retrieve the initial allocator
+ return value, stored in front of the data block at allocation time. */
+
+ unsigned int data_align = TYPE_ALIGN (data_type);
+ unsigned int default_allocator_alignment
+ = get_target_default_allocator_alignment () * BITS_PER_UNIT;
+
+ tree free_ptr;
+
+ if (data_align > default_allocator_alignment)
+ {
+ /* DATA_FRONT_PTR (void *)
+ = (void *)DATA_PTR - sizeof (void *) */
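+ /* DATA_FRONT_PTR is the address of the slot in which maybe_wrap_malloc
+ stashed the original allocator return value at allocation time. */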
+ tree data_front_ptr
+ = build_binary_op
+ (POINTER_PLUS_EXPR, ptr_void_type_node,
+ convert (ptr_void_type_node, data_ptr),
+ size_int (-POINTER_SIZE / BITS_PER_UNIT));
+
+ /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
+ free_ptr
+ = build_unary_op
+ (INDIRECT_REF, NULL_TREE,
+ convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
+ }
+ else
+ free_ptr = data_ptr;
+
+ return build_call_1_expr (free_decl, free_ptr);
+}
+
+/* Build a GCC tree to call an allocation or deallocation function.
+ If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
+ generate an allocator.
+
+ GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
+ object type, used to determine the address alignment to be honored.
+ GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
+ pool to use. If not present, malloc and free are used. GNAT_NODE is used
+ to provide an error location for restriction violation messages. */
+
+tree
+build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
+ Entity_Id gnat_proc, Entity_Id gnat_pool,
+ Node_Id gnat_node)
+{
+ gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
+
+ /* Explicit procedure to call? It is assumed to deal with the type's
+ alignment constraints. */
+ if (Present (gnat_proc))
+ return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
+ gnat_proc, gnat_pool);
+
+ /* Otherwise, release the object with "free" or allocate it with "malloc",
+ with special processing for alignments stricter than what the default
+ allocator honors. */
+ else if (gnu_obj)
+ return maybe_wrap_free (gnu_obj, gnu_type);
+ else
+ {
+ /* Assert that we can no longer be called with this special pool value. */
+ gcc_assert (gnat_pool != -1);
+
+ /* An allocation that doesn't come from a source-level allocator is an
+ implicit heap allocation; check that it doesn't violate the
+ No_Implicit_Heap_Allocations restriction. */
+ if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
+ Check_No_Implicit_Heap_Alloc (gnat_node);
+
+ return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
+ }