nir: glsl_get_bit_size() should take glsl_type
[mesa.git] src/compiler/nir/nir_lower_var_copies.c
/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include "nir.h"
#include "compiler/nir_types.h"

/*
 * Lowers all copy intrinsics to sequences of load/store intrinsics.
 */
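
/* For example (purely illustrative, not a literal NIR dump), a copy of one
 * vec4 variable into another,
 *
 *    copy_var dest, src
 *
 * becomes, roughly,
 *
 *    val = load_var src           <- 4-component SSA value
 *    store_var dest, val          <- write mask 0xf
 *
 * and any array wildcards in the two derefs are expanded into one such
 * load/store pair per array element (see emit_copy_load_store below).
 */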

/* Walks down the deref chain and returns the next deref in the chain whose
 * child is a wildcard. In other words, given the chain a[1].foo[*].bar,
 * this function will return the deref to foo. Calling it a second time
 * with the [*].bar, it will return NULL.
 */
static nir_deref *
deref_next_wildcard_parent(nir_deref *deref)
{
   for (nir_deref *tail = deref; tail->child; tail = tail->child) {
      if (tail->child->deref_type != nir_deref_type_array)
         continue;

      nir_deref_array *arr = nir_deref_as_array(tail->child);

      if (arr->deref_array_type == nir_deref_array_type_wildcard)
         return tail;
   }

   return NULL;
}

/* This function recursively walks the given deref chain and replaces the
 * given copy instruction with an equivalent sequence of load/store
 * operations.
 *
 * @copy_instr  The copy instruction to replace; new instructions will be
 *              inserted before this one
 *
 * @dest_head   The head of the destination variable deref chain
 *
 * @src_head    The head of the source variable deref chain
 *
 * @dest_tail   The current tail of the destination variable deref chain;
 *              this is used for recursion and external callers of this
 *              function should call it with tail == head
 *
 * @src_tail    The current tail of the source variable deref chain;
 *              this is used for recursion and external callers of this
 *              function should call it with tail == head
 *
 * @mem_ctx     The ralloc memory context in which the new load/store
 *              instructions are allocated
 */
static void
emit_copy_load_store(nir_intrinsic_instr *copy_instr,
                     nir_deref_var *dest_head, nir_deref_var *src_head,
                     nir_deref *dest_tail, nir_deref *src_tail, void *mem_ctx)
{
   /* Find the next pair of wildcards */
   nir_deref *src_arr_parent = deref_next_wildcard_parent(src_tail);
   nir_deref *dest_arr_parent = deref_next_wildcard_parent(dest_tail);

   if (src_arr_parent || dest_arr_parent) {
      /* Wildcards had better come in matched pairs */
      assert(src_arr_parent && dest_arr_parent);

      nir_deref_array *src_arr = nir_deref_as_array(src_arr_parent->child);
      nir_deref_array *dest_arr = nir_deref_as_array(dest_arr_parent->child);

      unsigned length = glsl_get_length(src_arr_parent->type);
      /* The wildcards should represent the same number of elements */
      assert(length == glsl_get_length(dest_arr_parent->type));
      assert(length > 0);

      /* Walk over all of the elements that this wildcard refers to and
       * call emit_copy_load_store on each one of them */
      src_arr->deref_array_type = nir_deref_array_type_direct;
      dest_arr->deref_array_type = nir_deref_array_type_direct;
      for (unsigned i = 0; i < length; i++) {
         src_arr->base_offset = i;
         dest_arr->base_offset = i;
         emit_copy_load_store(copy_instr, dest_head, src_head,
                              &dest_arr->deref, &src_arr->deref, mem_ctx);
      }
      src_arr->deref_array_type = nir_deref_array_type_wildcard;
      dest_arr->deref_array_type = nir_deref_array_type_wildcard;
   } else {
      /* In this case, we have no wildcards anymore, so all we have to do
       * is just emit the load and store operations. */
      src_tail = nir_deref_tail(src_tail);
      dest_tail = nir_deref_tail(dest_tail);

      assert(src_tail->type == dest_tail->type);

      unsigned num_components = glsl_get_vector_elements(src_tail->type);
      unsigned bit_size = glsl_get_bit_size(src_tail->type);

      nir_intrinsic_instr *load =
         nir_intrinsic_instr_create(mem_ctx, nir_intrinsic_load_var);
      load->num_components = num_components;
      load->variables[0] = nir_deref_as_var(nir_copy_deref(load, &src_head->deref));
      nir_ssa_dest_init(&load->instr, &load->dest, num_components, bit_size,
                        NULL);

      nir_instr_insert_before(&copy_instr->instr, &load->instr);

      nir_intrinsic_instr *store =
         nir_intrinsic_instr_create(mem_ctx, nir_intrinsic_store_var);
      store->num_components = num_components;
      /* A copy writes the whole value, so the write mask covers every
       * component we just loaded. */
      nir_intrinsic_set_write_mask(store, (1 << num_components) - 1);
      store->variables[0] = nir_deref_as_var(nir_copy_deref(store, &dest_head->deref));

      store->src[0].is_ssa = true;
      store->src[0].ssa = &load->dest.ssa;

      nir_instr_insert_before(&copy_instr->instr, &store->instr);
   }
}
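
/* Worked example (hypothetical shader variables, for illustration only):
 * given
 *
 *    vec4 a[3][2], b[3][2];
 *    copy_var a[*][*], b[*][*]
 *
 * emit_copy_load_store() finds the outer wildcard pair and stamps it out as
 * direct indices 0..2; each recursive call then finds the inner pair and
 * stamps it out as indices 0..1, so the copy ends up as six
 * load_var/store_var pairs, one per vec4 element.
 */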

/* Lowers a copy instruction to a sequence of load/store instructions
 *
 * The new instructions are placed before the copy instruction in the IR.
 */
void
nir_lower_var_copy_instr(nir_intrinsic_instr *copy, void *mem_ctx)
{
   assert(copy->intrinsic == nir_intrinsic_copy_var);
   emit_copy_load_store(copy, copy->variables[0], copy->variables[1],
                        &copy->variables[0]->deref,
                        &copy->variables[1]->deref, mem_ctx);
}

static void
lower_var_copies_impl(nir_function_impl *impl)
{
   void *mem_ctx = ralloc_parent(impl);

   nir_foreach_block(block, impl) {
      nir_foreach_instr_safe(instr, block) {
         if (instr->type != nir_instr_type_intrinsic)
            continue;

         nir_intrinsic_instr *copy = nir_instr_as_intrinsic(instr);
         if (copy->intrinsic != nir_intrinsic_copy_var)
            continue;

         nir_lower_var_copy_instr(copy, mem_ctx);

         nir_instr_remove(&copy->instr);
         ralloc_free(copy);
      }
   }
}

/* Lowers every copy_var instruction in the program to a sequence of
 * load/store instructions.
 */
void
nir_lower_var_copies(nir_shader *shader)
{
   nir_foreach_function(function, shader) {
      if (function->impl)
         lower_var_copies_impl(function->impl);
   }
}
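
/* A hypothetical call site, not part of this file: drivers typically split
 * struct copies first and then run this pass so that only vector copies
 * remain to be lowered.  The function name example_lower_copies() and the
 * exact pass ordering are assumptions for illustration, not Mesa code.
 */
#if 0
static void
example_lower_copies(nir_shader *shader)
{
   /* Break struct copies into per-member copies first. */
   nir_split_var_copies(shader);

   /* Then turn every remaining copy_var into load_var/store_var pairs. */
   nir_lower_var_copies(shader);
}
#endif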