nir: Add a pass for selectively lowering variables to scratch space
[mesa.git] / src / compiler / nir / nir_clone.c
1 /*
2 * Copyright © 2015 Red Hat
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "nir.h"
25 #include "nir_control_flow.h"
26
27 /* Secret Decoder Ring:
28 * clone_foo():
29 * Allocate and clone a foo.
30 * __clone_foo():
31 * Clone body of foo (ie. parent class, embedded struct, etc)
32 */
33
/* State shared by all the clone_*() helpers below.  One instance lives for
 * the duration of a single clone operation and is discarded afterwards.
 */
typedef struct {
   /* True if we are cloning an entire shader. */
   bool global_clone;

   /* If true allows the clone operation to fall back to the original pointer
    * if no clone pointer is found in the remap table. This allows us to
    * clone a loop body without having to add srcs from outside the loop to
    * the remap table. This is useful for loop unrolling.
    */
   bool allow_remap_fallback;

   /* maps orig ptr -> cloned ptr: */
   struct hash_table *remap_table;

   /* List of phi sources.  Filled by clone_phi() and patched up at the end
    * of the clone by fixup_phi_srcs().
    */
   struct list_head phi_srcs;

   /* new shader object, used as memctx for just about everything else: */
   nir_shader *ns;
} clone_state;
54
55 static void
56 init_clone_state(clone_state *state, struct hash_table *remap_table,
57 bool global, bool allow_remap_fallback)
58 {
59 state->global_clone = global;
60 state->allow_remap_fallback = allow_remap_fallback;
61
62 if (remap_table) {
63 state->remap_table = remap_table;
64 } else {
65 state->remap_table = _mesa_pointer_hash_table_create(NULL);
66 }
67
68 list_inithead(&state->phi_srcs);
69 }
70
/* Tear down a clone_state.  NOTE: this destroys the remap table even if it
 * was supplied by the caller to init_clone_state(), so only call this when
 * the state owns its table (i.e. remap_table was NULL at init).
 */
static void
free_clone_state(clone_state *state)
{
   _mesa_hash_table_destroy(state->remap_table, NULL);
}
76
77 static inline void *
78 _lookup_ptr(clone_state *state, const void *ptr, bool global)
79 {
80 struct hash_entry *entry;
81
82 if (!ptr)
83 return NULL;
84
85 if (!state->global_clone && global)
86 return (void *)ptr;
87
88 entry = _mesa_hash_table_search(state->remap_table, ptr);
89 if (!entry) {
90 assert(state->allow_remap_fallback);
91 return (void *)ptr;
92 }
93
94 return entry->data;
95 }
96
/* Record that ptr (from the original IR) was cloned as nptr so that later
 * lookups through _lookup_ptr() translate references to it.
 */
static void
add_remap(clone_state *state, void *nptr, const void *ptr)
{
   _mesa_hash_table_insert(state->remap_table, ptr, nptr);
}
102
/* Look up the clone of a function-local object; see _lookup_ptr(). */
static void *
remap_local(clone_state *state, const void *ptr)
{
   return _lookup_ptr(state, ptr, false);
}
108
/* Look up the clone of a global object (e.g. a nir_function).  When not
 * cloning a whole shader, the original pointer is returned unchanged.
 */
static void *
remap_global(clone_state *state, const void *ptr)
{
   return _lookup_ptr(state, ptr, true);
}
114
/* Registers are always function-local, so look them up as locals. */
static nir_register *
remap_reg(clone_state *state, const nir_register *reg)
{
   return _lookup_ptr(state, reg, false);
}
120
/* Variables may be local or global; nir_variable_is_global() selects which
 * lookup semantics apply.
 */
static nir_variable *
remap_var(clone_state *state, const nir_variable *var)
{
   return _lookup_ptr(state, var, nir_variable_is_global(var));
}
126
127 nir_constant *
128 nir_constant_clone(const nir_constant *c, nir_variable *nvar)
129 {
130 nir_constant *nc = ralloc(nvar, nir_constant);
131
132 memcpy(nc->values, c->values, sizeof(nc->values));
133 nc->num_elements = c->num_elements;
134 nc->elements = ralloc_array(nvar, nir_constant *, c->num_elements);
135 for (unsigned i = 0; i < c->num_elements; i++) {
136 nc->elements[i] = nir_constant_clone(c->elements[i], nvar);
137 }
138
139 return nc;
140 }
141
142 /* NOTE: for cloning nir_variables, bypass nir_variable_create to avoid
143 * having to deal with locals and globals separately:
144 */
145 nir_variable *
146 nir_variable_clone(const nir_variable *var, nir_shader *shader)
147 {
148 nir_variable *nvar = rzalloc(shader, nir_variable);
149
150 nvar->type = var->type;
151 nvar->name = ralloc_strdup(nvar, var->name);
152 nvar->data = var->data;
153 nvar->num_state_slots = var->num_state_slots;
154 nvar->state_slots = ralloc_array(nvar, nir_state_slot, var->num_state_slots);
155 memcpy(nvar->state_slots, var->state_slots,
156 var->num_state_slots * sizeof(nir_state_slot));
157 if (var->constant_initializer) {
158 nvar->constant_initializer =
159 nir_constant_clone(var->constant_initializer, nvar);
160 }
161 nvar->interface_type = var->interface_type;
162
163 nvar->num_members = var->num_members;
164 if (var->num_members) {
165 nvar->members = ralloc_array(nvar, struct nir_variable_data,
166 var->num_members);
167 memcpy(nvar->members, var->members,
168 var->num_members * sizeof(*var->members));
169 }
170
171 return nvar;
172 }
173
174 static nir_variable *
175 clone_variable(clone_state *state, const nir_variable *var)
176 {
177 nir_variable *nvar = nir_variable_clone(var, state->ns);
178 add_remap(state, nvar, var);
179
180 return nvar;
181 }
182
183 /* clone list of nir_variable: */
184 static void
185 clone_var_list(clone_state *state, struct exec_list *dst,
186 const struct exec_list *list)
187 {
188 exec_list_make_empty(dst);
189 foreach_list_typed(nir_variable, var, node, list) {
190 nir_variable *nvar = clone_variable(state, var);
191 exec_list_push_tail(dst, &nvar->node);
192 }
193 }
194
195 /* NOTE: for cloning nir_registers, bypass nir_global/local_reg_create()
196 * to avoid having to deal with locals and globals separately:
197 */
198 static nir_register *
199 clone_register(clone_state *state, const nir_register *reg)
200 {
201 nir_register *nreg = rzalloc(state->ns, nir_register);
202 add_remap(state, nreg, reg);
203
204 nreg->num_components = reg->num_components;
205 nreg->bit_size = reg->bit_size;
206 nreg->num_array_elems = reg->num_array_elems;
207 nreg->index = reg->index;
208 nreg->name = ralloc_strdup(nreg, reg->name);
209
210 /* reconstructing uses/defs/if_uses handled by nir_instr_insert() */
211 list_inithead(&nreg->uses);
212 list_inithead(&nreg->defs);
213 list_inithead(&nreg->if_uses);
214
215 return nreg;
216 }
217
218 /* clone list of nir_register: */
219 static void
220 clone_reg_list(clone_state *state, struct exec_list *dst,
221 const struct exec_list *list)
222 {
223 exec_list_make_empty(dst);
224 foreach_list_typed(nir_register, reg, node, list) {
225 nir_register *nreg = clone_register(state, reg);
226 exec_list_push_tail(dst, &nreg->node);
227 }
228 }
229
/* Clone the body of a nir_src in place.  ninstr_or_if is the new owner
 * (an instruction or a nir_if) and serves as the ralloc context for any
 * indirect sub-source.  SSA sources are remapped through the table;
 * register sources remap the register and recursively clone the optional
 * indirect offset.
 */
static void
__clone_src(clone_state *state, void *ninstr_or_if,
            nir_src *nsrc, const nir_src *src)
{
   nsrc->is_ssa = src->is_ssa;
   if (src->is_ssa) {
      nsrc->ssa = remap_local(state, src->ssa);
   } else {
      nsrc->reg.reg = remap_reg(state, src->reg.reg);
      if (src->reg.indirect) {
         nsrc->reg.indirect = ralloc(ninstr_or_if, nir_src);
         __clone_src(state, ninstr_or_if, nsrc->reg.indirect, src->reg.indirect);
      }
      nsrc->reg.base_offset = src->reg.base_offset;
   }
}
246
/* Clone the body of a nir_dest in place.  SSA destinations create a fresh
 * def on the new instruction and record it in the remap table so later
 * sources can resolve to it; register destinations mirror __clone_src().
 */
static void
__clone_dst(clone_state *state, nir_instr *ninstr,
            nir_dest *ndst, const nir_dest *dst)
{
   ndst->is_ssa = dst->is_ssa;
   if (dst->is_ssa) {
      nir_ssa_dest_init(ninstr, ndst, dst->ssa.num_components,
                        dst->ssa.bit_size, dst->ssa.name);
      add_remap(state, &ndst->ssa, &dst->ssa);
   } else {
      ndst->reg.reg = remap_reg(state, dst->reg.reg);
      if (dst->reg.indirect) {
         ndst->reg.indirect = ralloc(ninstr, nir_src);
         __clone_src(state, ninstr, ndst->reg.indirect, dst->reg.indirect);
      }
      ndst->reg.base_offset = dst->reg.base_offset;
   }
}
265
266 static nir_alu_instr *
267 clone_alu(clone_state *state, const nir_alu_instr *alu)
268 {
269 nir_alu_instr *nalu = nir_alu_instr_create(state->ns, alu->op);
270 nalu->exact = alu->exact;
271
272 __clone_dst(state, &nalu->instr, &nalu->dest.dest, &alu->dest.dest);
273 nalu->dest.saturate = alu->dest.saturate;
274 nalu->dest.write_mask = alu->dest.write_mask;
275
276 for (unsigned i = 0; i < nir_op_infos[alu->op].num_inputs; i++) {
277 __clone_src(state, &nalu->instr, &nalu->src[i].src, &alu->src[i].src);
278 nalu->src[i].negate = alu->src[i].negate;
279 nalu->src[i].abs = alu->src[i].abs;
280 memcpy(nalu->src[i].swizzle, alu->src[i].swizzle,
281 sizeof(nalu->src[i].swizzle));
282 }
283
284 return nalu;
285 }
286
/* Clone a deref instruction.  Var derefs remap the variable and return
 * early; every other deref type first clones the parent source and then
 * copies the type-specific payload.
 */
static nir_deref_instr *
clone_deref_instr(clone_state *state, const nir_deref_instr *deref)
{
   nir_deref_instr *nderef =
      nir_deref_instr_create(state->ns, deref->deref_type);

   __clone_dst(state, &nderef->instr, &nderef->dest, &deref->dest);

   nderef->mode = deref->mode;
   nderef->type = deref->type;

   if (deref->deref_type == nir_deref_type_var) {
      /* Var derefs have no parent; just remap the variable. */
      nderef->var = remap_var(state, deref->var);
      return nderef;
   }

   /* Every non-var deref has a parent deref source. */
   __clone_src(state, &nderef->instr, &nderef->parent, &deref->parent);

   switch (deref->deref_type) {
   case nir_deref_type_struct:
      nderef->strct.index = deref->strct.index;
      break;

   case nir_deref_type_array:
   case nir_deref_type_ptr_as_array:
      /* Array derefs carry an index source that must be cloned too. */
      __clone_src(state, &nderef->instr,
                  &nderef->arr.index, &deref->arr.index);
      break;

   case nir_deref_type_array_wildcard:
      /* Nothing to do */
      break;

   case nir_deref_type_cast:
      nderef->cast.ptr_stride = deref->cast.ptr_stride;
      break;

   default:
      unreachable("Invalid instruction deref type");
   }

   return nderef;
}
330
331 static nir_intrinsic_instr *
332 clone_intrinsic(clone_state *state, const nir_intrinsic_instr *itr)
333 {
334 nir_intrinsic_instr *nitr =
335 nir_intrinsic_instr_create(state->ns, itr->intrinsic);
336
337 unsigned num_srcs = nir_intrinsic_infos[itr->intrinsic].num_srcs;
338
339 if (nir_intrinsic_infos[itr->intrinsic].has_dest)
340 __clone_dst(state, &nitr->instr, &nitr->dest, &itr->dest);
341
342 nitr->num_components = itr->num_components;
343 memcpy(nitr->const_index, itr->const_index, sizeof(nitr->const_index));
344
345 for (unsigned i = 0; i < num_srcs; i++)
346 __clone_src(state, &nitr->instr, &nitr->src[i], &itr->src[i]);
347
348 return nitr;
349 }
350
351 static nir_load_const_instr *
352 clone_load_const(clone_state *state, const nir_load_const_instr *lc)
353 {
354 nir_load_const_instr *nlc =
355 nir_load_const_instr_create(state->ns, lc->def.num_components,
356 lc->def.bit_size);
357
358 memcpy(&nlc->value, &lc->value, sizeof(nlc->value));
359
360 add_remap(state, &nlc->def, &lc->def);
361
362 return nlc;
363 }
364
365 static nir_ssa_undef_instr *
366 clone_ssa_undef(clone_state *state, const nir_ssa_undef_instr *sa)
367 {
368 nir_ssa_undef_instr *nsa =
369 nir_ssa_undef_instr_create(state->ns, sa->def.num_components,
370 sa->def.bit_size);
371
372 add_remap(state, &nsa->def, &sa->def);
373
374 return nsa;
375 }
376
377 static nir_tex_instr *
378 clone_tex(clone_state *state, const nir_tex_instr *tex)
379 {
380 nir_tex_instr *ntex = nir_tex_instr_create(state->ns, tex->num_srcs);
381
382 ntex->sampler_dim = tex->sampler_dim;
383 ntex->dest_type = tex->dest_type;
384 ntex->op = tex->op;
385 __clone_dst(state, &ntex->instr, &ntex->dest, &tex->dest);
386 for (unsigned i = 0; i < ntex->num_srcs; i++) {
387 ntex->src[i].src_type = tex->src[i].src_type;
388 __clone_src(state, &ntex->instr, &ntex->src[i].src, &tex->src[i].src);
389 }
390 ntex->coord_components = tex->coord_components;
391 ntex->is_array = tex->is_array;
392 ntex->is_shadow = tex->is_shadow;
393 ntex->is_new_style_shadow = tex->is_new_style_shadow;
394 ntex->component = tex->component;
395 memcpy(ntex->tg4_offsets, tex->tg4_offsets, sizeof(tex->tg4_offsets));
396
397 ntex->texture_index = tex->texture_index;
398 ntex->texture_array_size = tex->texture_array_size;
399 ntex->sampler_index = tex->sampler_index;
400
401 return ntex;
402 }
403
/* Clone a phi instruction into nblk.  Unlike other instructions, the clone
 * is inserted here (see below) and its sources are left pointing at the
 * *original* defs; fixup_phi_srcs() rewrites them once the whole function
 * has been cloned.
 */
static nir_phi_instr *
clone_phi(clone_state *state, const nir_phi_instr *phi, nir_block *nblk)
{
   nir_phi_instr *nphi = nir_phi_instr_create(state->ns);

   __clone_dst(state, &nphi->instr, &nphi->dest, &phi->dest);

   /* Cloning a phi node is a bit different from other instructions.  The
    * sources of phi instructions are the only time where we can use an SSA
    * def before it is defined.  In order to handle this, we just copy over
    * the sources from the old phi instruction directly and then fix them up
    * in a second pass once all the instructions in the function have been
    * properly cloned.
    *
    * In order to ensure that the copied sources (which are the same as the
    * old phi instruction's sources for now) don't get inserted into the old
    * shader's use-def lists, we have to add the phi instruction *before* we
    * set up its sources.
    */
   nir_instr_insert_after_block(nblk, &nphi->instr);

   foreach_list_typed(nir_phi_src, src, node, &phi->srcs) {
      nir_phi_src *nsrc = ralloc(nphi, nir_phi_src);

      /* Just copy the old source for now. */
      memcpy(nsrc, src, sizeof(*src));

      /* Since we're not letting nir_insert_instr handle use/def stuff for us,
       * we have to set the parent_instr manually.  It doesn't really matter
       * when we do it, so we might as well do it here.
       */
      nsrc->src.parent_instr = &nphi->instr;

      /* Stash it in the list of phi sources.  We'll walk this list and fix up
       * sources at the very end of clone_function_impl.
       */
      list_add(&nsrc->src.use_link, &state->phi_srcs);

      exec_list_push_tail(&nphi->srcs, &nsrc->node);
   }

   return nphi;
}
447
448 static nir_jump_instr *
449 clone_jump(clone_state *state, const nir_jump_instr *jmp)
450 {
451 nir_jump_instr *njmp = nir_jump_instr_create(state->ns, jmp->type);
452
453 return njmp;
454 }
455
456 static nir_call_instr *
457 clone_call(clone_state *state, const nir_call_instr *call)
458 {
459 nir_function *ncallee = remap_global(state, call->callee);
460 nir_call_instr *ncall = nir_call_instr_create(state->ns, ncallee);
461
462 for (unsigned i = 0; i < ncall->num_params; i++)
463 __clone_src(state, ncall, &ncall->params[i], &call->params[i]);
464
465 return ncall;
466 }
467
/* Dispatch to the clone_*() helper matching the instruction type.  Phis
 * are deliberately excluded: they need their target block and deferred
 * source fixups, so clone_block() calls clone_phi() directly.  Parallel
 * copies are not supported.
 */
static nir_instr *
clone_instr(clone_state *state, const nir_instr *instr)
{
   switch (instr->type) {
   case nir_instr_type_alu:
      return &clone_alu(state, nir_instr_as_alu(instr))->instr;
   case nir_instr_type_deref:
      return &clone_deref_instr(state, nir_instr_as_deref(instr))->instr;
   case nir_instr_type_intrinsic:
      return &clone_intrinsic(state, nir_instr_as_intrinsic(instr))->instr;
   case nir_instr_type_load_const:
      return &clone_load_const(state, nir_instr_as_load_const(instr))->instr;
   case nir_instr_type_ssa_undef:
      return &clone_ssa_undef(state, nir_instr_as_ssa_undef(instr))->instr;
   case nir_instr_type_tex:
      return &clone_tex(state, nir_instr_as_tex(instr))->instr;
   case nir_instr_type_phi:
      unreachable("Cannot clone phis with clone_instr");
   case nir_instr_type_jump:
      return &clone_jump(state, nir_instr_as_jump(instr))->instr;
   case nir_instr_type_call:
      return &clone_call(state, nir_instr_as_call(instr))->instr;
   case nir_instr_type_parallel_copy:
      unreachable("Cannot clone parallel copies");
   default:
      unreachable("bad instr type");
      return NULL;
   }
}
497
/* Clone the instructions of blk into the (empty) block already sitting at
 * the tail of cf_list, and return that block.
 */
static nir_block *
clone_block(clone_state *state, struct exec_list *cf_list, const nir_block *blk)
{
   /* Don't actually create a new block.  Just use the one from the tail of
    * the list.  NIR guarantees that the tail of the list is a block and that
    * no two blocks are side-by-side in the IR; It should be empty.
    */
   nir_block *nblk =
      exec_node_data(nir_block, exec_list_get_tail(cf_list), cf_node.node);
   assert(nblk->cf_node.type == nir_cf_node_block);
   assert(exec_list_is_empty(&nblk->instr_list));

   /* We need this for phi sources */
   add_remap(state, nblk, blk);

   nir_foreach_instr(instr, blk) {
      if (instr->type == nir_instr_type_phi) {
         /* Phi instructions are a bit of a special case when cloning because
          * we don't want inserting the instruction to automatically handle
          * use/defs for us.  Instead, we need to wait until all the
          * blocks/instructions are in so that we can set their sources up.
          */
         clone_phi(state, nir_instr_as_phi(instr), nblk);
      } else {
         nir_instr *ninstr = clone_instr(state, instr);
         nir_instr_insert_after_block(nblk, ninstr);
      }
   }

   return nblk;
}
529
530 static void
531 clone_cf_list(clone_state *state, struct exec_list *dst,
532 const struct exec_list *list);
533
534 static nir_if *
535 clone_if(clone_state *state, struct exec_list *cf_list, const nir_if *i)
536 {
537 nir_if *ni = nir_if_create(state->ns);
538 ni->control = i->control;
539
540 __clone_src(state, ni, &ni->condition, &i->condition);
541
542 nir_cf_node_insert_end(cf_list, &ni->cf_node);
543
544 clone_cf_list(state, &ni->then_list, &i->then_list);
545 clone_cf_list(state, &ni->else_list, &i->else_list);
546
547 return ni;
548 }
549
550 static nir_loop *
551 clone_loop(clone_state *state, struct exec_list *cf_list, const nir_loop *loop)
552 {
553 nir_loop *nloop = nir_loop_create(state->ns);
554 nloop->control = loop->control;
555 nloop->partially_unrolled = loop->partially_unrolled;
556
557 nir_cf_node_insert_end(cf_list, &nloop->cf_node);
558
559 clone_cf_list(state, &nloop->body, &loop->body);
560
561 return nloop;
562 }
563
/* Clone a list of nir_cf_node into dst, dispatching on node type.  Note
 * that dst is not emptied here: nir_cf_list_clone() and the if/loop
 * creation helpers seed it with the leading block.
 */
static void
clone_cf_list(clone_state *state, struct exec_list *dst,
              const struct exec_list *list)
{
   foreach_list_typed(nir_cf_node, cf, node, list) {
      switch (cf->type) {
      case nir_cf_node_block:
         clone_block(state, dst, nir_cf_node_as_block(cf));
         break;
      case nir_cf_node_if:
         clone_if(state, dst, nir_cf_node_as_if(cf));
         break;
      case nir_cf_node_loop:
         clone_loop(state, dst, nir_cf_node_as_loop(cf));
         break;
      default:
         unreachable("bad cf type");
      }
   }
}
585
/* After we've cloned almost everything, we have to walk the list of phi
 * sources and fix them up.  Thanks to loops, the block and SSA value for a
 * phi source may not be defined when we first encounter it.  Instead, we
 * add it to the phi_srcs list and we fix it up here.
 */
static void
fixup_phi_srcs(clone_state *state)
{
   list_for_each_entry_safe(nir_phi_src, src, &state->phi_srcs, src.use_link) {
      src->pred = remap_local(state, src->pred);

      /* Remove from this list */
      list_del(&src->src.use_link);

      /* Remap the source's def/register to the clone and link the source
       * into the clone's use list (clone_phi() skipped this on purpose).
       */
      if (src->src.is_ssa) {
         src->src.ssa = remap_local(state, src->src.ssa);
         list_addtail(&src->src.use_link, &src->src.ssa->uses);
      } else {
         src->src.reg.reg = remap_reg(state, src->src.reg.reg);
         list_addtail(&src->src.use_link, &src->src.reg.reg->uses);
      }
   }
   assert(list_empty(&state->phi_srcs));
}
610
611 void
612 nir_cf_list_clone(nir_cf_list *dst, nir_cf_list *src, nir_cf_node *parent,
613 struct hash_table *remap_table)
614 {
615 exec_list_make_empty(&dst->list);
616 dst->impl = src->impl;
617
618 if (exec_list_is_empty(&src->list))
619 return;
620
621 clone_state state;
622 init_clone_state(&state, remap_table, false, true);
623
624 /* We use the same shader */
625 state.ns = src->impl->function->shader;
626
627 /* The control-flow code assumes that the list of cf_nodes always starts
628 * and ends with a block. We start by adding an empty block.
629 */
630 nir_block *nblk = nir_block_create(state.ns);
631 nblk->cf_node.parent = parent;
632 exec_list_push_tail(&dst->list, &nblk->cf_node.node);
633
634 clone_cf_list(&state, &dst->list, &src->list);
635
636 fixup_phi_srcs(&state);
637 }
638
/* Clone the body of a nir_function_impl: locals, registers, and the CF
 * list.  The caller is responsible for attaching the result to its
 * nir_function (see nir_shader_clone()).
 */
static nir_function_impl *
clone_function_impl(clone_state *state, const nir_function_impl *fi)
{
   nir_function_impl *nfi = nir_function_impl_create_bare(state->ns);

   clone_var_list(state, &nfi->locals, &fi->locals);
   clone_reg_list(state, &nfi->registers, &fi->registers);
   nfi->reg_alloc = fi->reg_alloc;

   /* Any pending phi fixups from a previous impl must already be consumed. */
   assert(list_empty(&state->phi_srcs));

   clone_cf_list(state, &nfi->body, &fi->body);

   /* Resolve the deferred phi sources queued by clone_phi(). */
   fixup_phi_srcs(state);

   /* All metadata is invalidated in the cloning process */
   nfi->valid_metadata = 0;

   return nfi;
}
659
660 nir_function_impl *
661 nir_function_impl_clone(nir_shader *shader, const nir_function_impl *fi)
662 {
663 clone_state state;
664 init_clone_state(&state, NULL, false, false);
665
666 state.ns = shader;
667
668 nir_function_impl *nfi = clone_function_impl(&state, fi);
669
670 free_clone_state(&state);
671
672 return nfi;
673 }
674
675 static nir_function *
676 clone_function(clone_state *state, const nir_function *fxn, nir_shader *ns)
677 {
678 assert(ns == state->ns);
679 nir_function *nfxn = nir_function_create(ns, fxn->name);
680
681 /* Needed for call instructions */
682 add_remap(state, nfxn, fxn);
683
684 nfxn->num_params = fxn->num_params;
685 nfxn->params = ralloc_array(state->ns, nir_parameter, fxn->num_params);
686 memcpy(nfxn->params, fxn->params, sizeof(nir_parameter) * fxn->num_params);
687 nfxn->is_entrypoint = fxn->is_entrypoint;
688
689 /* At first glance, it looks like we should clone the function_impl here.
690 * However, call instructions need to be able to reference at least the
691 * function and those will get processed as we clone the function_impls.
692 * We stop here and do function_impls as a second pass.
693 */
694
695 return nfxn;
696 }
697
/* Deep-copy an entire shader into mem_ctx.  All function declarations are
 * cloned before any function_impl so that call instructions can resolve
 * their callees regardless of declaration order.
 */
nir_shader *
nir_shader_clone(void *mem_ctx, const nir_shader *s)
{
   clone_state state;
   init_clone_state(&state, NULL, true, false);

   nir_shader *ns = nir_shader_create(mem_ctx, s->info.stage, s->options, NULL);
   state.ns = ns;

   clone_var_list(&state, &ns->uniforms, &s->uniforms);
   clone_var_list(&state, &ns->inputs, &s->inputs);
   clone_var_list(&state, &ns->outputs, &s->outputs);
   clone_var_list(&state, &ns->shared, &s->shared);
   clone_var_list(&state, &ns->globals, &s->globals);
   clone_var_list(&state, &ns->system_values, &s->system_values);

   /* Go through and clone functions */
   foreach_list_typed(nir_function, fxn, node, &s->functions)
      clone_function(&state, fxn, ns);

   /* Only after all functions are cloned can we clone the actual function
    * implementations.  This is because nir_call_instrs need to reference the
    * functions of other functions and we don't know what order the functions
    * will have in the list.
    */
   nir_foreach_function(fxn, s) {
      nir_function *nfxn = remap_global(&state, fxn);
      nfxn->impl = clone_function_impl(&state, fxn->impl);
      nfxn->impl->function = nfxn;
   }

   ns->info = s->info;
   /* Re-dup the name/label strings into the new shader's ralloc context. */
   ns->info.name = ralloc_strdup(ns, ns->info.name);
   if (ns->info.label)
      ns->info.label = ralloc_strdup(ns, ns->info.label);

   ns->num_inputs = s->num_inputs;
   ns->num_uniforms = s->num_uniforms;
   ns->num_outputs = s->num_outputs;
   ns->num_shared = s->num_shared;
   ns->scratch_size = s->scratch_size;

   /* Deep-copy the constant-data blob, if any. */
   ns->constant_data_size = s->constant_data_size;
   if (s->constant_data_size > 0) {
      ns->constant_data = ralloc_size(ns, s->constant_data_size);
      memcpy(ns->constant_data, s->constant_data, s->constant_data_size);
   }

   free_clone_state(&state);

   return ns;
}