2 * Copyright (c) 2020 Etnaviv Project
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sub license,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
 * Authors:
 *    Jonathan Marek <jonathan@marek.ca>
27 #ifndef H_ETNAVIV_COMPILER_NIR
28 #define H_ETNAVIV_COMPILER_NIR
30 #include "compiler/nir/nir.h"
32 #define compile_error(ctx, args...) ({ \
43 static inline bool is_sysval(nir_instr
*instr
)
45 if (instr
->type
!= nir_instr_type_intrinsic
)
48 nir_intrinsic_instr
*intr
= nir_instr_as_intrinsic(instr
);
49 return intr
->intrinsic
== nir_intrinsic_load_front_face
||
50 intr
->intrinsic
== nir_intrinsic_load_frag_coord
;
53 /* get unique ssa/reg index for nir_src */
54 static inline unsigned
55 src_index(nir_function_impl
*impl
, nir_src
*src
)
57 return src
->is_ssa
? src
->ssa
->index
: (src
->reg
.reg
->index
+ impl
->ssa_alloc
);
60 /* get unique ssa/reg index for nir_dest */
61 static inline unsigned
62 dest_index(nir_function_impl
*impl
, nir_dest
*dest
)
64 return dest
->is_ssa
? dest
->ssa
.index
: (dest
->reg
.reg
->index
+ impl
->ssa_alloc
);
68 update_swiz_mask(nir_alu_instr
*alu
, nir_dest
*dest
, unsigned *swiz
, unsigned *mask
)
73 bool is_vec
= dest
!= NULL
;
74 unsigned swizzle
= 0, write_mask
= 0;
75 for (unsigned i
= 0; i
< 4; i
++) {
76 /* channel not written */
77 if (!(alu
->dest
.write_mask
& (1 << i
)))
79 /* src is different (only check for vecN) */
80 if (is_vec
&& alu
->src
[i
].src
.ssa
!= &dest
->ssa
)
83 unsigned src_swiz
= is_vec
? alu
->src
[i
].swizzle
[0] : alu
->src
[0].swizzle
[i
];
84 swizzle
|= (*swiz
>> src_swiz
* 2 & 3) << i
* 2;
85 /* this channel isn't written through this chain */
86 if (*mask
& (1 << src_swiz
))
94 real_dest(nir_dest
*dest
, unsigned *swiz
, unsigned *mask
)
96 if (!dest
|| !dest
->is_ssa
)
99 bool can_bypass_src
= !list_length(&dest
->ssa
.if_uses
);
100 nir_instr
*p_instr
= dest
->ssa
.parent_instr
;
102 /* if used by a vecN, the "real" destination becomes the vecN destination
103 * lower_alu guarantees that values used by a vecN are only used by that vecN
104 * we can apply the same logic to movs in a some cases too
106 nir_foreach_use(use_src
, &dest
->ssa
) {
107 nir_instr
*instr
= use_src
->parent_instr
;
109 /* src bypass check: for now only deal with tex src mov case
110 * note: for alu don't bypass mov for multiple uniform sources
112 switch (instr
->type
) {
113 case nir_instr_type_tex
:
114 if (p_instr
->type
== nir_instr_type_alu
&&
115 nir_instr_as_alu(p_instr
)->op
== nir_op_mov
) {
119 can_bypass_src
= false;
123 if (instr
->type
!= nir_instr_type_alu
)
126 nir_alu_instr
*alu
= nir_instr_as_alu(instr
);
132 assert(list_length(&dest
->ssa
.if_uses
) == 0);
133 nir_foreach_use(use_src
, &dest
->ssa
)
134 assert(use_src
->parent_instr
== instr
);
136 update_swiz_mask(alu
, dest
, swiz
, mask
);
139 switch (dest
->ssa
.parent_instr
->type
) {
140 case nir_instr_type_alu
:
141 case nir_instr_type_tex
:
146 if (list_length(&dest
->ssa
.if_uses
) || list_length(&dest
->ssa
.uses
) > 1)
149 update_swiz_mask(alu
, NULL
, swiz
, mask
);
156 assert(!(instr
->pass_flags
& BYPASS_SRC
));
157 instr
->pass_flags
|= BYPASS_DST
;
158 return real_dest(&alu
->dest
.dest
, swiz
, mask
);
161 if (can_bypass_src
&& !(p_instr
->pass_flags
& BYPASS_DST
)) {
162 p_instr
->pass_flags
|= BYPASS_SRC
;
169 /* if instruction dest needs a register, return nir_dest for it */
170 static inline nir_dest
*
171 dest_for_instr(nir_instr
*instr
)
173 nir_dest
*dest
= NULL
;
175 switch (instr
->type
) {
176 case nir_instr_type_alu
:
177 dest
= &nir_instr_as_alu(instr
)->dest
.dest
;
179 case nir_instr_type_tex
:
180 dest
= &nir_instr_as_tex(instr
)->dest
;
182 case nir_instr_type_intrinsic
: {
183 nir_intrinsic_instr
*intr
= nir_instr_as_intrinsic(instr
);
184 if (intr
->intrinsic
== nir_intrinsic_load_uniform
||
185 intr
->intrinsic
== nir_intrinsic_load_ubo
||
186 intr
->intrinsic
== nir_intrinsic_load_input
||
187 intr
->intrinsic
== nir_intrinsic_load_instance_id
)
190 case nir_instr_type_deref
:
195 return real_dest(dest
, NULL
, NULL
);
200 nir_dest
*dest
; /* cached dest_for_instr */
201 unsigned live_start
, live_end
; /* live range */
205 etna_live_defs(nir_function_impl
*impl
, struct live_def
*defs
, unsigned *live_map
);