amd/llvm: implement nir_intrinsic_demote(_if) and nir_intrinsic_is_helper_invocation
src/amd/llvm/ac_llvm_build.h
/*
 * Copyright 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 */
#ifndef AC_LLVM_BUILD_H
#define AC_LLVM_BUILD_H

#include <stdbool.h>
#include <llvm-c/Core.h>
#include "compiler/nir/nir.h"
#include "amd_family.h"
#include "ac_shader_util.h"
#include "ac_shader_args.h"
#include "ac_shader_abi.h"

#ifdef __cplusplus
extern "C" {
#endif

enum {
   AC_ADDR_SPACE_FLAT = 0, /* Slower than global. */
   AC_ADDR_SPACE_GLOBAL = 1,
   AC_ADDR_SPACE_GDS = 2,
   AC_ADDR_SPACE_LDS = 3,
   AC_ADDR_SPACE_CONST = 4, /* Global allowing SMEM. */
   AC_ADDR_SPACE_CONST_32BIT = 6, /* same as CONST, but the pointer type has 32 bits */
};
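/* These values are used as LLVM pointer address spaces when building types.
 * A minimal sketch, assuming 'ctx' is an initialized ac_llvm_context:
 *
 *    LLVMTypeRef desc_ptr_type =
 *       LLVMPointerType(ctx->v4i32, AC_ADDR_SPACE_CONST_32BIT);
 *
 * The ac_array_in_const_addr_space()/ac_array_in_const32_addr_space()
 * helpers declared later in this header wrap an element type the same way.
 */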

#define AC_WAIT_LGKM   (1 << 0) /* LDS, GDS, constant, message */
#define AC_WAIT_VLOAD  (1 << 1) /* VMEM load/sample instructions */
#define AC_WAIT_VSTORE (1 << 2) /* VMEM store instructions */
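/* A minimal usage sketch (assumed typical pattern): the wait flags form a
 * bitmask and can be OR'd together before being passed to ac_build_waitcnt(),
 * declared later in this header.
 *
 *    ac_build_waitcnt(ctx, AC_WAIT_LGKM | AC_WAIT_VSTORE);
 */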

struct ac_llvm_flow;
struct ac_llvm_compiler;
enum ac_float_mode;

struct ac_llvm_flow_state {
   struct ac_llvm_flow *stack;
   unsigned depth_max;
   unsigned depth;
};

struct ac_llvm_context {
   LLVMContextRef context;
   LLVMModuleRef module;
   LLVMBuilderRef builder;

   LLVMValueRef main_function;

   LLVMTypeRef voidt;
   LLVMTypeRef i1;
   LLVMTypeRef i8;
   LLVMTypeRef i16;
   LLVMTypeRef i32;
   LLVMTypeRef i64;
   LLVMTypeRef i128;
   LLVMTypeRef intptr;
   LLVMTypeRef f16;
   LLVMTypeRef f32;
   LLVMTypeRef f64;
   LLVMTypeRef v2i16;
   LLVMTypeRef v2i32;
   LLVMTypeRef v3i32;
   LLVMTypeRef v4i32;
   LLVMTypeRef v2f32;
   LLVMTypeRef v3f32;
   LLVMTypeRef v4f32;
   LLVMTypeRef v8i32;
   LLVMTypeRef iN_wavemask;
   LLVMTypeRef iN_ballotmask;

   LLVMValueRef i8_0;
   LLVMValueRef i8_1;
   LLVMValueRef i16_0;
   LLVMValueRef i16_1;
   LLVMValueRef i32_0;
   LLVMValueRef i32_1;
   LLVMValueRef i64_0;
   LLVMValueRef i64_1;
   LLVMValueRef i128_0;
   LLVMValueRef i128_1;
   LLVMValueRef f16_0;
   LLVMValueRef f16_1;
   LLVMValueRef f32_0;
   LLVMValueRef f32_1;
   LLVMValueRef f64_0;
   LLVMValueRef f64_1;
   LLVMValueRef i1true;
   LLVMValueRef i1false;

   /* Temporary helper to implement demote_to_helper:
    * True = live lanes
    * False = demoted lanes
    */
   LLVMValueRef postponed_kill;
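   /* A hedged sketch of the intended semantics (the actual handling lives in
    * the NIR->LLVM translator, not in this header): a conditional demote
    * would clear the mask bit for the demoted lanes,
    *
    *    // postponed_kill &= !demote_condition
    *    live = LLVMBuildAnd(builder, live,
    *                        LLVMBuildNot(builder, demote_condition, ""), "");
    *
    * and the accumulated mask is presumably applied later, e.g. through
    * ac_build_kill_if_false().
    */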

   /* Since ac_nir_translate makes a local copy of ac_llvm_context, there
    * are two ac_llvm_contexts. Declare a pointer here, so that the control
    * flow stack is shared by both ac_llvm_contexts.
    */
   struct ac_llvm_flow_state *flow;

   unsigned range_md_kind;
   unsigned invariant_load_md_kind;
   unsigned uniform_md_kind;
   unsigned fpmath_md_kind;
   LLVMValueRef fpmath_md_2p5_ulp;
   LLVMValueRef empty_md;

   enum chip_class chip_class;
   enum radeon_family family;

   unsigned wave_size;
   unsigned ballot_mask_bits;

   unsigned float_mode;

   LLVMValueRef lds;
};

void
ac_llvm_context_init(struct ac_llvm_context *ctx,
                     struct ac_llvm_compiler *compiler,
                     enum chip_class chip_class, enum radeon_family family,
                     enum ac_float_mode float_mode, unsigned wave_size,
                     unsigned ballot_mask_bits);

void
ac_llvm_context_dispose(struct ac_llvm_context *ctx);

int
ac_get_llvm_num_components(LLVMValueRef value);

int
ac_get_elem_bits(struct ac_llvm_context *ctx, LLVMTypeRef type);

LLVMValueRef
ac_llvm_extract_elem(struct ac_llvm_context *ac,
                     LLVMValueRef value,
                     int index);

unsigned ac_get_type_size(LLVMTypeRef type);

LLVMTypeRef ac_to_integer_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_integer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMValueRef ac_to_integer_or_pointer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMTypeRef ac_to_float_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_float(struct ac_llvm_context *ctx, LLVMValueRef v);

LLVMValueRef
ac_build_intrinsic(struct ac_llvm_context *ctx, const char *name,
                   LLVMTypeRef return_type, LLVMValueRef *params,
                   unsigned param_count, unsigned attrib_mask);

void ac_build_type_name_for_intr(LLVMTypeRef type, char *buf, unsigned bufsize);

LLVMValueRef
ac_build_phi(struct ac_llvm_context *ctx, LLVMTypeRef type,
             unsigned count_incoming, LLVMValueRef *values,
             LLVMBasicBlockRef *blocks);

void ac_build_s_barrier(struct ac_llvm_context *ctx);
void ac_build_optimization_barrier(struct ac_llvm_context *ctx,
                                   LLVMValueRef *pvgpr);

LLVMValueRef ac_build_shader_clock(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_ballot(struct ac_llvm_context *ctx, LLVMValueRef value);
LLVMValueRef ac_get_i1_sgpr_mask(struct ac_llvm_context *ctx,
                                 LLVMValueRef value);

LLVMValueRef ac_build_vote_all(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_any(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_eq(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef
ac_build_varying_gather_values(struct ac_llvm_context *ctx, LLVMValueRef *values,
                               unsigned value_count, unsigned component);

LLVMValueRef
ac_build_gather_values_extended(struct ac_llvm_context *ctx,
                                LLVMValueRef *values,
                                unsigned value_count,
                                unsigned value_stride,
                                bool load,
                                bool always_vector);
LLVMValueRef
ac_build_gather_values(struct ac_llvm_context *ctx,
                       LLVMValueRef *values,
                       unsigned value_count);

LLVMValueRef
ac_extract_components(struct ac_llvm_context *ctx,
                      LLVMValueRef value,
                      unsigned start,
                      unsigned channels);

LLVMValueRef ac_build_expand_to_vec4(struct ac_llvm_context *ctx,
                                     LLVMValueRef value,
                                     unsigned num_channels);
LLVMValueRef ac_build_round(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef
ac_build_fdiv(struct ac_llvm_context *ctx,
              LLVMValueRef num,
              LLVMValueRef den);

LLVMValueRef ac_build_fast_udiv(struct ac_llvm_context *ctx,
                                LLVMValueRef num,
                                LLVMValueRef multiplier,
                                LLVMValueRef pre_shift,
                                LLVMValueRef post_shift,
                                LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_nuw(struct ac_llvm_context *ctx,
                                    LLVMValueRef num,
                                    LLVMValueRef multiplier,
                                    LLVMValueRef pre_shift,
                                    LLVMValueRef post_shift,
                                    LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_u31_d_not_one(struct ac_llvm_context *ctx,
                                              LLVMValueRef num,
                                              LLVMValueRef multiplier,
                                              LLVMValueRef post_shift);

void
ac_prepare_cube_coords(struct ac_llvm_context *ctx,
                       bool is_deriv, bool is_array, bool is_lod,
                       LLVMValueRef *coords_arg,
                       LLVMValueRef *derivs_arg);


LLVMValueRef
ac_build_fs_interp(struct ac_llvm_context *ctx,
                   LLVMValueRef llvm_chan,
                   LLVMValueRef attr_number,
                   LLVMValueRef params,
                   LLVMValueRef i,
                   LLVMValueRef j);

LLVMValueRef
ac_build_fs_interp_f16(struct ac_llvm_context *ctx,
                       LLVMValueRef llvm_chan,
                       LLVMValueRef attr_number,
                       LLVMValueRef params,
                       LLVMValueRef i,
                       LLVMValueRef j);

LLVMValueRef
ac_build_fs_interp_mov(struct ac_llvm_context *ctx,
                       LLVMValueRef parameter,
                       LLVMValueRef llvm_chan,
                       LLVMValueRef attr_number,
                       LLVMValueRef params);

LLVMValueRef
ac_build_gep_ptr(struct ac_llvm_context *ctx,
                 LLVMValueRef base_ptr,
                 LLVMValueRef index);

LLVMValueRef
ac_build_gep0(struct ac_llvm_context *ctx,
              LLVMValueRef base_ptr,
              LLVMValueRef index);
LLVMValueRef ac_build_pointer_add(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                  LLVMValueRef index);

void
ac_build_indexed_store(struct ac_llvm_context *ctx,
                       LLVMValueRef base_ptr, LLVMValueRef index,
                       LLVMValueRef value);

LLVMValueRef ac_build_load(struct ac_llvm_context *ctx, LLVMValueRef base_ptr,
                           LLVMValueRef index);
LLVMValueRef ac_build_load_invariant(struct ac_llvm_context *ctx,
                                     LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr(struct ac_llvm_context *ctx,
                                   LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr_uint_wraparound(struct ac_llvm_context *ctx,
                                                   LLVMValueRef base_ptr, LLVMValueRef index);

void
ac_build_buffer_store_dword(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef vdata,
                            unsigned num_channels,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            unsigned inst_offset,
                            unsigned cache_policy);

void
ac_build_buffer_store_format(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef data,
                             LLVMValueRef vindex,
                             LLVMValueRef voffset,
                             unsigned num_channels,
                             unsigned cache_policy);

LLVMValueRef
ac_build_buffer_load(struct ac_llvm_context *ctx,
                     LLVMValueRef rsrc,
                     int num_channels,
                     LLVMValueRef vindex,
                     LLVMValueRef voffset,
                     LLVMValueRef soffset,
                     unsigned inst_offset,
                     unsigned cache_policy,
                     bool can_speculate,
                     bool allow_smem);

LLVMValueRef ac_build_buffer_load_format(struct ac_llvm_context *ctx,
                                         LLVMValueRef rsrc,
                                         LLVMValueRef vindex,
                                         LLVMValueRef voffset,
                                         unsigned num_channels,
                                         unsigned cache_policy,
                                         bool can_speculate);

LLVMValueRef
ac_build_tbuffer_load_short(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            LLVMValueRef immoffset,
                            unsigned cache_policy);

LLVMValueRef
ac_build_tbuffer_load_byte(struct ac_llvm_context *ctx,
                           LLVMValueRef rsrc,
                           LLVMValueRef voffset,
                           LLVMValueRef soffset,
                           LLVMValueRef immoffset,
                           unsigned cache_policy);

LLVMValueRef
ac_build_struct_tbuffer_load(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef vindex,
                             LLVMValueRef voffset,
                             LLVMValueRef soffset,
                             LLVMValueRef immoffset,
                             unsigned num_channels,
                             unsigned dfmt,
                             unsigned nfmt,
                             unsigned cache_policy,
                             bool can_speculate);

LLVMValueRef
ac_build_raw_tbuffer_load(struct ac_llvm_context *ctx,
                          LLVMValueRef rsrc,
                          LLVMValueRef voffset,
                          LLVMValueRef soffset,
                          LLVMValueRef immoffset,
                          unsigned num_channels,
                          unsigned dfmt,
                          unsigned nfmt,
                          unsigned cache_policy,
                          bool can_speculate);

/* For ac_build_fetch_format.
 *
 * Note: FLOAT must be 0 (used for convenience of encoding in radeonsi).
 */
enum {
   AC_FETCH_FORMAT_FLOAT = 0,
   AC_FETCH_FORMAT_FIXED,
   AC_FETCH_FORMAT_UNORM,
   AC_FETCH_FORMAT_SNORM,
   AC_FETCH_FORMAT_USCALED,
   AC_FETCH_FORMAT_SSCALED,
   AC_FETCH_FORMAT_UINT,
   AC_FETCH_FORMAT_SINT,
};
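/* A hedged usage sketch for these values with ac_build_opencoded_load_format()
 * (declared just below). The 'rsrc', 'vindex' and 'voffset' values are assumed
 * to have been built elsewhere, and the log_size interpretation is an
 * assumption rather than something this header specifies:
 *
 *    LLVMValueRef rgba =
 *       ac_build_opencoded_load_format(ctx,
 *                                      0,     // log_size (assumed: log2 of bytes per channel)
 *                                      4,     // num_channels
 *                                      AC_FETCH_FORMAT_UNORM,
 *                                      false, // reverse
 *                                      false, // known_aligned
 *                                      rsrc, vindex, voffset, ctx->i32_0,
 *                                      0,     // cache_policy
 *                                      true); // can_speculate
 */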

LLVMValueRef
ac_build_opencoded_load_format(struct ac_llvm_context *ctx,
                               unsigned log_size,
                               unsigned num_channels,
                               unsigned format,
                               bool reverse,
                               bool known_aligned,
                               LLVMValueRef rsrc,
                               LLVMValueRef vindex,
                               LLVMValueRef voffset,
                               LLVMValueRef soffset,
                               unsigned cache_policy,
                               bool can_speculate);

void
ac_build_tbuffer_store_short(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef vdata,
                             LLVMValueRef voffset,
                             LLVMValueRef soffset,
                             unsigned cache_policy);

void
ac_build_tbuffer_store_byte(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef vdata,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            unsigned cache_policy);

void
ac_build_struct_tbuffer_store(struct ac_llvm_context *ctx,
                              LLVMValueRef rsrc,
                              LLVMValueRef vdata,
                              LLVMValueRef vindex,
                              LLVMValueRef voffset,
                              LLVMValueRef soffset,
                              LLVMValueRef immoffset,
                              unsigned num_channels,
                              unsigned dfmt,
                              unsigned nfmt,
                              unsigned cache_policy);

void
ac_build_raw_tbuffer_store(struct ac_llvm_context *ctx,
                           LLVMValueRef rsrc,
                           LLVMValueRef vdata,
                           LLVMValueRef voffset,
                           LLVMValueRef soffset,
                           LLVMValueRef immoffset,
                           unsigned num_channels,
                           unsigned dfmt,
                           unsigned nfmt,
                           unsigned cache_policy);

LLVMValueRef
ac_get_thread_id(struct ac_llvm_context *ctx);

#define AC_TID_MASK_TOP_LEFT 0xfffffffc
#define AC_TID_MASK_TOP      0xfffffffd
#define AC_TID_MASK_LEFT     0xfffffffe

LLVMValueRef
ac_build_ddxy(struct ac_llvm_context *ctx,
              uint32_t mask,
              int idx,
              LLVMValueRef val);

#define AC_SENDMSG_GS           2
#define AC_SENDMSG_GS_DONE      3
#define AC_SENDMSG_GS_ALLOC_REQ 9

#define AC_SENDMSG_GS_OP_NOP      (0 << 4)
#define AC_SENDMSG_GS_OP_CUT      (1 << 4)
#define AC_SENDMSG_GS_OP_EMIT     (2 << 4)
#define AC_SENDMSG_GS_OP_EMIT_CUT (3 << 4)

void ac_build_sendmsg(struct ac_llvm_context *ctx,
                      uint32_t msg,
                      LLVMValueRef wave_id);
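/* The message type and the GS operation bits are OR'd into a single 'msg'
 * value. A minimal sketch, assuming 'gs_wave_id' was loaded elsewhere (any
 * stream bits are omitted):
 *
 *    ac_build_sendmsg(ctx, AC_SENDMSG_GS_OP_EMIT | AC_SENDMSG_GS, gs_wave_id);
 */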

LLVMValueRef ac_build_imsb(struct ac_llvm_context *ctx,
                           LLVMValueRef arg,
                           LLVMTypeRef dst_type);

LLVMValueRef ac_build_umsb(struct ac_llvm_context *ctx,
                           LLVMValueRef arg,
                           LLVMTypeRef dst_type);
LLVMValueRef ac_build_fmin(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_fmax(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_imin(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_imax(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_umin(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_umax(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_clamp(struct ac_llvm_context *ctx, LLVMValueRef value);

struct ac_export_args {
   LLVMValueRef out[4];
   unsigned target;
   unsigned enabled_channels;
   bool compr;
   bool done;
   bool valid_mask;
};
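/* A hedged sketch of a color export through ac_build_export() (declared
 * below). The channel values and the exact 'target' encoding (assumed here to
 * be 0 for MRT0) come from the caller, not from this header:
 *
 *    struct ac_export_args args = {0};
 *    args.out[0] = red;           // f32 values computed elsewhere
 *    args.out[1] = green;
 *    args.out[2] = blue;
 *    args.out[3] = alpha;
 *    args.enabled_channels = 0xf; // write all four channels
 *    args.target = 0;             // assumed: MRT0
 *    args.done = true;            // last export of the shader
 *    ac_build_export(ctx, &args);
 */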

void ac_build_export(struct ac_llvm_context *ctx, struct ac_export_args *a);

void ac_build_export_null(struct ac_llvm_context *ctx);

enum ac_image_opcode {
   ac_image_sample,
   ac_image_gather4,
   ac_image_load,
   ac_image_load_mip,
   ac_image_store,
   ac_image_store_mip,
   ac_image_get_lod,
   ac_image_get_resinfo,
   ac_image_atomic,
   ac_image_atomic_cmpswap,
};

enum ac_atomic_op {
   ac_atomic_swap,
   ac_atomic_add,
   ac_atomic_sub,
   ac_atomic_smin,
   ac_atomic_umin,
   ac_atomic_smax,
   ac_atomic_umax,
   ac_atomic_and,
   ac_atomic_or,
   ac_atomic_xor,
   ac_atomic_inc_wrap,
   ac_atomic_dec_wrap,
};

/* These cache policy bits match the definitions used by the LLVM intrinsics. */
enum ac_image_cache_policy {
   ac_glc = 1 << 0,      /* per-CU cache control */
   ac_slc = 1 << 1,      /* global L2 cache control */
   ac_dlc = 1 << 2,      /* per-shader-array cache control */
   ac_swizzled = 1 << 3, /* the access is swizzled, disabling load/store merging */
};

struct ac_image_args {
   enum ac_image_opcode opcode : 4;
   enum ac_atomic_op atomic : 4; /* for the ac_image_atomic opcode */
   enum ac_image_dim dim : 3;
   unsigned dmask : 4;
   unsigned cache_policy : 3;
   bool unorm : 1;
   bool level_zero : 1;
   unsigned attributes; /* additional call-site specific AC_FUNC_ATTRs */

   LLVMValueRef resource;
   LLVMValueRef sampler;
   LLVMValueRef data[2]; /* data[0] is source data (vector); data[1] is cmp for cmpswap */
   LLVMValueRef offset;
   LLVMValueRef bias;
   LLVMValueRef compare;
   LLVMValueRef derivs[6];
   LLVMValueRef coords[4];
   LLVMValueRef lod; // also used by ac_image_get_resinfo
};
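/* A hedged sketch of a simple 2D image load through ac_build_image_opcode()
 * (declared below). 'img_rsrc', 'x' and 'y' are assumed to have been built
 * elsewhere; ac_image_2d comes from enum ac_image_dim in ac_shader_util.h:
 *
 *    struct ac_image_args args = {0};
 *    args.opcode = ac_image_load;
 *    args.dim = ac_image_2d;
 *    args.dmask = 0xf;            // load all four components
 *    args.cache_policy = ac_glc;  // cache-policy bits may be OR'd together
 *    args.resource = img_rsrc;    // image descriptor
 *    args.coords[0] = x;
 *    args.coords[1] = y;
 *    LLVMValueRef texel = ac_build_image_opcode(ctx, &args);
 */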

LLVMValueRef ac_build_image_opcode(struct ac_llvm_context *ctx,
                                   struct ac_image_args *a);
LLVMValueRef ac_build_image_get_sample_count(struct ac_llvm_context *ctx,
                                             LLVMValueRef rsrc);
LLVMValueRef ac_build_cvt_pkrtz_f16(struct ac_llvm_context *ctx,
                                    LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_i16(struct ac_llvm_context *ctx,
                                     LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_u16(struct ac_llvm_context *ctx,
                                     LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pk_i16(struct ac_llvm_context *ctx,
                                 LLVMValueRef args[2], unsigned bits, bool hi);
LLVMValueRef ac_build_cvt_pk_u16(struct ac_llvm_context *ctx,
                                 LLVMValueRef args[2], unsigned bits, bool hi);
LLVMValueRef ac_build_wqm_vote(struct ac_llvm_context *ctx, LLVMValueRef i1);
void ac_build_kill_if_false(struct ac_llvm_context *ctx, LLVMValueRef i1);
LLVMValueRef ac_build_bfe(struct ac_llvm_context *ctx, LLVMValueRef input,
                          LLVMValueRef offset, LLVMValueRef width,
                          bool is_signed);
LLVMValueRef ac_build_imad(struct ac_llvm_context *ctx, LLVMValueRef s0,
                           LLVMValueRef s1, LLVMValueRef s2);
LLVMValueRef ac_build_fmad(struct ac_llvm_context *ctx, LLVMValueRef s0,
                           LLVMValueRef s1, LLVMValueRef s2);

void ac_build_waitcnt(struct ac_llvm_context *ctx, unsigned wait_flags);

LLVMValueRef ac_build_fract(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_fmed3(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            LLVMValueRef src1, LLVMValueRef src2,
                            unsigned bitsize);

LLVMValueRef ac_build_isign(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_fsign(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_bit_count(struct ac_llvm_context *ctx, LLVMValueRef src0);

LLVMValueRef ac_build_bitfield_reverse(struct ac_llvm_context *ctx,
                                       LLVMValueRef src0);

void ac_optimize_vs_outputs(struct ac_llvm_context *ac,
                            LLVMValueRef main_fn,
                            uint8_t *vs_output_param_offset,
                            uint32_t num_outputs,
                            uint8_t *num_param_exports);
void ac_init_exec_full_mask(struct ac_llvm_context *ctx);

void ac_declare_lds_as_pointer(struct ac_llvm_context *ac);
LLVMValueRef ac_lds_load(struct ac_llvm_context *ctx,
                         LLVMValueRef dw_addr);
void ac_lds_store(struct ac_llvm_context *ctx,
                  LLVMValueRef dw_addr, LLVMValueRef value);

LLVMValueRef ac_find_lsb(struct ac_llvm_context *ctx,
                         LLVMTypeRef dst_type,
                         LLVMValueRef src0);

LLVMTypeRef ac_array_in_const_addr_space(LLVMTypeRef elem_type);
LLVMTypeRef ac_array_in_const32_addr_space(LLVMTypeRef elem_type);

void ac_build_bgnloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_break(struct ac_llvm_context *ctx);
void ac_build_continue(struct ac_llvm_context *ctx);
void ac_build_else(struct ac_llvm_context *ctx, int label_id);
void ac_build_endif(struct ac_llvm_context *ctx, int label_id);
void ac_build_endloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_ifcc(struct ac_llvm_context *ctx, LLVMValueRef cond, int label_id);
void ac_build_if(struct ac_llvm_context *ctx, LLVMValueRef value,
                 int label_id);
void ac_build_uif(struct ac_llvm_context *ctx, LLVMValueRef value,
                  int label_id);

LLVMValueRef ac_build_alloca(struct ac_llvm_context *ac, LLVMTypeRef type,
                             const char *name);
LLVMValueRef ac_build_alloca_undef(struct ac_llvm_context *ac, LLVMTypeRef type,
                                   const char *name);

LLVMValueRef ac_cast_ptr(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                         LLVMTypeRef type);

LLVMValueRef ac_trim_vector(struct ac_llvm_context *ctx, LLVMValueRef value,
                            unsigned count);

LLVMValueRef ac_unpack_param(struct ac_llvm_context *ctx, LLVMValueRef param,
                             unsigned rshift, unsigned bitwidth);

void ac_apply_fmask_to_sample(struct ac_llvm_context *ac, LLVMValueRef fmask,
                              LLVMValueRef *addr, bool is_array_tex);

LLVMValueRef
ac_build_ds_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src, unsigned mask);

LLVMValueRef ac_build_readlane_no_opt_barrier(struct ac_llvm_context *ctx,
                                              LLVMValueRef src, LLVMValueRef lane);

LLVMValueRef
ac_build_readlane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef lane);

LLVMValueRef
ac_build_writelane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef value, LLVMValueRef lane);

LLVMValueRef
ac_build_mbcnt(struct ac_llvm_context *ctx, LLVMValueRef mask);

LLVMValueRef
ac_build_inclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef
ac_build_exclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef
ac_build_reduce(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op, unsigned cluster_size);

/**
 * Common arguments for a scan/reduce operation that accumulates per-wave
 * values across an entire workgroup, while respecting the order of waves.
 */
struct ac_wg_scan {
   bool enable_reduce;
   bool enable_exclusive;
   bool enable_inclusive;
   nir_op op;
   LLVMValueRef src; /* clobbered! */
   LLVMValueRef result_reduce;
   LLVMValueRef result_exclusive;
   LLVMValueRef result_inclusive;
   LLVMValueRef extra;
   LLVMValueRef waveidx;
   LLVMValueRef numwaves; /* only needed for "reduce" operations */

   /* T addrspace(LDS) pointer to the same type as value, at least maxwaves entries */
   LLVMValueRef scratch;
   unsigned maxwaves;
};

void
ac_build_wg_wavescan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_wavescan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_wavescan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);

void
ac_build_wg_scan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_scan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_scan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
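/* A hedged usage sketch for a workgroup-wide inclusive add with
 * ac_build_wg_scan(). 'value', 'lds_scratch' and 'wave_id' are assumed to
 * have been built elsewhere, and maxwaves is just an example bound:
 *
 *    struct ac_wg_scan ws = {0};
 *    ws.op = nir_op_iadd;
 *    ws.enable_inclusive = true;
 *    ws.src = value;              // note: clobbered by the helper
 *    ws.scratch = lds_scratch;    // LDS array with at least maxwaves entries
 *    ws.waveidx = wave_id;
 *    ws.maxwaves = 8;
 *    ac_build_wg_scan(ctx, &ws);
 *    LLVMValueRef sum = ws.result_inclusive;
 */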

LLVMValueRef
ac_build_quad_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src,
                      unsigned lane0, unsigned lane1, unsigned lane2, unsigned lane3);

LLVMValueRef
ac_build_shuffle(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef index);

LLVMValueRef
ac_build_frexp_exp(struct ac_llvm_context *ctx, LLVMValueRef src0,
                   unsigned bitsize);

LLVMValueRef
ac_build_frexp_mant(struct ac_llvm_context *ctx, LLVMValueRef src0,
                    unsigned bitsize);

LLVMValueRef
ac_build_canonicalize(struct ac_llvm_context *ctx, LLVMValueRef src0,
                      unsigned bitsize);

LLVMValueRef
ac_build_ddxy_interp(struct ac_llvm_context *ctx, LLVMValueRef interp_ij);

LLVMValueRef
ac_build_load_helper_invocation(struct ac_llvm_context *ctx);

LLVMValueRef
ac_build_is_helper_invocation(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_call(struct ac_llvm_context *ctx, LLVMValueRef func,
                           LLVMValueRef *args, unsigned num_args);

LLVMValueRef ac_build_atomic_rmw(struct ac_llvm_context *ctx, LLVMAtomicRMWBinOp op,
                                 LLVMValueRef ptr, LLVMValueRef val,
                                 const char *sync_scope);

LLVMValueRef ac_build_atomic_cmp_xchg(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                      LLVMValueRef cmp, LLVMValueRef val,
                                      const char *sync_scope);

void
ac_export_mrt_z(struct ac_llvm_context *ctx, LLVMValueRef depth,
                LLVMValueRef stencil, LLVMValueRef samplemask,
                struct ac_export_args *args);

void ac_build_sendmsg_gs_alloc_req(struct ac_llvm_context *ctx, LLVMValueRef wave_id,
                                   LLVMValueRef vtx_cnt, LLVMValueRef prim_cnt);

struct ac_ngg_prim {
   unsigned num_vertices;
   LLVMValueRef isnull;
   LLVMValueRef index[3];
   LLVMValueRef edgeflag[3];
   LLVMValueRef passthrough;
};

LLVMValueRef ac_pack_prim_export(struct ac_llvm_context *ctx,
                                 const struct ac_ngg_prim *prim);
void ac_build_export_prim(struct ac_llvm_context *ctx,
                          const struct ac_ngg_prim *prim);

static inline LLVMValueRef
ac_get_arg(struct ac_llvm_context *ctx, struct ac_arg arg)
{
   assert(arg.used);
   return LLVMGetParam(ctx->main_function, arg.arg_index);
}

enum ac_llvm_calling_convention {
   AC_LLVM_AMDGPU_VS = 87,
   AC_LLVM_AMDGPU_GS = 88,
   AC_LLVM_AMDGPU_PS = 89,
   AC_LLVM_AMDGPU_CS = 90,
   AC_LLVM_AMDGPU_HS = 93,
};

LLVMValueRef ac_build_main(const struct ac_shader_args *args,
                           struct ac_llvm_context *ctx,
                           enum ac_llvm_calling_convention convention,
                           const char *name, LLVMTypeRef ret_type,
                           LLVMModuleRef module);
void ac_build_s_endpgm(struct ac_llvm_context *ctx);

LLVMValueRef ac_prefix_bitcount(struct ac_llvm_context *ctx,
                                LLVMValueRef mask, LLVMValueRef index);
LLVMValueRef ac_prefix_bitcount_2x64(struct ac_llvm_context *ctx,
                                     LLVMValueRef mask[2], LLVMValueRef index);
void ac_build_triangle_strip_indices_to_triangle(struct ac_llvm_context *ctx,
                                                 LLVMValueRef is_odd,
                                                 LLVMValueRef flatshade_first,
                                                 LLVMValueRef index[3]);

#ifdef __cplusplus
}
#endif

#endif