radeonsi/nir: always lower ballot masks as 64-bit, codegen handles it
[mesa.git] src/amd/common/ac_llvm_build.h
/*
 * Copyright 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 */
#ifndef AC_LLVM_BUILD_H
#define AC_LLVM_BUILD_H

#include <stdbool.h>
#include <llvm-c/Core.h>
#include "compiler/nir/nir.h"
#include "amd_family.h"

#ifdef __cplusplus
extern "C" {
#endif

enum {
        AC_ADDR_SPACE_FLAT = 0, /* Slower than global. */
        AC_ADDR_SPACE_GLOBAL = 1,
        AC_ADDR_SPACE_GDS = 2,
        AC_ADDR_SPACE_LDS = 3,
        AC_ADDR_SPACE_CONST = 4, /* Global allowing SMEM. */
        AC_ADDR_SPACE_CONST_32BIT = 6, /* same as CONST, but the pointer type has 32 bits */
};

#define AC_WAIT_LGKM   (1 << 0) /* LDS, GDS, constant, message */
#define AC_WAIT_VLOAD  (1 << 1) /* VMEM load/sample instructions */
#define AC_WAIT_VSTORE (1 << 2) /* VMEM store instructions */

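/*
 * Illustrative use (sketch, not from the original header): these flags form
 * the wait_flags argument of ac_build_waitcnt() declared further below, e.g.
 *
 *   ac_build_waitcnt(ctx, AC_WAIT_LGKM | AC_WAIT_VLOAD | AC_WAIT_VSTORE);
 *
 * which waits for all outstanding LDS/GDS/constant/message traffic as well as
 * all VMEM loads and stores before continuing.
 */
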
struct ac_llvm_flow;
struct ac_llvm_compiler;
enum ac_float_mode;

struct ac_llvm_flow_state {
        struct ac_llvm_flow *stack;
        unsigned depth_max;
        unsigned depth;
};

struct ac_llvm_context {
        LLVMContextRef context;
        LLVMModuleRef module;
        LLVMBuilderRef builder;

        LLVMTypeRef voidt;
        LLVMTypeRef i1;
        LLVMTypeRef i8;
        LLVMTypeRef i16;
        LLVMTypeRef i32;
        LLVMTypeRef i64;
        LLVMTypeRef intptr;
        LLVMTypeRef f16;
        LLVMTypeRef f32;
        LLVMTypeRef f64;
        LLVMTypeRef v2i16;
        LLVMTypeRef v2i32;
        LLVMTypeRef v3i32;
        LLVMTypeRef v4i32;
        LLVMTypeRef v2f32;
        LLVMTypeRef v3f32;
        LLVMTypeRef v4f32;
        LLVMTypeRef v8i32;
        LLVMTypeRef iN_wavemask;
        LLVMTypeRef iN_ballotmask;

        LLVMValueRef i8_0;
        LLVMValueRef i8_1;
        LLVMValueRef i16_0;
        LLVMValueRef i16_1;
        LLVMValueRef i32_0;
        LLVMValueRef i32_1;
        LLVMValueRef i64_0;
        LLVMValueRef i64_1;
        LLVMValueRef f16_0;
        LLVMValueRef f16_1;
        LLVMValueRef f32_0;
        LLVMValueRef f32_1;
        LLVMValueRef f64_0;
        LLVMValueRef f64_1;
        LLVMValueRef i1true;
        LLVMValueRef i1false;

        /* Since ac_nir_translate makes a local copy of ac_llvm_context, there
         * are two ac_llvm_contexts. Declare a pointer here, so that the control
         * flow stack is shared by both ac_llvm_contexts.
         */
        struct ac_llvm_flow_state *flow;

        unsigned range_md_kind;
        unsigned invariant_load_md_kind;
        unsigned uniform_md_kind;
        unsigned fpmath_md_kind;
        LLVMValueRef fpmath_md_2p5_ulp;
        LLVMValueRef empty_md;

        enum chip_class chip_class;
        enum radeon_family family;

        unsigned wave_size;
        unsigned ballot_mask_bits;

        LLVMValueRef lds;
};

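/*
 * Illustrative note (added for clarity, an assumption about the setup code):
 * wave_size selects iN_wavemask (i32 for wave32, i64 for wave64), while
 * ballot_mask_bits selects iN_ballotmask, and the two are allowed to differ.
 * Per the commit subject above, radeonsi always lowers ballot masks as 64-bit
 * (ballot_mask_bits == 64) even in wave32 mode and lets codegen handle the
 * wider type.
 */
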
void
ac_llvm_context_init(struct ac_llvm_context *ctx,
                     struct ac_llvm_compiler *compiler,
                     enum chip_class chip_class, enum radeon_family family,
                     enum ac_float_mode float_mode, unsigned wave_size,
                     unsigned ballot_mask_bits);

void
ac_llvm_context_dispose(struct ac_llvm_context *ctx);

int
ac_get_llvm_num_components(LLVMValueRef value);

int
ac_get_elem_bits(struct ac_llvm_context *ctx, LLVMTypeRef type);

LLVMValueRef
ac_llvm_extract_elem(struct ac_llvm_context *ac,
                     LLVMValueRef value,
                     int index);

unsigned ac_get_type_size(LLVMTypeRef type);

LLVMTypeRef ac_to_integer_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_integer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMValueRef ac_to_integer_or_pointer(struct ac_llvm_context *ctx, LLVMValueRef v);
LLVMTypeRef ac_to_float_type(struct ac_llvm_context *ctx, LLVMTypeRef t);
LLVMValueRef ac_to_float(struct ac_llvm_context *ctx, LLVMValueRef v);

LLVMValueRef
ac_build_intrinsic(struct ac_llvm_context *ctx, const char *name,
                   LLVMTypeRef return_type, LLVMValueRef *params,
                   unsigned param_count, unsigned attrib_mask);

void ac_build_type_name_for_intr(LLVMTypeRef type, char *buf, unsigned bufsize);

LLVMValueRef
ac_build_phi(struct ac_llvm_context *ctx, LLVMTypeRef type,
             unsigned count_incoming, LLVMValueRef *values,
             LLVMBasicBlockRef *blocks);

void ac_build_s_barrier(struct ac_llvm_context *ctx);
void ac_build_optimization_barrier(struct ac_llvm_context *ctx,
                                   LLVMValueRef *pvgpr);

LLVMValueRef ac_build_shader_clock(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_ballot(struct ac_llvm_context *ctx, LLVMValueRef value);
LLVMValueRef ac_get_i1_sgpr_mask(struct ac_llvm_context *ctx,
                                 LLVMValueRef value);

LLVMValueRef ac_build_vote_all(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_any(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef ac_build_vote_eq(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef
ac_build_varying_gather_values(struct ac_llvm_context *ctx, LLVMValueRef *values,
                               unsigned value_count, unsigned component);

LLVMValueRef
ac_build_gather_values_extended(struct ac_llvm_context *ctx,
                                LLVMValueRef *values,
                                unsigned value_count,
                                unsigned value_stride,
                                bool load,
                                bool always_vector);
LLVMValueRef
ac_build_gather_values(struct ac_llvm_context *ctx,
                       LLVMValueRef *values,
                       unsigned value_count);

LLVMValueRef
ac_extract_components(struct ac_llvm_context *ctx,
                      LLVMValueRef value,
                      unsigned start,
                      unsigned channels);

LLVMValueRef ac_build_expand_to_vec4(struct ac_llvm_context *ctx,
                                     LLVMValueRef value,
                                     unsigned num_channels);
LLVMValueRef ac_build_round(struct ac_llvm_context *ctx, LLVMValueRef value);

LLVMValueRef
ac_build_fdiv(struct ac_llvm_context *ctx,
              LLVMValueRef num,
              LLVMValueRef den);

LLVMValueRef ac_build_fast_udiv(struct ac_llvm_context *ctx,
                                LLVMValueRef num,
                                LLVMValueRef multiplier,
                                LLVMValueRef pre_shift,
                                LLVMValueRef post_shift,
                                LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_nuw(struct ac_llvm_context *ctx,
                                    LLVMValueRef num,
                                    LLVMValueRef multiplier,
                                    LLVMValueRef pre_shift,
                                    LLVMValueRef post_shift,
                                    LLVMValueRef increment);
LLVMValueRef ac_build_fast_udiv_u31_d_not_one(struct ac_llvm_context *ctx,
                                              LLVMValueRef num,
                                              LLVMValueRef multiplier,
                                              LLVMValueRef post_shift);

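/*
 * Rough sketch of what the fast_udiv helpers evaluate (the exact expression
 * lives in ac_llvm_build.c, so treat this as approximate): an unsigned
 * division by a divisor known on the CPU is replaced by a multiply-high with
 * a precomputed "magic" multiplier plus shifts, roughly
 *
 *   quotient = (((uint64_t)(num >> pre_shift) * multiplier + increment) >> 32) >> post_shift
 *
 * The multiplier/pre_shift/post_shift/increment constants are expected to be
 * computed on the CPU, e.g. with Mesa's fast_idiv_by_const helpers.
 */
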
void
ac_prepare_cube_coords(struct ac_llvm_context *ctx,
                       bool is_deriv, bool is_array, bool is_lod,
                       LLVMValueRef *coords_arg,
                       LLVMValueRef *derivs_arg);


LLVMValueRef
ac_build_fs_interp(struct ac_llvm_context *ctx,
                   LLVMValueRef llvm_chan,
                   LLVMValueRef attr_number,
                   LLVMValueRef params,
                   LLVMValueRef i,
                   LLVMValueRef j);

LLVMValueRef
ac_build_fs_interp_f16(struct ac_llvm_context *ctx,
                       LLVMValueRef llvm_chan,
                       LLVMValueRef attr_number,
                       LLVMValueRef params,
                       LLVMValueRef i,
                       LLVMValueRef j);

LLVMValueRef
ac_build_fs_interp_mov(struct ac_llvm_context *ctx,
                       LLVMValueRef parameter,
                       LLVMValueRef llvm_chan,
                       LLVMValueRef attr_number,
                       LLVMValueRef params);

LLVMValueRef
ac_build_gep_ptr(struct ac_llvm_context *ctx,
                 LLVMValueRef base_ptr,
                 LLVMValueRef index);

LLVMValueRef
ac_build_gep0(struct ac_llvm_context *ctx,
              LLVMValueRef base_ptr,
              LLVMValueRef index);
LLVMValueRef ac_build_pointer_add(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                  LLVMValueRef index);

void
ac_build_indexed_store(struct ac_llvm_context *ctx,
                       LLVMValueRef base_ptr, LLVMValueRef index,
                       LLVMValueRef value);

LLVMValueRef ac_build_load(struct ac_llvm_context *ctx, LLVMValueRef base_ptr,
                           LLVMValueRef index);
LLVMValueRef ac_build_load_invariant(struct ac_llvm_context *ctx,
                                     LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr(struct ac_llvm_context *ctx,
                                   LLVMValueRef base_ptr, LLVMValueRef index);
LLVMValueRef ac_build_load_to_sgpr_uint_wraparound(struct ac_llvm_context *ctx,
                                                   LLVMValueRef base_ptr, LLVMValueRef index);

void
ac_build_buffer_store_dword(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef vdata,
                            unsigned num_channels,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            unsigned inst_offset,
                            unsigned cache_policy,
                            bool swizzle_enable_hint);

void
ac_build_buffer_store_format(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef data,
                             LLVMValueRef vindex,
                             LLVMValueRef voffset,
                             unsigned num_channels,
                             unsigned cache_policy);

LLVMValueRef
ac_build_buffer_load(struct ac_llvm_context *ctx,
                     LLVMValueRef rsrc,
                     int num_channels,
                     LLVMValueRef vindex,
                     LLVMValueRef voffset,
                     LLVMValueRef soffset,
                     unsigned inst_offset,
                     unsigned cache_policy,
                     bool can_speculate,
                     bool allow_smem);

LLVMValueRef ac_build_buffer_load_format(struct ac_llvm_context *ctx,
                                         LLVMValueRef rsrc,
                                         LLVMValueRef vindex,
                                         LLVMValueRef voffset,
                                         unsigned num_channels,
                                         unsigned cache_policy,
                                         bool can_speculate);

/* A load_format variant that handles the stride & element count better when
 * idxen is disabled by LLVM. */
LLVMValueRef ac_build_buffer_load_format_gfx9_safe(struct ac_llvm_context *ctx,
                                                   LLVMValueRef rsrc,
                                                   LLVMValueRef vindex,
                                                   LLVMValueRef voffset,
                                                   unsigned num_channels,
                                                   unsigned cache_policy,
                                                   bool can_speculate);

LLVMValueRef
ac_build_tbuffer_load_short(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            LLVMValueRef immoffset,
                            unsigned cache_policy);

LLVMValueRef
ac_build_tbuffer_load_byte(struct ac_llvm_context *ctx,
                           LLVMValueRef rsrc,
                           LLVMValueRef voffset,
                           LLVMValueRef soffset,
                           LLVMValueRef immoffset,
                           unsigned cache_policy);

LLVMValueRef
ac_build_struct_tbuffer_load(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef vindex,
                             LLVMValueRef voffset,
                             LLVMValueRef soffset,
                             LLVMValueRef immoffset,
                             unsigned num_channels,
                             unsigned dfmt,
                             unsigned nfmt,
                             unsigned cache_policy,
                             bool can_speculate);

LLVMValueRef
ac_build_raw_tbuffer_load(struct ac_llvm_context *ctx,
                          LLVMValueRef rsrc,
                          LLVMValueRef voffset,
                          LLVMValueRef soffset,
                          LLVMValueRef immoffset,
                          unsigned num_channels,
                          unsigned dfmt,
                          unsigned nfmt,
                          unsigned cache_policy,
                          bool can_speculate);

/* For ac_build_fetch_format.
 *
 * Note: FLOAT must be 0 (used for convenience of encoding in radeonsi).
 */
enum {
        AC_FETCH_FORMAT_FLOAT = 0,
        AC_FETCH_FORMAT_FIXED,
        AC_FETCH_FORMAT_UNORM,
        AC_FETCH_FORMAT_SNORM,
        AC_FETCH_FORMAT_USCALED,
        AC_FETCH_FORMAT_SSCALED,
        AC_FETCH_FORMAT_UINT,
        AC_FETCH_FORMAT_SINT,
};

LLVMValueRef
ac_build_opencoded_load_format(struct ac_llvm_context *ctx,
                               unsigned log_size,
                               unsigned num_channels,
                               unsigned format,
                               bool reverse,
                               bool known_aligned,
                               LLVMValueRef rsrc,
                               LLVMValueRef vindex,
                               LLVMValueRef voffset,
                               LLVMValueRef soffset,
                               unsigned cache_policy,
                               bool can_speculate);

void
ac_build_tbuffer_store_short(struct ac_llvm_context *ctx,
                             LLVMValueRef rsrc,
                             LLVMValueRef vdata,
                             LLVMValueRef voffset,
                             LLVMValueRef soffset,
                             unsigned cache_policy);

void
ac_build_tbuffer_store_byte(struct ac_llvm_context *ctx,
                            LLVMValueRef rsrc,
                            LLVMValueRef vdata,
                            LLVMValueRef voffset,
                            LLVMValueRef soffset,
                            unsigned cache_policy);

void
ac_build_struct_tbuffer_store(struct ac_llvm_context *ctx,
                              LLVMValueRef rsrc,
                              LLVMValueRef vdata,
                              LLVMValueRef vindex,
                              LLVMValueRef voffset,
                              LLVMValueRef soffset,
                              LLVMValueRef immoffset,
                              unsigned num_channels,
                              unsigned dfmt,
                              unsigned nfmt,
                              unsigned cache_policy);

void
ac_build_raw_tbuffer_store(struct ac_llvm_context *ctx,
                           LLVMValueRef rsrc,
                           LLVMValueRef vdata,
                           LLVMValueRef voffset,
                           LLVMValueRef soffset,
                           LLVMValueRef immoffset,
                           unsigned num_channels,
                           unsigned dfmt,
                           unsigned nfmt,
                           unsigned cache_policy);

LLVMValueRef
ac_get_thread_id(struct ac_llvm_context *ctx);

#define AC_TID_MASK_TOP_LEFT 0xfffffffc
#define AC_TID_MASK_TOP      0xfffffffd
#define AC_TID_MASK_LEFT     0xfffffffe

LLVMValueRef
ac_build_ddxy(struct ac_llvm_context *ctx,
              uint32_t mask,
              int idx,
              LLVMValueRef val);

#define AC_SENDMSG_GS           2
#define AC_SENDMSG_GS_DONE      3
#define AC_SENDMSG_GS_ALLOC_REQ 9

#define AC_SENDMSG_GS_OP_NOP      (0 << 4)
#define AC_SENDMSG_GS_OP_CUT      (1 << 4)
#define AC_SENDMSG_GS_OP_EMIT     (2 << 4)
#define AC_SENDMSG_GS_OP_EMIT_CUT (3 << 4)

void ac_build_sendmsg(struct ac_llvm_context *ctx,
                      uint32_t msg,
                      LLVMValueRef wave_id);

LLVMValueRef ac_build_imsb(struct ac_llvm_context *ctx,
                           LLVMValueRef arg,
                           LLVMTypeRef dst_type);

LLVMValueRef ac_build_umsb(struct ac_llvm_context *ctx,
                           LLVMValueRef arg,
                           LLVMTypeRef dst_type);
LLVMValueRef ac_build_fmin(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_fmax(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_imin(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_imax(struct ac_llvm_context *ctx, LLVMValueRef a,
                           LLVMValueRef b);
LLVMValueRef ac_build_umin(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_umax(struct ac_llvm_context *ctx, LLVMValueRef a, LLVMValueRef b);
LLVMValueRef ac_build_clamp(struct ac_llvm_context *ctx, LLVMValueRef value);

struct ac_export_args {
        LLVMValueRef out[4];
        unsigned target;
        unsigned enabled_channels;
        bool compr;
        bool done;
        bool valid_mask;
};

void ac_build_export(struct ac_llvm_context *ctx, struct ac_export_args *a);

void ac_build_export_null(struct ac_llvm_context *ctx);

enum ac_image_opcode {
        ac_image_sample,
        ac_image_gather4,
        ac_image_load,
        ac_image_load_mip,
        ac_image_store,
        ac_image_store_mip,
        ac_image_get_lod,
        ac_image_get_resinfo,
        ac_image_atomic,
        ac_image_atomic_cmpswap,
};

enum ac_atomic_op {
        ac_atomic_swap,
        ac_atomic_add,
        ac_atomic_sub,
        ac_atomic_smin,
        ac_atomic_umin,
        ac_atomic_smax,
        ac_atomic_umax,
        ac_atomic_and,
        ac_atomic_or,
        ac_atomic_xor,
        ac_atomic_inc_wrap,
        ac_atomic_dec_wrap,
};

enum ac_image_dim {
        ac_image_1d,
        ac_image_2d,
        ac_image_3d,
        ac_image_cube, // includes cube arrays
        ac_image_1darray,
        ac_image_2darray,
        ac_image_2dmsaa,
        ac_image_2darraymsaa,
};

/* These cache policy bits match the definitions used by the LLVM intrinsics. */
enum ac_image_cache_policy {
        ac_glc = 1 << 0, /* per-CU cache control */
        ac_slc = 1 << 1, /* global L2 cache control */
        ac_dlc = 1 << 2, /* per-shader-array cache control */
};

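/*
 * Illustrative example (an assumption about typical driver usage, not part of
 * the original header): a coherent image or buffer access would typically set
 * ac_glc in cache_policy (plus ac_dlc on gfx10), while streaming stores that
 * should bypass L2 would add ac_slc.
 */
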
struct ac_image_args {
        enum ac_image_opcode opcode : 4;
        enum ac_atomic_op atomic : 4; /* for the ac_image_atomic opcode */
        enum ac_image_dim dim : 3;
        unsigned dmask : 4;
        unsigned cache_policy : 3;
        bool unorm : 1;
        bool level_zero : 1;
        unsigned attributes; /* additional call-site specific AC_FUNC_ATTRs */

        LLVMValueRef resource;
        LLVMValueRef sampler;
        LLVMValueRef data[2]; /* data[0] is source data (vector); data[1] is cmp for cmpswap */
        LLVMValueRef offset;
        LLVMValueRef bias;
        LLVMValueRef compare;
        LLVMValueRef derivs[6];
        LLVMValueRef coords[4];
        LLVMValueRef lod; // also used by ac_image_get_resinfo
};

LLVMValueRef ac_build_image_opcode(struct ac_llvm_context *ctx,
                                   struct ac_image_args *a);
LLVMValueRef ac_build_cvt_pkrtz_f16(struct ac_llvm_context *ctx,
                                    LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_i16(struct ac_llvm_context *ctx,
                                     LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pknorm_u16(struct ac_llvm_context *ctx,
                                     LLVMValueRef args[2]);
LLVMValueRef ac_build_cvt_pk_i16(struct ac_llvm_context *ctx,
                                 LLVMValueRef args[2], unsigned bits, bool hi);
LLVMValueRef ac_build_cvt_pk_u16(struct ac_llvm_context *ctx,
                                 LLVMValueRef args[2], unsigned bits, bool hi);
LLVMValueRef ac_build_wqm_vote(struct ac_llvm_context *ctx, LLVMValueRef i1);
void ac_build_kill_if_false(struct ac_llvm_context *ctx, LLVMValueRef i1);
LLVMValueRef ac_build_bfe(struct ac_llvm_context *ctx, LLVMValueRef input,
                          LLVMValueRef offset, LLVMValueRef width,
                          bool is_signed);
LLVMValueRef ac_build_imad(struct ac_llvm_context *ctx, LLVMValueRef s0,
                           LLVMValueRef s1, LLVMValueRef s2);
LLVMValueRef ac_build_fmad(struct ac_llvm_context *ctx, LLVMValueRef s0,
                           LLVMValueRef s1, LLVMValueRef s2);

void ac_build_waitcnt(struct ac_llvm_context *ctx, unsigned wait_flags);

LLVMValueRef ac_build_fract(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_fmed3(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            LLVMValueRef src1, LLVMValueRef src2,
                            unsigned bitsize);

LLVMValueRef ac_build_isign(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_fsign(struct ac_llvm_context *ctx, LLVMValueRef src0,
                            unsigned bitsize);

LLVMValueRef ac_build_bit_count(struct ac_llvm_context *ctx, LLVMValueRef src0);

LLVMValueRef ac_build_bitfield_reverse(struct ac_llvm_context *ctx,
                                       LLVMValueRef src0);

void ac_optimize_vs_outputs(struct ac_llvm_context *ac,
                            LLVMValueRef main_fn,
                            uint8_t *vs_output_param_offset,
                            uint32_t num_outputs,
                            uint8_t *num_param_exports);
void ac_init_exec_full_mask(struct ac_llvm_context *ctx);

void ac_declare_lds_as_pointer(struct ac_llvm_context *ac);
LLVMValueRef ac_lds_load(struct ac_llvm_context *ctx,
                         LLVMValueRef dw_addr);
void ac_lds_store(struct ac_llvm_context *ctx,
                  LLVMValueRef dw_addr, LLVMValueRef value);

LLVMValueRef ac_find_lsb(struct ac_llvm_context *ctx,
                         LLVMTypeRef dst_type,
                         LLVMValueRef src0);

LLVMTypeRef ac_array_in_const_addr_space(LLVMTypeRef elem_type);
LLVMTypeRef ac_array_in_const32_addr_space(LLVMTypeRef elem_type);

void ac_build_bgnloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_break(struct ac_llvm_context *ctx);
void ac_build_continue(struct ac_llvm_context *ctx);
void ac_build_else(struct ac_llvm_context *ctx, int label_id);
void ac_build_endif(struct ac_llvm_context *ctx, int label_id);
void ac_build_endloop(struct ac_llvm_context *ctx, int label_id);
void ac_build_ifcc(struct ac_llvm_context *ctx, LLVMValueRef cond, int label_id);
void ac_build_if(struct ac_llvm_context *ctx, LLVMValueRef value,
                 int label_id);
void ac_build_uif(struct ac_llvm_context *ctx, LLVMValueRef value,
                  int label_id);

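/*
 * Illustrative usage sketch of the structured control-flow helpers (the calls
 * must be properly nested; label_id is assumed here to be an arbitrary integer
 * tag used when naming the generated blocks):
 *
 *   ac_build_ifcc(ctx, cond, 1000);
 *   ... emit the "then" side ...
 *   ac_build_else(ctx, 1000);
 *   ... emit the "else" side ...
 *   ac_build_endif(ctx, 1000);
 */
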
LLVMValueRef ac_build_alloca(struct ac_llvm_context *ac, LLVMTypeRef type,
                             const char *name);
LLVMValueRef ac_build_alloca_undef(struct ac_llvm_context *ac, LLVMTypeRef type,
                                   const char *name);

LLVMValueRef ac_cast_ptr(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                         LLVMTypeRef type);

LLVMValueRef ac_trim_vector(struct ac_llvm_context *ctx, LLVMValueRef value,
                            unsigned count);

LLVMValueRef ac_unpack_param(struct ac_llvm_context *ctx, LLVMValueRef param,
                             unsigned rshift, unsigned bitwidth);

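/*
 * Illustrative example: ac_unpack_param(ctx, param, 8, 5) extracts the 5-bit
 * field stored in bits [12:8] of param, i.e. (param >> 8) & 0x1f.
 */
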
void ac_apply_fmask_to_sample(struct ac_llvm_context *ac, LLVMValueRef fmask,
                              LLVMValueRef *addr, bool is_array_tex);

LLVMValueRef
ac_build_ds_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src, unsigned mask);

LLVMValueRef
ac_build_readlane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef lane);

LLVMValueRef
ac_build_writelane(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef value, LLVMValueRef lane);

LLVMValueRef
ac_build_mbcnt(struct ac_llvm_context *ctx, LLVMValueRef mask);

LLVMValueRef
ac_build_inclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef
ac_build_exclusive_scan(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op);

LLVMValueRef
ac_build_reduce(struct ac_llvm_context *ctx, LLVMValueRef src, nir_op op, unsigned cluster_size);

/**
 * Common arguments for a scan/reduce operation that accumulates per-wave
 * values across an entire workgroup, while respecting the order of waves.
 */
struct ac_wg_scan {
        bool enable_reduce;
        bool enable_exclusive;
        bool enable_inclusive;
        nir_op op;
        LLVMValueRef src; /* clobbered! */
        LLVMValueRef result_reduce;
        LLVMValueRef result_exclusive;
        LLVMValueRef result_inclusive;
        LLVMValueRef extra;
        LLVMValueRef waveidx;
        LLVMValueRef numwaves; /* only needed for "reduce" operations */

        /* T addrspace(LDS) pointer to the same type as value, at least maxwaves entries */
        LLVMValueRef scratch;
        unsigned maxwaves;
};

void
ac_build_wg_wavescan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_wavescan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_wavescan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);

void
ac_build_wg_scan_top(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_scan_bottom(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);
void
ac_build_wg_scan(struct ac_llvm_context *ctx, struct ac_wg_scan *ws);

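/*
 * Illustrative sketch of driving a workgroup-wide exclusive prefix sum with
 * the helpers above (field names come from struct ac_wg_scan; the LDS scratch
 * buffer and wave index values are caller-provided and only placeholders here):
 *
 *   struct ac_wg_scan ws = {0};
 *   ws.op = nir_op_iadd;
 *   ws.enable_exclusive = true;
 *   ws.src = value;            // clobbered by the helpers
 *   ws.scratch = lds_scratch;  // >= maxwaves entries of the same type
 *   ws.waveidx = wave_id;
 *   ws.maxwaves = max_num_waves;
 *   ac_build_wg_scan(ctx, &ws);
 *   ... use ws.result_exclusive ...
 */
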
LLVMValueRef
ac_build_quad_swizzle(struct ac_llvm_context *ctx, LLVMValueRef src,
                      unsigned lane0, unsigned lane1, unsigned lane2, unsigned lane3);

LLVMValueRef
ac_build_shuffle(struct ac_llvm_context *ctx, LLVMValueRef src, LLVMValueRef index);

LLVMValueRef
ac_build_frexp_exp(struct ac_llvm_context *ctx, LLVMValueRef src0,
                   unsigned bitsize);

LLVMValueRef
ac_build_frexp_mant(struct ac_llvm_context *ctx, LLVMValueRef src0,
                    unsigned bitsize);

LLVMValueRef
ac_build_ddxy_interp(struct ac_llvm_context *ctx, LLVMValueRef interp_ij);

LLVMValueRef
ac_build_load_helper_invocation(struct ac_llvm_context *ctx);

LLVMValueRef ac_build_call(struct ac_llvm_context *ctx, LLVMValueRef func,
                           LLVMValueRef *args, unsigned num_args);

LLVMValueRef ac_build_atomic_rmw(struct ac_llvm_context *ctx, LLVMAtomicRMWBinOp op,
                                 LLVMValueRef ptr, LLVMValueRef val,
                                 const char *sync_scope);

LLVMValueRef ac_build_atomic_cmp_xchg(struct ac_llvm_context *ctx, LLVMValueRef ptr,
                                      LLVMValueRef cmp, LLVMValueRef val,
                                      const char *sync_scope);

#ifdef __cplusplus
}
#endif

#endif