/* panfrost: Clamp shader->uniform_count
 * [mesa.git] / src/gallium/drivers/radeon/radeon_uvd_enc_1_1.c */
1 /**************************************************************************
2 *
3 * Copyright 2018 Advanced Micro Devices, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 #include "pipe/p_video_codec.h"
29 #include "radeon_uvd_enc.h"
30 #include "radeon_video.h"
31 #include "radeonsi/si_pipe.h"
32 #include "util/u_memory.h"
33 #include "util/u_video.h"
34 #include "vl/vl_video_buffer.h"
35
36 #include <stdio.h>
37
/* Append one dword to the encoder command stream. */
#define RADEON_ENC_CS(value) (enc->cs->current.buf[enc->cs->current.cdw++] = (value))
/* Open an encoder IB packet: reserve a dword for the packet size (back-patched
 * by RADEON_ENC_END) and emit the command id.  NOTE: deliberately leaves a
 * brace open — every RADEON_ENC_BEGIN must be paired with RADEON_ENC_END. */
#define RADEON_ENC_BEGIN(cmd) \
   { \
      uint32_t *begin = &enc->cs->current.buf[enc->cs->current.cdw++]; \
      RADEON_ENC_CS(cmd)
/* Emit a buffer address (hi/lo dwords) with read-only GPU access. */
#define RADEON_ENC_READ(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_READ, (domain), (off))
/* Emit a buffer address (hi/lo dwords) with write-only GPU access. */
#define RADEON_ENC_WRITE(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_WRITE, (domain), (off))
/* Emit a buffer address (hi/lo dwords) with read-write GPU access. */
#define RADEON_ENC_READWRITE(buf, domain, off) \
   radeon_uvd_enc_add_buffer(enc, (buf), RADEON_USAGE_READWRITE, (domain), (off))
/* Close a packet: store its size in bytes into the dword reserved by
 * RADEON_ENC_BEGIN and add it to the running task size. */
#define RADEON_ENC_END() \
   *begin = (&enc->cs->current.buf[enc->cs->current.cdw] - begin) * 4; \
   enc->total_task_size += *begin; \
   }

/* Shift for byte lane 0..3 when packing bitstream bytes MSB-first into a dword. */
static const unsigned index_to_shifts[4] = {24, 16, 8, 0};
55
56 static void radeon_uvd_enc_add_buffer(struct radeon_uvd_encoder *enc, struct pb_buffer *buf,
57 enum radeon_bo_usage usage, enum radeon_bo_domain domain,
58 signed offset)
59 {
60 enc->ws->cs_add_buffer(enc->cs, buf, usage | RADEON_USAGE_SYNCHRONIZED, domain, 0);
61 uint64_t addr;
62 addr = enc->ws->buffer_get_virtual_address(buf);
63 addr = addr + offset;
64 RADEON_ENC_CS(addr >> 32);
65 RADEON_ENC_CS(addr);
66 }
67
68 static void radeon_uvd_enc_set_emulation_prevention(struct radeon_uvd_encoder *enc, bool set)
69 {
70 if (set != enc->emulation_prevention) {
71 enc->emulation_prevention = set;
72 enc->num_zeros = 0;
73 }
74 }
75
76 static void radeon_uvd_enc_output_one_byte(struct radeon_uvd_encoder *enc, unsigned char byte)
77 {
78 if (enc->byte_index == 0)
79 enc->cs->current.buf[enc->cs->current.cdw] = 0;
80 enc->cs->current.buf[enc->cs->current.cdw] |=
81 ((unsigned int)(byte) << index_to_shifts[enc->byte_index]);
82 enc->byte_index++;
83
84 if (enc->byte_index >= 4) {
85 enc->byte_index = 0;
86 enc->cs->current.cdw++;
87 }
88 }
89
90 static void radeon_uvd_enc_emulation_prevention(struct radeon_uvd_encoder *enc, unsigned char byte)
91 {
92 if (enc->emulation_prevention) {
93 if ((enc->num_zeros >= 2) &&
94 ((byte == 0x00) || (byte == 0x01) || (byte == 0x02) || (byte == 0x03))) {
95 radeon_uvd_enc_output_one_byte(enc, 0x03);
96 enc->bits_output += 8;
97 enc->num_zeros = 0;
98 }
99 enc->num_zeros = (byte == 0 ? (enc->num_zeros + 1) : 0);
100 }
101 }
102
/* Write the low 'num_bits' bits of 'value' to the bitstream, MSB first.
 * Bits accumulate in the 32-bit enc->shifter; each completed byte is run
 * through emulation prevention and flushed to the command stream.
 * NOTE(review): the masking shift assumes num_bits <= 32 — callers keep it
 * in range. */
static void radeon_uvd_enc_code_fixed_bits(struct radeon_uvd_encoder *enc, unsigned int value,
                                           unsigned int num_bits)
{
   unsigned int bits_to_pack = 0;

   while (num_bits > 0) {
      /* Keep only the bits still to be written. */
      unsigned int value_to_pack = value & (0xffffffff >> (32 - num_bits));
      /* Pack at most the free space remaining in the shifter. */
      bits_to_pack =
         num_bits > (32 - enc->bits_in_shifter) ? (32 - enc->bits_in_shifter) : num_bits;

      /* If not everything fits, write the most-significant portion first. */
      if (bits_to_pack < num_bits)
         value_to_pack = value_to_pack >> (num_bits - bits_to_pack);

      enc->shifter |= value_to_pack << (32 - enc->bits_in_shifter - bits_to_pack);
      num_bits -= bits_to_pack;
      enc->bits_in_shifter += bits_to_pack;

      /* Flush completed bytes from the top of the shifter. */
      while (enc->bits_in_shifter >= 8) {
         unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
         enc->shifter <<= 8;
         radeon_uvd_enc_emulation_prevention(enc, output_byte);
         radeon_uvd_enc_output_one_byte(enc, output_byte);
         enc->bits_in_shifter -= 8;
         enc->bits_output += 8;
      }
   }
}
130
/* Reset all bitstream-writer state before building a new header. */
static void radeon_uvd_enc_reset(struct radeon_uvd_encoder *enc)
{
   enc->emulation_prevention = false;
   enc->shifter = 0;          /* bit accumulator */
   enc->bits_in_shifter = 0;  /* valid bits in the accumulator */
   enc->bits_output = 0;      /* total bits emitted so far */
   enc->num_zeros = 0;        /* consecutive zero bytes (emulation prevention) */
   enc->byte_index = 0;       /* byte lane within the current dword */
}
140
141 static void radeon_uvd_enc_byte_align(struct radeon_uvd_encoder *enc)
142 {
143 unsigned int num_padding_zeros = (32 - enc->bits_in_shifter) % 8;
144
145 if (num_padding_zeros > 0)
146 radeon_uvd_enc_code_fixed_bits(enc, 0, num_padding_zeros);
147 }
148
/* Flush writer state to the command stream: emit any partial byte left in
 * the shifter, then close out any partially-filled dword. */
static void radeon_uvd_enc_flush_headers(struct radeon_uvd_encoder *enc)
{
   if (enc->bits_in_shifter != 0) {
      /* Remaining bits sit in the top of the shifter; pad of the byte is zero. */
      unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
      radeon_uvd_enc_emulation_prevention(enc, output_byte);
      radeon_uvd_enc_output_one_byte(enc, output_byte);
      enc->bits_output += enc->bits_in_shifter;
      enc->shifter = 0;
      enc->bits_in_shifter = 0;
      enc->num_zeros = 0;
   }

   if (enc->byte_index > 0) {
      /* Commit the partially-filled dword. */
      enc->cs->current.cdw++;
      enc->byte_index = 0;
   }
}
166
/* Write an unsigned Exp-Golomb (ue(v)) code: value+1 coded in
 * 2*floor(log2(value+1)) + 1 bits (leading zeros, then the binary value). */
static void radeon_uvd_enc_code_ue(struct radeon_uvd_encoder *enc, unsigned int value)
{
   unsigned int code = value + 1;
   unsigned int msb_pos = 0; /* index of highest set bit, plus one */
   unsigned int tmp = code;

   while (tmp) {
      tmp >>= 1;
      msb_pos++;
   }

   radeon_uvd_enc_code_fixed_bits(enc, code, 2 * msb_pos - 1);
}
181
/* Write a signed Exp-Golomb (se(v)) code.  Mapping per H.265:
 * 0 -> 0, 1 -> 1, -1 -> 2, 2 -> 3, -2 -> 4, ...
 * The negation is performed in unsigned arithmetic: the original
 * (0 - value) evaluated in signed int, which is undefined behavior for
 * value == INT_MIN; (0u - uv) is well-defined and yields the same bits. */
static void radeon_uvd_enc_code_se(struct radeon_uvd_encoder *enc, int value)
{
   unsigned int v = 0;

   if (value != 0) {
      unsigned int uv = (unsigned int)value;
      /* positive k -> 2k - 1, negative k -> 2|k| */
      v = value < 0 ? ((0u - uv) << 1) : ((uv << 1) - 1);
   }

   radeon_uvd_enc_code_ue(enc, v);
}
191
/* Emit the SESSION_INFO packet: firmware interface version plus the
 * address of the session-info buffer (read/write for the firmware). */
static void radeon_uvd_enc_session_info(struct radeon_uvd_encoder *enc)
{
   unsigned int interface_version =
      ((RENC_UVD_FW_INTERFACE_MAJOR_VERSION << RENC_UVD_IF_MAJOR_VERSION_SHIFT) |
       (RENC_UVD_FW_INTERFACE_MINOR_VERSION << RENC_UVD_IF_MINOR_VERSION_SHIFT));
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SESSION_INFO);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(interface_version);
   RADEON_ENC_READWRITE(enc->si->res->buf, enc->si->res->domains, 0x0);
   RADEON_ENC_END();
}
203
204 static void radeon_uvd_enc_task_info(struct radeon_uvd_encoder *enc, bool need_feedback)
205 {
206 enc->enc_pic.task_info.task_id++;
207
208 if (need_feedback)
209 enc->enc_pic.task_info.allowed_max_num_feedbacks = 1;
210 else
211 enc->enc_pic.task_info.allowed_max_num_feedbacks = 0;
212
213 RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_TASK_INFO);
214 enc->p_task_size = &enc->cs->current.buf[enc->cs->current.cdw++];
215 RADEON_ENC_CS(enc->enc_pic.task_info.task_id);
216 RADEON_ENC_CS(enc->enc_pic.task_info.allowed_max_num_feedbacks);
217 RADEON_ENC_END();
218 }
219
/* Emit SESSION_INIT for HEVC: picture dimensions aligned to the hardware
 * requirements (width to 64 — the CTB size, height to 16) plus the padding
 * the alignment introduced.  Pre-encode (two-pass) mode is not used. */
static void radeon_uvd_enc_session_init_hevc(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.session_init.aligned_picture_width = align(enc->base.width, 64);
   enc->enc_pic.session_init.aligned_picture_height = align(enc->base.height, 16);
   enc->enc_pic.session_init.padding_width =
      enc->enc_pic.session_init.aligned_picture_width - enc->base.width;
   enc->enc_pic.session_init.padding_height =
      enc->enc_pic.session_init.aligned_picture_height - enc->base.height;
   enc->enc_pic.session_init.pre_encode_mode = RENC_UVD_PREENCODE_MODE_NONE;
   enc->enc_pic.session_init.pre_encode_chroma_enabled = false;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SESSION_INIT);
   RADEON_ENC_CS(enc->enc_pic.session_init.aligned_picture_width);
   RADEON_ENC_CS(enc->enc_pic.session_init.aligned_picture_height);
   RADEON_ENC_CS(enc->enc_pic.session_init.padding_width);
   RADEON_ENC_CS(enc->enc_pic.session_init.padding_height);
   RADEON_ENC_CS(enc->enc_pic.session_init.pre_encode_mode);
   RADEON_ENC_CS(enc->enc_pic.session_init.pre_encode_chroma_enabled);
   RADEON_ENC_END();
}
240
/* Emit LAYER_CONTROL: a single temporal layer (no temporal scalability). */
static void radeon_uvd_enc_layer_control(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.layer_ctrl.max_num_temporal_layers = 1;
   enc->enc_pic.layer_ctrl.num_temporal_layers = 1;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_LAYER_CONTROL);
   RADEON_ENC_CS(enc->enc_pic.layer_ctrl.max_num_temporal_layers);
   RADEON_ENC_CS(enc->enc_pic.layer_ctrl.num_temporal_layers);
   RADEON_ENC_END();
}
251
/* Emit LAYER_SELECT: subsequent per-layer packets target temporal layer 0
 * (the only layer — see radeon_uvd_enc_layer_control). */
static void radeon_uvd_enc_layer_select(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.layer_sel.temporal_layer_index = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_LAYER_SELECT);
   RADEON_ENC_CS(enc->enc_pic.layer_sel.temporal_layer_index);
   RADEON_ENC_END();
}
260
/* Emit SLICE_CONTROL for HEVC: fixed-CTB slices sized to the whole frame
 * (width and height rounded up to 64 — the CTB size), i.e. one slice per
 * picture; the slice segment covers the same CTB count. */
static void radeon_uvd_enc_slice_control_hevc(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.hevc_slice_ctrl.slice_control_mode = RENC_UVD_SLICE_CONTROL_MODE_FIXED_CTBS;
   enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice =
      align(enc->base.width, 64) / 64 * align(enc->base.height, 64) / 64;
   enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment =
      enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SLICE_CONTROL);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.slice_control_mode);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice);
   RADEON_ENC_CS(enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment);
   RADEON_ENC_END();
}
275
/* Emit SPEC_MISC for HEVC: miscellaneous spec-level flags taken from the
 * state tracker's picture description.  Half- and quarter-pel motion
 * estimation are always enabled. */
static void radeon_uvd_enc_spec_misc_hevc(struct radeon_uvd_encoder *enc,
                                          struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 =
      pic->seq.log2_min_luma_coding_block_size_minus3;
   /* Firmware flag is the inverse of the SPS amp_enabled_flag. */
   enc->enc_pic.hevc_spec_misc.amp_disabled = !pic->seq.amp_enabled_flag;
   enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled =
      pic->seq.strong_intra_smoothing_enabled_flag;
   enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag = pic->pic.constrained_intra_pred_flag;
   enc->enc_pic.hevc_spec_misc.cabac_init_flag = pic->slice.cabac_init_flag;
   enc->enc_pic.hevc_spec_misc.half_pel_enabled = 1;
   enc->enc_pic.hevc_spec_misc.quarter_pel_enabled = 1;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SPEC_MISC);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.amp_disabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.cabac_init_flag);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.half_pel_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_spec_misc.quarter_pel_enabled);
   RADEON_ENC_END();
}
300
/* Emit RATE_CONTROL_SESSION_INIT: map the gallium rate-control method onto
 * the firmware's (none / CBR / peak-constrained VBR) and pass the initial
 * VBV buffer level. */
static void radeon_uvd_enc_rc_session_init(struct radeon_uvd_encoder *enc,
                                           struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc.vbv_buf_lv;
   switch (pic->rc.rate_ctrl_method) {
   case PIPE_H265_ENC_RATE_CONTROL_METHOD_DISABLE:
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_NONE;
      break;
   case PIPE_H265_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
   case PIPE_H265_ENC_RATE_CONTROL_METHOD_CONSTANT:
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_CBR;
      break;
   case PIPE_H265_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
   case PIPE_H265_ENC_RATE_CONTROL_METHOD_VARIABLE:
      enc->enc_pic.rc_session_init.rate_control_method =
         RENC_UVD_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
      break;
   default:
      /* Unknown method: fall back to no rate control. */
      enc->enc_pic.rc_session_init.rate_control_method = RENC_UVD_RATE_CONTROL_METHOD_NONE;
   }

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_SESSION_INIT);
   RADEON_ENC_CS(enc->enc_pic.rc_session_init.rate_control_method);
   RADEON_ENC_CS(enc->enc_pic.rc_session_init.vbv_buffer_level);
   RADEON_ENC_END();
}
328
/* Emit RATE_CONTROL_LAYER_INIT: per-layer bitrate targets, frame rate and
 * VBV parameters, copied straight from the gallium rate-control state. */
static void radeon_uvd_enc_rc_layer_init(struct radeon_uvd_encoder *enc,
                                         struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.rc_layer_init.target_bit_rate = pic->rc.target_bitrate;
   enc->enc_pic.rc_layer_init.peak_bit_rate = pic->rc.peak_bitrate;
   enc->enc_pic.rc_layer_init.frame_rate_num = pic->rc.frame_rate_num;
   enc->enc_pic.rc_layer_init.frame_rate_den = pic->rc.frame_rate_den;
   enc->enc_pic.rc_layer_init.vbv_buffer_size = pic->rc.vbv_buffer_size;
   enc->enc_pic.rc_layer_init.avg_target_bits_per_picture = pic->rc.target_bits_picture;
   enc->enc_pic.rc_layer_init.peak_bits_per_picture_integer = pic->rc.peak_bits_picture_integer;
   enc->enc_pic.rc_layer_init.peak_bits_per_picture_fractional = pic->rc.peak_bits_picture_fraction;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_LAYER_INIT);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.target_bit_rate);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bit_rate);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.frame_rate_num);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.frame_rate_den);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.vbv_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.avg_target_bits_per_picture);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bits_per_picture_integer);
   RADEON_ENC_CS(enc->enc_pic.rc_layer_init.peak_bits_per_picture_fractional);
   RADEON_ENC_END();
}
353
/* Emit DEBLOCKING_FILTER for HEVC: slice-level deblocking controls and
 * chroma QP offsets, taken from the gallium slice parameters. */
static void radeon_uvd_enc_deblocking_filter_hevc(struct radeon_uvd_encoder *enc,
                                                  struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled =
      pic->slice.slice_loop_filter_across_slices_enabled_flag;
   enc->enc_pic.hevc_deblock.deblocking_filter_disabled =
      pic->slice.slice_deblocking_filter_disabled_flag;
   enc->enc_pic.hevc_deblock.beta_offset_div2 = pic->slice.slice_beta_offset_div2;
   enc->enc_pic.hevc_deblock.tc_offset_div2 = pic->slice.slice_tc_offset_div2;
   enc->enc_pic.hevc_deblock.cb_qp_offset = pic->slice.slice_cb_qp_offset;
   enc->enc_pic.hevc_deblock.cr_qp_offset = pic->slice.slice_cr_qp_offset;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_DEBLOCKING_FILTER);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.deblocking_filter_disabled);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.beta_offset_div2);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.tc_offset_div2);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.cb_qp_offset);
   RADEON_ENC_CS(enc->enc_pic.hevc_deblock.cr_qp_offset);
   RADEON_ENC_END();
}
376
/* Emit QUALITY_PARAMS: VBAQ and scene-change detection are disabled. */
static void radeon_uvd_enc_quality_params(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.quality_params.vbaq_mode = 0;
   enc->enc_pic.quality_params.scene_change_sensitivity = 0;
   enc->enc_pic.quality_params.scene_change_min_idr_interval = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_QUALITY_PARAMS);
   RADEON_ENC_CS(enc->enc_pic.quality_params.vbaq_mode);
   RADEON_ENC_CS(enc->enc_pic.quality_params.scene_change_sensitivity);
   RADEON_ENC_CS(enc->enc_pic.quality_params.scene_change_min_idr_interval);
   RADEON_ENC_END();
}
389
/* Build the HEVC SPS NALU and insert it into the command stream.  The dword
 * after the NALU type receives the header size in bytes once written.
 * Inline comments name the H.265 syntax elements being coded. */
static void radeon_uvd_enc_nalu_sps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_SPS);
   uint32_t *size_in_bytes = &enc->cs->current.buf[enc->cs->current.cdw++];
   int i;

   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32); /* start code */
   radeon_uvd_enc_code_fixed_bits(enc, 0x4201, 16); /* NAL header: type 33 (SPS), temporal id 1 */
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 4); /* sps_video_parameter_set_id */
   /* sps_max_sub_layers_minus1 */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1, 3);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* sps_temporal_id_nesting_flag */
   /* profile_tier_level() */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2); /* general_profile_space */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_tier_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_profile_idc, 5);
   radeon_uvd_enc_code_fixed_bits(enc, 0x60000000, 32); /* profile compatibility flags */
   radeon_uvd_enc_code_fixed_bits(enc, 0xb0000000, 32); /* progressive/non-packed/frame-only + reserved */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 16); /* reserved zero bits */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_level_idc, 8);

   /* sub_layer_profile/level_present flags (all zero) */
   for (i = 0; i < (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i++)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);

   /* reserved alignment bits when sub-layers are present */
   if ((enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1) > 0) {
      for (i = (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i < 8; i++)
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   }

   radeon_uvd_enc_code_ue(enc, 0x0); /* sps_seq_parameter_set_id */
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.chroma_format_idc);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.session_init.aligned_picture_width);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.session_init.aligned_picture_height);

   /* conformance window crops the alignment padding back off */
   int conformance_window_flag = (enc->enc_pic.crop_top > 0) || (enc->enc_pic.crop_bottom > 0) ||
                                       (enc->enc_pic.crop_left > 0) || (enc->enc_pic.crop_right > 0)
                                    ? 0x1
                                    : 0x0;
   radeon_uvd_enc_code_fixed_bits(enc, conformance_window_flag, 1);
   if (conformance_window_flag == 1) {
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_left);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_right);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_top);
      radeon_uvd_enc_code_ue(enc, enc->enc_pic.crop_bottom);
   }

   radeon_uvd_enc_code_ue(enc, enc->enc_pic.bit_depth_luma_minus8);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.bit_depth_chroma_minus8);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_max_poc - 4); /* log2_max_pic_order_cnt_lsb_minus4 */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* sps_sub_layer_ordering_info_present_flag */
   radeon_uvd_enc_code_ue(enc, 1);   /* sps_max_dec_pic_buffering_minus1 */
   radeon_uvd_enc_code_ue(enc, 0x0); /* sps_max_num_reorder_pics */
   radeon_uvd_enc_code_ue(enc, 0x0); /* sps_max_latency_increase_plus1 */
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3);
   /* Only support CTBSize 64 */
   radeon_uvd_enc_code_ue(
      enc, 6 - (enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 + 3));
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_min_transform_block_size_minus2);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_diff_max_min_transform_block_size);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.max_transform_hierarchy_depth_inter);
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.max_transform_hierarchy_depth_intra);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* scaling_list_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, !enc->enc_pic.hevc_spec_misc.amp_disabled, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.sample_adaptive_offset_enabled_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.pcm_enabled_flag, 1);

   /* one short-term reference picture set: a single negative (past) ref
    * (presumably: 1 set / 1 neg pic / 0 pos pics / delta 0 / used = 1) */
   radeon_uvd_enc_code_ue(enc, 1);
   radeon_uvd_enc_code_ue(enc, 1);
   radeon_uvd_enc_code_ue(enc, 0);
   radeon_uvd_enc_code_ue(enc, 0);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* long_term_ref_pics_present_flag */

   radeon_uvd_enc_code_fixed_bits(enc, 0, 1); /* sps_temporal_mvp_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled,
                                  1);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* vui_parameters_present_flag */

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* sps_extension_present_flag */

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* rbsp stop bit */

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}
483
/* Build the HEVC PPS NALU and insert it into the command stream.  Inline
 * comments name the H.265 syntax elements being coded. */
static void radeon_uvd_enc_nalu_pps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_PPS);
   uint32_t *size_in_bytes = &enc->cs->current.buf[enc->cs->current.cdw++];
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32); /* start code */
   radeon_uvd_enc_code_fixed_bits(enc, 0x4401, 16); /* NAL header: type 34 (PPS), temporal id 1 */
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   radeon_uvd_enc_code_ue(enc, 0x0);            /* pps_pic_parameter_set_id */
   radeon_uvd_enc_code_ue(enc, 0x0);            /* pps_seq_parameter_set_id */
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* dependent_slice_segments_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* output_flag_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 3); /* num_extra_slice_header_bits */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* sign_data_hiding_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* cabac_init_present_flag */
   radeon_uvd_enc_code_ue(enc, 0x0); /* num_ref_idx_l0_default_active_minus1 */
   radeon_uvd_enc_code_ue(enc, 0x0); /* num_ref_idx_l1_default_active_minus1 */
   radeon_uvd_enc_code_se(enc, 0x0); /* init_qp_minus26 */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* transform_skip_enabled_flag */
   /* cu_qp_delta only when rate control needs per-CU QP adjustment */
   if (enc->enc_pic.rc_session_init.rate_control_method == RENC_UVD_RATE_CONTROL_METHOD_NONE)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
   else {
      radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* cu_qp_delta_enabled_flag */
      radeon_uvd_enc_code_ue(enc, 0x0);            /* diff_cu_qp_delta_depth */
   }
   radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.cb_qp_offset);
   radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.cr_qp_offset);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* pps_slice_chroma_qp_offsets_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2); /* weighted_pred / weighted_bipred flags */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* transquant_bypass_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* tiles_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* entropy_coding_sync_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled,
                                  1);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* deblocking_filter_control_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* deblocking_filter_override_enabled_flag */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_deblock.deblocking_filter_disabled, 1);

   if (!enc->enc_pic.hevc_deblock.deblocking_filter_disabled) {
      radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.beta_offset_div2);
      radeon_uvd_enc_code_se(enc, enc->enc_pic.hevc_deblock.tc_offset_div2);
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* pps_scaling_list_data_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* lists_modification_present_flag */
   radeon_uvd_enc_code_ue(enc, enc->enc_pic.log2_parallel_merge_level_minus2);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2); /* slice header extension / pps extension flags */

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* rbsp stop bit */

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}
543
/* Build the HEVC VPS NALU and insert it into the command stream.  Inline
 * comments name the H.265 syntax elements being coded. */
static void radeon_uvd_enc_nalu_vps_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_VPS);
   uint32_t *size_in_bytes = &enc->cs->current.buf[enc->cs->current.cdw++];
   int i;

   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32); /* start code */
   radeon_uvd_enc_code_fixed_bits(enc, 0x4001, 16); /* NAL header: type 32 (VPS), temporal id 1 */
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 4); /* vps_video_parameter_set_id */
   radeon_uvd_enc_code_fixed_bits(enc, 0x3, 2); /* vps_reserved_three_2bits */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6); /* vps_max_layers_minus1 */
   /* vps_max_sub_layers_minus1 */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1, 3);
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* vps_temporal_id_nesting_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0xffff, 16); /* vps_reserved_0xffff_16bits */
   /* profile_tier_level() — same values as in the SPS */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2); /* general_profile_space */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_tier_flag, 1);
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_profile_idc, 5);
   radeon_uvd_enc_code_fixed_bits(enc, 0x60000000, 32); /* profile compatibility flags */
   radeon_uvd_enc_code_fixed_bits(enc, 0xb0000000, 32); /* progressive/non-packed/frame-only + reserved */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 16); /* reserved zero bits */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.general_level_idc, 8);

   /* sub_layer_profile/level_present flags (all zero) */
   for (i = 0; i < (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i++)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);

   /* reserved alignment bits when sub-layers are present */
   if ((enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1) > 0) {
      for (i = (enc->enc_pic.layer_ctrl.max_num_temporal_layers - 1); i < 8; i++)
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 2);
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* vps_sub_layer_ordering_info_present_flag */
   radeon_uvd_enc_code_ue(enc, 0x1); /* vps_max_dec_pic_buffering_minus1 */
   radeon_uvd_enc_code_ue(enc, 0x0); /* vps_max_num_reorder_pics */
   radeon_uvd_enc_code_ue(enc, 0x0); /* vps_max_latency_increase_plus1 */

   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6); /* vps_max_layer_id */
   radeon_uvd_enc_code_ue(enc, 0x0);            /* vps_num_layer_sets_minus1 */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* vps_timing_info_present_flag */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* vps_extension_flag */

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* rbsp stop bit */

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}
597
/* Build an HEVC access-unit delimiter NALU: pic_type advertises which slice
 * types the AU may contain (0 = I only, 1 = I/P, 2 = I/P/B). */
static void radeon_uvd_enc_nalu_aud_hevc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INSERT_NALU_BUFFER);
   RADEON_ENC_CS(RENC_UVD_NALU_TYPE_AUD);
   uint32_t *size_in_bytes = &enc->cs->current.buf[enc->cs->current.cdw++];
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);
   radeon_uvd_enc_code_fixed_bits(enc, 0x00000001, 32); /* start code */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);         /* forbidden_zero_bit */
   radeon_uvd_enc_code_fixed_bits(enc, 35, 6);          /* nal_unit_type: AUD */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6);         /* nuh_layer_id */
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 3);         /* nuh_temporal_id_plus1 */
   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, true);
   switch (enc->enc_pic.picture_type) {
   case PIPE_H265_ENC_PICTURE_TYPE_I:
   case PIPE_H265_ENC_PICTURE_TYPE_IDR:
      radeon_uvd_enc_code_fixed_bits(enc, 0x00, 3);
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_P:
      radeon_uvd_enc_code_fixed_bits(enc, 0x01, 3);
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_B:
      radeon_uvd_enc_code_fixed_bits(enc, 0x02, 3);
      break;
   default:
      assert(0 && "Unsupported picture type!");
   }

   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* rbsp stop bit */

   radeon_uvd_enc_byte_align(enc);
   radeon_uvd_enc_flush_headers(enc);
   *size_in_bytes = (enc->bits_output + 7) / 8;
   RADEON_ENC_END();
}
634
/* Build the HEVC slice-header template packet.  The header is written in
 * pieces: literal bit runs are recorded as COPY instructions (with their bit
 * counts), while fields only the firmware can fill in at encode time
 * (first-slice flag, slice segment address, QP delta, ...) are recorded as
 * dedicated instructions.  The template dwords and the instruction table are
 * then emitted at their fixed sizes. */
static void radeon_uvd_enc_slice_header_hevc(struct radeon_uvd_encoder *enc)
{
   uint32_t instruction[RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS] = {0};
   uint32_t num_bits[RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS] = {0};
   unsigned int inst_index = 0;
   unsigned int bit_index = 0;
   unsigned int bits_copied = 0;
   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_SLICE_HEADER);
   radeon_uvd_enc_reset(enc);
   radeon_uvd_enc_set_emulation_prevention(enc, false);

   /* NAL unit header */
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* forbidden_zero_bit */
   radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.nal_unit_type, 6);
   radeon_uvd_enc_code_fixed_bits(enc, 0x0, 6); /* nuh_layer_id */
   radeon_uvd_enc_code_fixed_bits(enc, 0x1, 3); /* nuh_temporal_id_plus1 */

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   /* first_slice_segment_in_pic_flag — filled in by firmware */
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_FIRST_SLICE;
   inst_index++;

   /* IRAP NAL types (16..23): no_output_of_prior_pics_flag */
   if ((enc->enc_pic.nal_unit_type >= 16) && (enc->enc_pic.nal_unit_type <= 23))
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);

   radeon_uvd_enc_code_ue(enc, 0x0); /* slice_pic_parameter_set_id */

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   /* slice segment address — filled in by firmware */
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_SLICE_SEGMENT;
   inst_index++;

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_DEPENDENT_SLICE_END;
   inst_index++;

   /* slice_type: 2 = I, 1 = P, 0 = B */
   switch (enc->enc_pic.picture_type) {
   case PIPE_H265_ENC_PICTURE_TYPE_I:
   case PIPE_H265_ENC_PICTURE_TYPE_IDR:
      radeon_uvd_enc_code_ue(enc, 0x2);
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_P:
   case PIPE_H265_ENC_PICTURE_TYPE_SKIP:
      radeon_uvd_enc_code_ue(enc, 0x1);
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_B:
      radeon_uvd_enc_code_ue(enc, 0x0);
      break;
   default:
      radeon_uvd_enc_code_ue(enc, 0x1);
   }

   /* non-IDR (nal types 19/20 are IDR): POC lsb and reference picture set */
   if ((enc->enc_pic.nal_unit_type != 19) && (enc->enc_pic.nal_unit_type != 20)) {
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.pic_order_cnt, enc->enc_pic.log2_max_poc);
      if (enc->enc_pic.picture_type == PIPE_H265_ENC_PICTURE_TYPE_P)
         radeon_uvd_enc_code_fixed_bits(enc, 0x1, 1); /* use the RPS from the SPS */
      else {
         /* explicit empty short-term RPS */
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
         radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1);
         radeon_uvd_enc_code_ue(enc, 0x0);
         radeon_uvd_enc_code_ue(enc, 0x0);
      }
   }

   if (enc->enc_pic.sample_adaptive_offset_enabled_flag)
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* slice_sao_luma_flag */

   if ((enc->enc_pic.picture_type == PIPE_H265_ENC_PICTURE_TYPE_P) ||
       (enc->enc_pic.picture_type == PIPE_H265_ENC_PICTURE_TYPE_B)) {
      radeon_uvd_enc_code_fixed_bits(enc, 0x0, 1); /* num_ref_idx_active_override_flag */
      radeon_uvd_enc_code_fixed_bits(enc, enc->enc_pic.hevc_spec_misc.cabac_init_flag, 1);
      radeon_uvd_enc_code_ue(enc, 5 - enc->enc_pic.max_num_merge_cand); /* five_minus_max_num_merge_cand */
   }

   radeon_uvd_enc_flush_headers(enc);
   bit_index++;
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
   num_bits[inst_index] = enc->bits_output - bits_copied;
   bits_copied = enc->bits_output;
   inst_index++;

   /* slice_qp_delta — filled in by firmware (rate control) */
   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_SLICE_QP_DELTA;
   inst_index++;

   if ((enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled) &&
       (!enc->enc_pic.hevc_deblock.deblocking_filter_disabled)) {
      radeon_uvd_enc_code_fixed_bits(
         enc, enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled, 1);

      radeon_uvd_enc_flush_headers(enc);
      bit_index++;
      instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_COPY;
      num_bits[inst_index] = enc->bits_output - bits_copied;
      bits_copied = enc->bits_output;
      inst_index++;
   }

   instruction[inst_index] = RENC_UVD_HEADER_INSTRUCTION_END;

   /* pad the template area to its fixed dword size */
   for (int i = bit_index; i < RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_TEMPLATE_SIZE_IN_DWORDS; i++)
      RADEON_ENC_CS(0x00000000);

   /* emit the full instruction table */
   for (int j = 0; j < RENC_UVD_SLICE_HEADER_TEMPLATE_MAX_NUM_INSTRUCTIONS; j++) {
      RADEON_ENC_CS(instruction[j]);
      RADEON_ENC_CS(num_bits[j]);
   }

   RADEON_ENC_END();
}
752
/* Emit the ENCODE_CONTEXT_BUFFER packet: reconstructed-picture pitches
 * (legacy vs gfx9 surface layouts differ) and offsets for the two
 * reconstructed pictures inside the CPB, laid out as NV12 (luma plane
 * followed by interleaved chroma, hence the 3/2 frame-size stride). */
static void radeon_uvd_enc_ctx(struct radeon_uvd_encoder *enc)
{
   struct si_screen *sscreen = (struct si_screen *)enc->screen;

   enc->enc_pic.ctx_buf.swizzle_mode = 0; /* linear */
   if (sscreen->info.chip_class < GFX9) {
      enc->enc_pic.ctx_buf.rec_luma_pitch = (enc->luma->u.legacy.level[0].nblk_x * enc->luma->bpe);
      enc->enc_pic.ctx_buf.rec_chroma_pitch =
         (enc->chroma->u.legacy.level[0].nblk_x * enc->chroma->bpe);
   } else {
      enc->enc_pic.ctx_buf.rec_luma_pitch = enc->luma->u.gfx9.surf_pitch * enc->luma->bpe;
      enc->enc_pic.ctx_buf.rec_chroma_pitch = enc->chroma->u.gfx9.surf_pitch * enc->chroma->bpe;
   }
   enc->enc_pic.ctx_buf.num_reconstructed_pictures = 2;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_ENCODE_CONTEXT_BUFFER);
   RADEON_ENC_READWRITE(enc->cpb.res->buf, enc->cpb.res->domains, 0);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.swizzle_mode);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_luma_pitch);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch);
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.num_reconstructed_pictures);
   /* reconstructed_picture_1_luma_offset */
   RADEON_ENC_CS(0x00000000);
   /* reconstructed_picture_1_chroma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch * align(enc->base.height, 16));
   /* reconstructed_picture_2_luma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_luma_pitch * align(enc->base.height, 16) * 3 / 2);
   /* reconstructed_picture_2_chroma_offset */
   RADEON_ENC_CS(enc->enc_pic.ctx_buf.rec_chroma_pitch * align(enc->base.height, 16) * 5 / 2);

   /* zero the remaining context-buffer fields to pad the packet to its
    * fixed size (presumably reserved/unused by this encoder — TODO confirm
    * against the firmware interface) */
   for (int i = 0; i < 136; i++)
      RADEON_ENC_CS(0x00000000);

   RADEON_ENC_END();
}
789
/* Emit the VIDEO_BITSTREAM_BUFFER packet describing where the encoded
 * bitstream is written: a linear buffer of enc->bs_size bytes starting at
 * offset 0 in enc->bs_handle. */
static void radeon_uvd_enc_bitstream(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.bit_buf.mode = RENC_UVD_SWIZZLE_MODE_LINEAR;
   enc->enc_pic.bit_buf.video_bitstream_buffer_size = enc->bs_size;
   enc->enc_pic.bit_buf.video_bitstream_data_offset = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_VIDEO_BITSTREAM_BUFFER);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.mode);
   /* GTT domain: the bitstream is read back by the CPU after encode. */
   RADEON_ENC_WRITE(enc->bs_handle, RADEON_DOMAIN_GTT, 0);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.video_bitstream_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.bit_buf.video_bitstream_data_offset);
   RADEON_ENC_END();
}
803
/* Emit the FEEDBACK_BUFFER packet. The firmware writes per-frame status
 * (e.g. produced bitstream size) into enc->fb after the encode completes.
 * Buffer/data sizes are fixed magic values expected by the UVD firmware. */
static void radeon_uvd_enc_feedback(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.fb_buf.mode = RENC_UVD_FEEDBACK_BUFFER_MODE_LINEAR;
   enc->enc_pic.fb_buf.feedback_buffer_size = 16;
   enc->enc_pic.fb_buf.feedback_data_size = 40;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_FEEDBACK_BUFFER);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.mode);
   RADEON_ENC_WRITE(enc->fb->res->buf, enc->fb->res->domains, 0x0);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.feedback_buffer_size);
   RADEON_ENC_CS(enc->enc_pic.fb_buf.feedback_data_size);
   RADEON_ENC_END();
}
817
/* Emit the INTRA_REFRESH packet. Intra refresh is not supported by this
 * implementation, so the mode is hard-wired to NONE with a zero region. */
static void radeon_uvd_enc_intra_refresh(struct radeon_uvd_encoder *enc)
{
   enc->enc_pic.intra_ref.intra_refresh_mode = RENC_UVD_INTRA_REFRESH_MODE_NONE;
   enc->enc_pic.intra_ref.offset = 0;
   enc->enc_pic.intra_ref.region_size = 0;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_INTRA_REFRESH);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.intra_refresh_mode);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.offset);
   RADEON_ENC_CS(enc->enc_pic.intra_ref.region_size);
   RADEON_ENC_END();
}
830
/* Emit the per-picture RATE_CONTROL packet from the state-tracker's
 * pipe_h265_enc_picture_desc rate-control settings. */
static void radeon_uvd_enc_rc_per_pic(struct radeon_uvd_encoder *enc,
                                      struct pipe_picture_desc *picture)
{
   struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
   /* NOTE(review): quant_i_frames is used for every picture type here —
    * confirm whether P/B frames should use their own QP values. */
   enc->enc_pic.rc_per_pic.qp = pic->rc.quant_i_frames;
   /* Full QP range allowed; the app imposes no tighter bounds. */
   enc->enc_pic.rc_per_pic.min_qp_app = 0;
   enc->enc_pic.rc_per_pic.max_qp_app = 51;
   enc->enc_pic.rc_per_pic.max_au_size = 0; /* 0 = no per-AU size limit */
   enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc.fill_data_enable;
   enc->enc_pic.rc_per_pic.skip_frame_enable = false;
   enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc.enforce_hrd;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_RATE_CONTROL_PER_PICTURE);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.qp);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.min_qp_app);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.max_qp_app);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.max_au_size);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.enabled_filler_data);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.skip_frame_enable);
   RADEON_ENC_CS(enc->enc_pic.rc_per_pic.enforce_hrd);
   RADEON_ENC_END();
}
853
/* Emit the ENCODE_PARAMS packet for one HEVC frame: picture type, input
 * surface addresses/pitches and the reference/reconstructed picture slots
 * in the two-entry CPB ring (see radeon_uvd_enc_ctx). */
static void radeon_uvd_enc_encode_params_hevc(struct radeon_uvd_encoder *enc)
{
   struct si_screen *sscreen = (struct si_screen *)enc->screen;
   /* Map the gallium picture type to the firmware's picture-type enum. */
   switch (enc->enc_pic.picture_type) {
   case PIPE_H265_ENC_PICTURE_TYPE_I:
   case PIPE_H265_ENC_PICTURE_TYPE_IDR:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_I;
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_P:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_P;
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_SKIP:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_P_SKIP;
      break;
   case PIPE_H265_ENC_PICTURE_TYPE_B:
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_B;
      break;
   default:
      /* Unknown types fall back to intra coding. */
      enc->enc_pic.enc_params.pic_type = RENC_UVD_PICTURE_TYPE_I;
   }

   enc->enc_pic.enc_params.allowed_max_bitstream_size = enc->bs_size;
   if (sscreen->info.chip_class < GFX9) {
      /* Pre-GFX9: pitch from the legacy per-level surface layout. */
      enc->enc_pic.enc_params.input_pic_luma_pitch =
         (enc->luma->u.legacy.level[0].nblk_x * enc->luma->bpe);
      enc->enc_pic.enc_params.input_pic_chroma_pitch =
         (enc->chroma->u.legacy.level[0].nblk_x * enc->chroma->bpe);
   } else {
      enc->enc_pic.enc_params.input_pic_luma_pitch = enc->luma->u.gfx9.surf_pitch * enc->luma->bpe;
      enc->enc_pic.enc_params.input_pic_chroma_pitch =
         enc->chroma->u.gfx9.surf_pitch * enc->chroma->bpe;
   }
   enc->enc_pic.enc_params.input_pic_swizzle_mode = RENC_UVD_SWIZZLE_MODE_LINEAR;

   /* I-frames take no reference; otherwise ping-pong between the two CPB
    * slots: the previous frame's reconstruction is the reference and the
    * other slot receives this frame's reconstruction. */
   if (enc->enc_pic.enc_params.pic_type == RENC_UVD_PICTURE_TYPE_I)
      enc->enc_pic.enc_params.reference_picture_index = 0xFFFFFFFF;
   else
      enc->enc_pic.enc_params.reference_picture_index = (enc->enc_pic.frame_num - 1) % 2;

   enc->enc_pic.enc_params.reconstructed_picture_index = enc->enc_pic.frame_num % 2;

   RADEON_ENC_BEGIN(RENC_UVD_IB_PARAM_ENCODE_PARAMS);
   RADEON_ENC_CS(enc->enc_pic.enc_params.pic_type);
   RADEON_ENC_CS(enc->enc_pic.enc_params.allowed_max_bitstream_size);

   /* Input luma then chroma plane addresses, layout-dependent offsets. */
   if (sscreen->info.chip_class < GFX9) {
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->luma->u.legacy.level[0].offset);
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->chroma->u.legacy.level[0].offset);
   } else {
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->luma->u.gfx9.surf_offset);
      RADEON_ENC_READ(enc->handle, RADEON_DOMAIN_VRAM, enc->chroma->u.gfx9.surf_offset);
   }
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_luma_pitch);
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_chroma_pitch);
   RADEON_ENC_CS(0x00000000); // reserved
   RADEON_ENC_CS(enc->enc_pic.enc_params.input_pic_swizzle_mode);
   RADEON_ENC_CS(enc->enc_pic.enc_params.reference_picture_index);
   RADEON_ENC_CS(enc->enc_pic.enc_params.reconstructed_picture_index);
   RADEON_ENC_END();
}
914
/* Emit the bare INITIALIZE opcode (no payload). */
static void radeon_uvd_enc_op_init(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INITIALIZE);
   RADEON_ENC_END();
}
920
/* Emit the bare CLOSE_SESSION opcode (no payload). */
static void radeon_uvd_enc_op_close(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_CLOSE_SESSION);
   RADEON_ENC_END();
}
926
/* Emit the bare ENCODE opcode (no payload); triggers the actual encode. */
static void radeon_uvd_enc_op_enc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_ENCODE);
   RADEON_ENC_END();
}
932
/* Emit the bare INIT_RC opcode (no payload); initializes rate control. */
static void radeon_uvd_enc_op_init_rc(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INIT_RC);
   RADEON_ENC_END();
}
938
/* Emit the bare INIT_RC_VBV_BUFFER_LEVEL opcode (no payload). */
static void radeon_uvd_enc_op_init_rc_vbv(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_INIT_RC_VBV_BUFFER_LEVEL);
   RADEON_ENC_END();
}
944
/* Emit the bare SET_SPEED_ENCODING_MODE opcode (no payload). */
static void radeon_uvd_enc_op_speed(struct radeon_uvd_encoder *enc)
{
   RADEON_ENC_BEGIN(RENC_UVD_IB_OP_SET_SPEED_ENCODING_MODE);
   RADEON_ENC_END();
}
950
/* Build the session-setup command stream: session/task info, the
 * INITIALIZE opcode, all static HEVC session parameters, then rate-control
 * initialization. The call order below is the packet order in the IB and
 * must not be rearranged. Writes the accumulated stream size back through
 * enc->p_task_size. */
static void begin(struct radeon_uvd_encoder *enc, struct pipe_picture_desc *pic)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);
   radeon_uvd_enc_op_init(enc);

   radeon_uvd_enc_session_init_hevc(enc);
   radeon_uvd_enc_slice_control_hevc(enc);
   radeon_uvd_enc_spec_misc_hevc(enc, pic);
   radeon_uvd_enc_deblocking_filter_hevc(enc, pic);

   radeon_uvd_enc_layer_control(enc);
   radeon_uvd_enc_rc_session_init(enc, pic);
   radeon_uvd_enc_quality_params(enc);
   /* layer_select is re-emitted before each per-layer RC packet. */
   radeon_uvd_enc_layer_select(enc);
   radeon_uvd_enc_rc_layer_init(enc, pic);
   radeon_uvd_enc_layer_select(enc);
   radeon_uvd_enc_rc_per_pic(enc, pic);
   radeon_uvd_enc_op_init_rc(enc);
   radeon_uvd_enc_op_init_rc_vbv(enc);
   *enc->p_task_size = (enc->total_task_size);
}
974
/* Build the per-frame encode command stream: headers (AUD always;
 * VPS/PPS/SPS only on intra frames), slice header, per-frame encode
 * parameters, the buffer descriptors, and finally the ENCODE opcode.
 * The call order below is the packet order in the IB and must not be
 * rearranged. Writes the accumulated stream size back through
 * enc->p_task_size. */
static void encode(struct radeon_uvd_encoder *enc)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);

   radeon_uvd_enc_nalu_aud_hevc(enc);

   if (enc->enc_pic.is_iframe) {
      /* Parameter sets are only re-sent at intra frames. */
      radeon_uvd_enc_nalu_vps_hevc(enc);
      radeon_uvd_enc_nalu_pps_hevc(enc);
      radeon_uvd_enc_nalu_sps_hevc(enc);
   }
   radeon_uvd_enc_slice_header_hevc(enc);
   radeon_uvd_enc_encode_params_hevc(enc);

   radeon_uvd_enc_ctx(enc);
   radeon_uvd_enc_bitstream(enc);
   radeon_uvd_enc_feedback(enc);
   radeon_uvd_enc_intra_refresh(enc);

   radeon_uvd_enc_op_speed(enc);
   radeon_uvd_enc_op_enc(enc);
   *enc->p_task_size = (enc->total_task_size);
}
1000
/* Build the session-teardown command stream (CLOSE_SESSION). Despite the
 * name, this only emits packets — it frees no CPU-side resources. Writes
 * the accumulated stream size back through enc->p_task_size. */
static void destroy(struct radeon_uvd_encoder *enc)
{
   radeon_uvd_enc_session_info(enc);
   enc->total_task_size = 0;
   radeon_uvd_enc_task_info(enc, enc->need_feedback);
   radeon_uvd_enc_op_close(enc);
   *enc->p_task_size = (enc->total_task_size);
}
1009
1010 void radeon_uvd_enc_1_1_init(struct radeon_uvd_encoder *enc)
1011 {
1012 enc->begin = begin;
1013 enc->encode = encode;
1014 enc->destroy = destroy;
1015 }