i965: Use unreachable() instead of unconditional assert().
src/mesa/drivers/dri/i965/brw_state_upload.c
/*
 Copyright (C) Intel Corp. 2006. All Rights Reserved.
 Intel funded Tungsten Graphics to
 develop this 3D driver.

 Permission is hereby granted, free of charge, to any person obtaining
 a copy of this software and associated documentation files (the
 "Software"), to deal in the Software without restriction, including
 without limitation the rights to use, copy, modify, merge, publish,
 distribute, sublicense, and/or sell copies of the Software, and to
 permit persons to whom the Software is furnished to do so, subject to
 the following conditions:

 The above copyright notice and this permission notice (including the
 next paragraph) shall be included in all copies or substantial
 portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

 **********************************************************************/
 /*
  * Authors:
  *   Keith Whitwell <keithw@vmware.com>
  */


#include "brw_context.h"
#include "brw_state.h"
#include "drivers/common/meta.h"
#include "intel_batchbuffer.h"
#include "intel_buffers.h"

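/* State atom lists, one per hardware generation.  Each atom declares the
 * Mesa, BRW, and cache dirty bits it depends on and provides an emit()
 * callback; brw_upload_state() walks the selected list in order and
 * re-emits every atom whose dirty bits intersect the currently dirty
 * state.  The ordering constraints noted in the comments below matter.
 */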
static const struct brw_tracked_state *gen4_atoms[] =
{
   &brw_vs_prog, /* must do before GS prog, state base address. */
   &brw_ff_gs_prog, /* must do before state base address */

   &brw_interpolation_map,

   &brw_clip_prog, /* must do before state base address */
   &brw_sf_prog, /* must do before state base address */
   &brw_wm_prog, /* must do before state base address */

   /* Once all the programs are done, we know how large urb entry
    * sizes need to be and can decide if we need to change the urb
    * layout.
    */
   &brw_curbe_offsets,
   &brw_recalculate_urb_fence,

   &brw_cc_vp,
   &brw_cc_unit,

   /* Surface state setup. Must come before the VS/WM unit. The binding
    * table upload must be last.
    */
   &brw_vs_pull_constants,
   &brw_wm_pull_constants,
   &brw_renderbuffer_surfaces,
   &brw_texture_surfaces,
   &brw_vs_binding_table,
   &brw_wm_binding_table,

   &brw_fs_samplers,
   &brw_vs_samplers,

   /* These set up state for brw_psp_urb_cbs */
   &brw_wm_unit,
   &brw_sf_vp,
   &brw_sf_unit,
   &brw_vs_unit, /* always required, enabled or not */
   &brw_clip_unit,
   &brw_gs_unit,

   /* Command packets:
    */
   &brw_invariant_state,
   &brw_state_base_address,

   &brw_binding_table_pointers,
   &brw_blend_constant_color,

   &brw_depthbuffer,

   &brw_polygon_stipple,
   &brw_polygon_stipple_offset,

   &brw_line_stipple,
   &brw_aa_line_parameters,

   &brw_psp_urb_cbs,

   &brw_drawing_rect,
   &brw_indices,
   &brw_index_buffer,
   &brw_vertices,

   &brw_constant_buffer
};

static const struct brw_tracked_state *gen6_atoms[] =
{
   &brw_vs_prog, /* must do before state base address */
   &brw_ff_gs_prog, /* must do before state base address */
   &brw_wm_prog, /* must do before state base address */

   &gen6_clip_vp,
   &gen6_sf_vp,

   /* Command packets: */

   /* must do before binding table pointers, cc state ptrs */
   &brw_state_base_address,

   &brw_cc_vp,
   &gen6_viewport_state, /* must do after *_vp stages */

   &gen6_urb,
   &gen6_blend_state, /* must do before cc unit */
   &gen6_color_calc_state, /* must do before cc unit */
   &gen6_depth_stencil_state, /* must do before cc unit */

   &gen6_vs_push_constants, /* Before vs_state */
   &gen6_wm_push_constants, /* Before wm_state */

   /* Surface state setup. Must come before the VS/WM unit. The binding
    * table upload must be last.
    */
   &brw_vs_pull_constants,
   &brw_vs_ubo_surfaces,
   &brw_wm_pull_constants,
   &brw_wm_ubo_surfaces,
   &gen6_renderbuffer_surfaces,
   &brw_texture_surfaces,
   &gen6_sol_surface,
   &brw_vs_binding_table,
   &gen6_gs_binding_table,
   &brw_wm_binding_table,

   &brw_fs_samplers,
   &brw_vs_samplers,
   &gen6_sampler_state,
   &gen6_multisample_state,

   &gen6_vs_state,
   &gen6_gs_state,
   &gen6_clip_state,
   &gen6_sf_state,
   &gen6_wm_state,

   &gen6_scissor_state,

   &gen6_binding_table_pointers,

   &brw_depthbuffer,

   &brw_polygon_stipple,
   &brw_polygon_stipple_offset,

   &brw_line_stipple,
   &brw_aa_line_parameters,

   &brw_drawing_rect,

   &brw_indices,
   &brw_index_buffer,
   &brw_vertices,
};

static const struct brw_tracked_state *gen7_atoms[] =
{
   &brw_vs_prog,
   &brw_gs_prog,
   &brw_wm_prog,

   /* Command packets: */

   /* must do before binding table pointers, cc state ptrs */
   &brw_state_base_address,

   &brw_cc_vp,
   &gen7_cc_viewport_state_pointer, /* must do after brw_cc_vp */
   &gen7_sf_clip_viewport,

   &gen7_push_constant_space,
   &gen7_urb,
   &gen6_blend_state, /* must do before cc unit */
   &gen6_color_calc_state, /* must do before cc unit */
   &gen6_depth_stencil_state, /* must do before cc unit */

   &gen6_vs_push_constants, /* Before vs_state */
   &gen7_gs_push_constants, /* Before gs_state */
   &gen6_wm_push_constants, /* Before wm_surfaces and constant_buffer */

   /* Surface state setup. Must come before the VS/WM unit. The binding
    * table upload must be last.
    */
   &brw_vs_pull_constants,
   &brw_vs_ubo_surfaces,
   &brw_vs_abo_surfaces,
   &brw_gs_pull_constants,
   &brw_gs_ubo_surfaces,
   &brw_gs_abo_surfaces,
   &brw_wm_pull_constants,
   &brw_wm_ubo_surfaces,
   &brw_wm_abo_surfaces,
   &gen6_renderbuffer_surfaces,
   &brw_texture_surfaces,
   &brw_vs_binding_table,
   &brw_gs_binding_table,
   &brw_wm_binding_table,

   &brw_fs_samplers,
   &brw_vs_samplers,
   &brw_gs_samplers,
   &gen6_multisample_state,

   &gen7_disable_stages,
   &gen7_vs_state,
   &gen7_gs_state,
   &gen7_sol_state,
   &gen7_clip_state,
   &gen7_sbe_state,
   &gen7_sf_state,
   &gen7_wm_state,
   &gen7_ps_state,

   &gen6_scissor_state,

   &gen7_depthbuffer,

   &brw_polygon_stipple,
   &brw_polygon_stipple_offset,

   &brw_line_stipple,
   &brw_aa_line_parameters,

   &brw_drawing_rect,

   &brw_indices,
   &brw_index_buffer,
   &brw_vertices,

   &haswell_cut_index,
};

static const struct brw_tracked_state *gen8_atoms[] =
{
   &brw_vs_prog,
   &brw_gs_prog,
   &brw_wm_prog,

   /* Command packets: */
   &gen8_state_base_address,

   &brw_cc_vp,
   &gen7_cc_viewport_state_pointer, /* must do after brw_cc_vp */
   &gen8_sf_clip_viewport,

   &gen7_push_constant_space,
   &gen7_urb,
   &gen8_blend_state,
   &gen6_color_calc_state,

   &gen6_vs_push_constants, /* Before vs_state */
   &gen7_gs_push_constants, /* Before gs_state */
   &gen6_wm_push_constants, /* Before wm_surfaces and constant_buffer */

   /* Surface state setup. Must come before the VS/WM unit. The binding
    * table upload must be last.
    */
   &brw_vs_pull_constants,
   &brw_vs_ubo_surfaces,
   &brw_vs_abo_surfaces,
   &brw_gs_pull_constants,
   &brw_gs_ubo_surfaces,
   &brw_gs_abo_surfaces,
   &brw_wm_pull_constants,
   &brw_wm_ubo_surfaces,
   &brw_wm_abo_surfaces,
   &gen6_renderbuffer_surfaces,
   &brw_texture_surfaces,
   &brw_vs_binding_table,
   &brw_gs_binding_table,
   &brw_wm_binding_table,

   &brw_fs_samplers,
   &brw_vs_samplers,
   &brw_gs_samplers,
   &gen8_multisample_state,

   &gen8_disable_stages,
   &gen8_vs_state,
   &gen8_gs_state,
   &gen8_sol_state,
   &gen6_clip_state,
   &gen8_raster_state,
   &gen8_sbe_state,
   &gen8_sf_state,
   &gen8_ps_blend,
   &gen8_ps_extra,
   &gen8_ps_state,
   &gen8_wm_depth_stencil,
   &gen8_wm_state,

   &gen6_scissor_state,

   &gen7_depthbuffer,

   &brw_polygon_stipple,
   &brw_polygon_stipple_offset,

   &brw_line_stipple,
   &brw_aa_line_parameters,

   &brw_drawing_rect,

   &gen8_vf_topology,

   &brw_indices,
   &gen8_index_buffer,
   &gen8_vertices,

   &haswell_cut_index,
};

static void
brw_upload_initial_gpu_state(struct brw_context *brw)
{
   /* On platforms with hardware contexts, we can set our initial GPU state
    * right away rather than doing it via state atoms.  This saves a small
    * amount of overhead on every draw call.
    */
   if (!brw->hw_ctx)
      return;

   brw_upload_invariant_state(brw);

   if (brw->gen >= 8) {
      gen8_emit_3dstate_sample_pattern(brw);
   }
}

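/* Per-context one-time setup: initialize the program caches, pick the atom
 * list for this hardware generation, sanity-check each atom, upload the
 * initial GPU state, and flag all state dirty so the first draw re-emits
 * everything.
 */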
void brw_init_state( struct brw_context *brw )
{
   struct gl_context *ctx = &brw->ctx;
   const struct brw_tracked_state **atoms;
   int num_atoms;

   brw_init_caches(brw);

   if (brw->gen >= 8) {
      atoms = gen8_atoms;
      num_atoms = ARRAY_SIZE(gen8_atoms);
   } else if (brw->gen == 7) {
      atoms = gen7_atoms;
      num_atoms = ARRAY_SIZE(gen7_atoms);
   } else if (brw->gen == 6) {
      atoms = gen6_atoms;
      num_atoms = ARRAY_SIZE(gen6_atoms);
   } else {
      atoms = gen4_atoms;
      num_atoms = ARRAY_SIZE(gen4_atoms);
   }

   brw->atoms = atoms;
   brw->num_atoms = num_atoms;

   while (num_atoms--) {
      assert((*atoms)->dirty.mesa |
             (*atoms)->dirty.brw |
             (*atoms)->dirty.cache);
      assert((*atoms)->emit);
      atoms++;
   }

   brw_upload_initial_gpu_state(brw);

   brw->state.dirty.mesa = ~0;
   brw->state.dirty.brw = ~0;

   /* Make sure that brw->state.dirty.brw has enough bits to hold all possible
    * dirty flags.
    */
   STATIC_ASSERT(BRW_NUM_STATE_BITS <= 8 * sizeof(brw->state.dirty.brw));

   ctx->DriverFlags.NewTransformFeedback = BRW_NEW_TRANSFORM_FEEDBACK;
   ctx->DriverFlags.NewTransformFeedbackProg = BRW_NEW_TRANSFORM_FEEDBACK;
   ctx->DriverFlags.NewRasterizerDiscard = BRW_NEW_RASTERIZER_DISCARD;
   ctx->DriverFlags.NewUniformBuffer = BRW_NEW_UNIFORM_BUFFER;
   ctx->DriverFlags.NewAtomicBuffer = BRW_NEW_ATOMIC_BUFFER;
}

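/* Tear down the program caches created by brw_init_state(). */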
void brw_destroy_state( struct brw_context *brw )
{
   brw_destroy_caches(brw);
}

/***********************************************************************
 */

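/* Return true if any of the dirty flags in "a" and "b" overlap, i.e. the
 * state an atom listens to is currently dirty.
 */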
static bool
check_state(const struct brw_state_flags *a, const struct brw_state_flags *b)
{
   return ((a->mesa & b->mesa) |
           (a->brw & b->brw) |
           (a->cache & b->cache)) != 0;
}

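/* OR the dirty flags of "b" into "a". */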
static void accumulate_state( struct brw_state_flags *a,
                              const struct brw_state_flags *b )
{
   a->mesa |= b->mesa;
   a->brw |= b->brw;
   a->cache |= b->cache;
}

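/* Compute the set of flags that differ between "a" and "b". */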
static void xor_states( struct brw_state_flags *result,
                        const struct brw_state_flags *a,
                        const struct brw_state_flags *b )
{
   result->mesa = a->mesa ^ b->mesa;
   result->brw = a->brw ^ b->brw;
   result->cache = a->cache ^ b->cache;
}

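/* Bookkeeping for INTEL_DEBUG=state: one entry per dirty bit, counting how
 * often that bit was flagged dirty at state-upload time.
 */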
struct dirty_bit_map {
   uint32_t bit;
   char *name;
   uint32_t count;
};

#define DEFINE_BIT(name) {name, #name, 0}

static struct dirty_bit_map mesa_bits[] = {
   DEFINE_BIT(_NEW_MODELVIEW),
   DEFINE_BIT(_NEW_PROJECTION),
   DEFINE_BIT(_NEW_TEXTURE_MATRIX),
   DEFINE_BIT(_NEW_COLOR),
   DEFINE_BIT(_NEW_DEPTH),
   DEFINE_BIT(_NEW_EVAL),
   DEFINE_BIT(_NEW_FOG),
   DEFINE_BIT(_NEW_HINT),
   DEFINE_BIT(_NEW_LIGHT),
   DEFINE_BIT(_NEW_LINE),
   DEFINE_BIT(_NEW_PIXEL),
   DEFINE_BIT(_NEW_POINT),
   DEFINE_BIT(_NEW_POLYGON),
   DEFINE_BIT(_NEW_POLYGONSTIPPLE),
   DEFINE_BIT(_NEW_SCISSOR),
   DEFINE_BIT(_NEW_STENCIL),
   DEFINE_BIT(_NEW_TEXTURE),
   DEFINE_BIT(_NEW_TRANSFORM),
   DEFINE_BIT(_NEW_VIEWPORT),
   DEFINE_BIT(_NEW_ARRAY),
   DEFINE_BIT(_NEW_RENDERMODE),
   DEFINE_BIT(_NEW_BUFFERS),
   DEFINE_BIT(_NEW_CURRENT_ATTRIB),
   DEFINE_BIT(_NEW_MULTISAMPLE),
   DEFINE_BIT(_NEW_TRACK_MATRIX),
   DEFINE_BIT(_NEW_PROGRAM),
   DEFINE_BIT(_NEW_PROGRAM_CONSTANTS),
   DEFINE_BIT(_NEW_BUFFER_OBJECT),
   DEFINE_BIT(_NEW_FRAG_CLAMP),
   DEFINE_BIT(_NEW_VARYING_VP_INPUTS),
   {0, 0, 0}
};

static struct dirty_bit_map brw_bits[] = {
   DEFINE_BIT(BRW_NEW_URB_FENCE),
   DEFINE_BIT(BRW_NEW_FRAGMENT_PROGRAM),
   DEFINE_BIT(BRW_NEW_GEOMETRY_PROGRAM),
   DEFINE_BIT(BRW_NEW_VERTEX_PROGRAM),
   DEFINE_BIT(BRW_NEW_CURBE_OFFSETS),
   DEFINE_BIT(BRW_NEW_REDUCED_PRIMITIVE),
   DEFINE_BIT(BRW_NEW_PRIMITIVE),
   DEFINE_BIT(BRW_NEW_CONTEXT),
   DEFINE_BIT(BRW_NEW_PSP),
   DEFINE_BIT(BRW_NEW_SURFACES),
   DEFINE_BIT(BRW_NEW_VS_BINDING_TABLE),
   DEFINE_BIT(BRW_NEW_GS_BINDING_TABLE),
   DEFINE_BIT(BRW_NEW_PS_BINDING_TABLE),
   DEFINE_BIT(BRW_NEW_INDICES),
   DEFINE_BIT(BRW_NEW_VERTICES),
   DEFINE_BIT(BRW_NEW_BATCH),
   DEFINE_BIT(BRW_NEW_INDEX_BUFFER),
   DEFINE_BIT(BRW_NEW_VS_CONSTBUF),
   DEFINE_BIT(BRW_NEW_GS_CONSTBUF),
   DEFINE_BIT(BRW_NEW_PROGRAM_CACHE),
   DEFINE_BIT(BRW_NEW_STATE_BASE_ADDRESS),
   DEFINE_BIT(BRW_NEW_VUE_MAP_VS),
   DEFINE_BIT(BRW_NEW_VUE_MAP_GEOM_OUT),
   DEFINE_BIT(BRW_NEW_TRANSFORM_FEEDBACK),
   DEFINE_BIT(BRW_NEW_RASTERIZER_DISCARD),
   DEFINE_BIT(BRW_NEW_STATS_WM),
   DEFINE_BIT(BRW_NEW_UNIFORM_BUFFER),
   DEFINE_BIT(BRW_NEW_ATOMIC_BUFFER),
   DEFINE_BIT(BRW_NEW_META_IN_PROGRESS),
   DEFINE_BIT(BRW_NEW_INTERPOLATION_MAP),
   DEFINE_BIT(BRW_NEW_PUSH_CONSTANT_ALLOCATION),
   DEFINE_BIT(BRW_NEW_NUM_SAMPLES),
   {0, 0, 0}
};

static struct dirty_bit_map cache_bits[] = {
   DEFINE_BIT(CACHE_NEW_CC_VP),
   DEFINE_BIT(CACHE_NEW_CC_UNIT),
   DEFINE_BIT(CACHE_NEW_WM_PROG),
   DEFINE_BIT(CACHE_NEW_BLORP_BLIT_PROG),
   DEFINE_BIT(CACHE_NEW_BLORP_CONST_COLOR_PROG),
   DEFINE_BIT(CACHE_NEW_SAMPLER),
   DEFINE_BIT(CACHE_NEW_WM_UNIT),
   DEFINE_BIT(CACHE_NEW_SF_PROG),
   DEFINE_BIT(CACHE_NEW_SF_VP),
   DEFINE_BIT(CACHE_NEW_SF_UNIT),
   DEFINE_BIT(CACHE_NEW_VS_UNIT),
   DEFINE_BIT(CACHE_NEW_VS_PROG),
   DEFINE_BIT(CACHE_NEW_FF_GS_UNIT),
   DEFINE_BIT(CACHE_NEW_FF_GS_PROG),
   DEFINE_BIT(CACHE_NEW_GS_PROG),
   DEFINE_BIT(CACHE_NEW_CLIP_VP),
   DEFINE_BIT(CACHE_NEW_CLIP_UNIT),
   DEFINE_BIT(CACHE_NEW_CLIP_PROG),
   {0, 0, 0}
};

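/* Increment the count of every bit in the map that is set in "bits". */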
static void
brw_update_dirty_count(struct dirty_bit_map *bit_map, int32_t bits)
{
   int i;

   for (i = 0; i < 32; i++) {
      if (bit_map[i].bit == 0)
         return;

      if (bit_map[i].bit & bits)
         bit_map[i].count++;
   }
}

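/* Dump the accumulated per-bit counts to stderr. */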
static void
brw_print_dirty_count(struct dirty_bit_map *bit_map)
{
   int i;

   for (i = 0; i < 32; i++) {
      if (bit_map[i].bit == 0)
         return;

      fprintf(stderr, "0x%08x: %12d (%s)\n",
              bit_map[i].bit, bit_map[i].count, bit_map[i].name);
   }
}

/***********************************************************************
 * Emit all state:
 */
void brw_upload_state(struct brw_context *brw)
{
   struct gl_context *ctx = &brw->ctx;
   struct brw_state_flags *state = &brw->state.dirty;
   int i;
   static int dirty_count = 0;

   state->mesa |= brw->NewGLState;
   brw->NewGLState = 0;

   state->brw |= ctx->NewDriverState;
   ctx->NewDriverState = 0;

   if (0) {
      /* Always re-emit all state. */
      state->mesa |= ~0;
      state->brw |= ~0;
      state->cache |= ~0;
   }

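   /* Detect changes to the bound fragment/geometry/vertex programs, meta
    * operations, and sample count, and flag the corresponding BRW_NEW_*
    * dirty bits.
    */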
   if (brw->fragment_program != ctx->FragmentProgram._Current) {
      brw->fragment_program = ctx->FragmentProgram._Current;
      brw->state.dirty.brw |= BRW_NEW_FRAGMENT_PROGRAM;
   }

   if (brw->geometry_program != ctx->GeometryProgram._Current) {
      brw->geometry_program = ctx->GeometryProgram._Current;
      brw->state.dirty.brw |= BRW_NEW_GEOMETRY_PROGRAM;
   }

   if (brw->vertex_program != ctx->VertexProgram._Current) {
      brw->vertex_program = ctx->VertexProgram._Current;
      brw->state.dirty.brw |= BRW_NEW_VERTEX_PROGRAM;
   }

   if (brw->meta_in_progress != _mesa_meta_in_progress(ctx)) {
      brw->meta_in_progress = _mesa_meta_in_progress(ctx);
      brw->state.dirty.brw |= BRW_NEW_META_IN_PROGRESS;
   }

   if (brw->num_samples != ctx->DrawBuffer->Visual.samples) {
      brw->num_samples = ctx->DrawBuffer->Visual.samples;
      brw->state.dirty.brw |= BRW_NEW_NUM_SAMPLES;
   }

   if ((state->mesa | state->cache | state->brw) == 0)
      return;

   if (unlikely(INTEL_DEBUG)) {
      /* Debug version which enforces various sanity checks on the
       * state flags which are generated and checked to help ensure
       * state atoms are ordered correctly in the list.
       */
      struct brw_state_flags examined, prev;
      memset(&examined, 0, sizeof(examined));
      prev = *state;

      for (i = 0; i < brw->num_atoms; i++) {
         const struct brw_tracked_state *atom = brw->atoms[i];
         struct brw_state_flags generated;

         if (check_state(state, &atom->dirty)) {
            atom->emit(brw);
         }

         accumulate_state(&examined, &atom->dirty);

         /* generated = (prev ^ state)
          * if (examined & generated)
          *    fail;
          */
         xor_states(&generated, &prev, state);
         assert(!check_state(&examined, &generated));
         prev = *state;
      }
   }
   else {
      for (i = 0; i < brw->num_atoms; i++) {
         const struct brw_tracked_state *atom = brw->atoms[i];

         if (check_state(state, &atom->dirty)) {
            atom->emit(brw);
         }
      }
   }

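   /* Under INTEL_DEBUG=state, accumulate per-bit statistics about which
    * dirty flags forced state to be re-emitted, and dump them every 1000
    * uploads.
    */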
   if (unlikely(INTEL_DEBUG & DEBUG_STATE)) {
      STATIC_ASSERT(ARRAY_SIZE(brw_bits) == BRW_NUM_STATE_BITS + 1);
      STATIC_ASSERT(ARRAY_SIZE(cache_bits) == BRW_MAX_CACHE + 1);

      brw_update_dirty_count(mesa_bits, state->mesa);
      brw_update_dirty_count(brw_bits, state->brw);
      brw_update_dirty_count(cache_bits, state->cache);
      if (dirty_count++ % 1000 == 0) {
         brw_print_dirty_count(mesa_bits);
         brw_print_dirty_count(brw_bits);
         brw_print_dirty_count(cache_bits);
         fprintf(stderr, "\n");
      }
   }
}

/**
 * Clear dirty bits to account for the fact that the state emitted by
 * brw_upload_state() has been committed to the hardware.  This is a separate
 * call from brw_upload_state() because it's possible that after the call to
 * brw_upload_state(), we will discover that we've run out of aperture space,
 * and need to rewind the batch buffer to the state it had before the
 * brw_upload_state() call.
 */
void
brw_clear_dirty_bits(struct brw_context *brw)
{
   struct brw_state_flags *state = &brw->state.dirty;
   memset(state, 0, sizeof(*state));
}