   intel_batchbuffer_free(brw);
-   drm_intel_bo_unreference(brw->first_post_swapbuffers_batch);
-   brw->first_post_swapbuffers_batch = NULL;
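+   /* Drop the references we hold on the batches saved for swap throttling. */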
+   drm_intel_bo_unreference(brw->throttle_batch[1]);
+   drm_intel_bo_unreference(brw->throttle_batch[0]);
+   brw->throttle_batch[1] = NULL;
+   brw->throttle_batch[0] = NULL;
   driDestroyOptionCache(&brw->optionCache);
    * the swap, and getting our hands on that doesn't seem worth it,
    * so we just use the first batch we emitted after the last swap.
    */
-   if (brw->need_swap_throttle && brw->first_post_swapbuffers_batch) {
-      if (!brw->disable_throttling)
-         drm_intel_bo_wait_rendering(brw->first_post_swapbuffers_batch);
-      drm_intel_bo_unreference(brw->first_post_swapbuffers_batch);
-      brw->first_post_swapbuffers_batch = NULL;
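+   /* Wait on the batch recorded after the swap before last, if any, then
+    * age the most recent swap's batch into that slot, so we no longer stall
+    * on the frame we just submitted.
+    */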
+   if (brw->need_swap_throttle && brw->throttle_batch[0]) {
+      if (brw->throttle_batch[1]) {
+         if (!brw->disable_throttling)
+            drm_intel_bo_wait_rendering(brw->throttle_batch[1]);
+         drm_intel_bo_unreference(brw->throttle_batch[1]);
+      }
+      brw->throttle_batch[1] = brw->throttle_batch[0];
+      brw->throttle_batch[0] = NULL;
      brw->need_swap_throttle = false;
      /* Throttling here is more precise than the throttle ioctl, so skip it */
      brw->need_flush_throttle = false;
   bool front_buffer_dirty;
   /** Framerate throttling: @{ */
-   drm_intel_bo *first_post_swapbuffers_batch;
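+   /* throttle_batch[0] is the first batch flushed after the most recent
+    * SwapBuffers; throttle_batch[1] is the corresponding batch from the
+    * swap before that, which the swap throttle waits on.
+    */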
+   drm_intel_bo *throttle_batch[2];
   /* Limit the number of outstanding SwapBuffers by waiting for an earlier
    * frame of rendering to complete. This gives a very precise cap to the
brw_new_batch(struct brw_context *brw)
{
   /* Create a new batchbuffer and reset the associated state: */
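+   /* Drop the relocation entries recorded against the old batch bo before
+    * a fresh one is set up.
+    */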
+   drm_intel_gem_bo_clear_relocs(brw->batch.bo, 0);
   intel_batchbuffer_reset(brw);
   /* If the kernel supports hardware contexts, then most hardware state is
   if (brw->batch.used == 0)
      return 0;
-   if (brw->first_post_swapbuffers_batch == NULL) {
-      brw->first_post_swapbuffers_batch = brw->batch.bo;
-      drm_intel_bo_reference(brw->first_post_swapbuffers_batch);
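+   /* Remember the first batch flushed after the last swap so the swap
+    * throttle has a buffer to wait on later.
+    */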
+   if (brw->throttle_batch[0] == NULL) {
+      brw->throttle_batch[0] = brw->batch.bo;
+      drm_intel_bo_reference(brw->throttle_batch[0]);
   }
   if (unlikely(INTEL_DEBUG & DEBUG_BATCH)) {