vk/cmd_buffer: Move batch buffer padding to anv_batch_bo_finish()
author: Jason Ekstrand <jason.ekstrand@intel.com>
Thu, 16 Jul 2015 00:10:58 +0000 (17:10 -0700)
committer: Jason Ekstrand <jason.ekstrand@intel.com>
Thu, 16 Jul 2015 00:11:04 +0000 (17:11 -0700)
src/vulkan/anv_cmd_buffer.c

index 8ccecd8f63b11f4f702bbd54217a970caed771df..0d24d0bda028c6ab151e46af234598de46553a8d 100644 (file)
@@ -241,6 +241,10 @@ anv_batch_bo_start(struct anv_batch_bo *bbo, struct anv_batch *batch,
 static void
 anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
 {
+   /* Round batch up to an even number of dwords. */
+   if ((batch->next - batch->start) & 4)
+      anv_batch_emit(batch, GEN8_MI_NOOP);
+
    assert(batch->start == bbo->bo.map);
    bbo->length = batch->next - batch->start;
    VG(VALGRIND_CHECK_MEM_IS_DEFINED(batch->start, bbo->length));
@@ -283,12 +287,6 @@ anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
       .BatchBufferStartAddress = { &new_bbo->bo, 0 },
    );
 
-   /* Pad out to a 2-dword aligned boundary with zeros */
-   if ((uintptr_t)batch->next % 8 != 0) {
-      *(uint32_t *)batch->next = 0;
-      batch->next += 4;
-   }
-
    anv_batch_bo_finish(cmd_buffer->last_batch_bo, batch);
 
    new_bbo->prev_batch_bo = old_bbo;
@@ -607,10 +605,6 @@ VkResult anv_EndCommandBuffer(
 
    anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);
 
-   /* Round batch up to an even number of dwords. */
-   if ((batch->next - batch->start) & 4)
-      anv_batch_emit(batch, GEN8_MI_NOOP);
-
    anv_batch_bo_finish(cmd_buffer->last_batch_bo, &cmd_buffer->batch);
    cmd_buffer->surface_batch_bo->num_relocs =
       cmd_buffer->surface_relocs.num_relocs - cmd_buffer->surface_batch_bo->first_reloc;