dnl Optional flags: check for compiler support.
dnl
AX_CHECK_COMPILE_FLAG([-msse4.1], [SSE41_SUPPORTED=1], [SSE41_SUPPORTED=0])
+if test "x$SSE41_SUPPORTED" = x1; then
+    DEFINES="$DEFINES -DUSE_SSE41"
+fi
AM_CONDITIONAL([SSE41_SUPPORTED], [test x$SSE41_SUPPORTED = x1])
dnl
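The AM_CONDITIONAL half of the check is not exercised in the excerpt above, but it is what lets the build compile only selected files with -msse4.1, so the rest of the driver keeps running on pre-SSE4.1 CPUs. A minimal Makefile.am sketch of that idea (the library and file names here are hypothetical, not from the patch):

if SSE41_SUPPORTED
# Hypothetical convenience library: only these sources are built with
# -msse4.1; everything else stays compatible with older CPUs.
noinst_LTLIBRARIES += libsse41_helpers.la
libsse41_helpers_la_SOURCES = streaming_load.c
libsse41_helpers_la_CFLAGS = $(AM_CFLAGS) -msse4.1
endif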
/**
 * "Map" a buffer by copying it to an untiled temporary using MOVNTDQA.
 */
+#if defined(USE_SSE41)
static void
intel_miptree_map_movntdqa(struct brw_context *brw, struct intel_mipmap_tree *mt,
                           struct intel_miptree_map *map,
                           unsigned int level, unsigned int slice)
{
   /* ... streaming-load copy into the temporary elided ... */
}

static void
intel_miptree_unmap_movntdqa(struct brw_context *brw, struct intel_mipmap_tree *mt,
                             struct intel_miptree_map *map,
                             unsigned int level, unsigned int slice)
{
   /* ... free the temporary, then: */
   map->buffer = NULL;
   map->ptr = NULL;
}
+#endif
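The copy loop elided above is where MOVNTDQA earns its keep: it performs streaming (non-temporal) loads, which are far faster than ordinary loads from the write-combining memory a GTT mapping gives you. A self-contained sketch of such a copy, assuming a 16-byte-aligned source and a size that is a multiple of 16; streaming_load_copy is an illustrative name, not the patch's helper:

#include <smmintrin.h>  /* SSE4.1: _mm_stream_load_si128 (MOVNTDQA) */
#include <stddef.h>

/* Hypothetical helper: copy 'bytes' (a multiple of 16) from a 16-byte-
 * aligned source.  The non-temporal load hint pays off on
 * write-combining memory; on ordinary write-back memory it behaves
 * like a plain load. */
static void
streaming_load_copy(void *dst, const void *src, size_t bytes)
{
   __m128i *d = dst;
   const __m128i *s = src;

   for (size_t i = 0; i < bytes / 16; i++)
      _mm_store_si128(&d[i], _mm_stream_load_si128((__m128i *)&s[i]));
}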
static void
intel_miptree_map_s8(struct brw_context *brw,
                     ...

...
              mt->bo->size >= brw->max_gtt_map_object_size) {
      assert(can_blit_slice(mt, level, slice));
      intel_miptree_map_blit(brw, mt, map, level, slice);
+#if defined(USE_SSE41)
   } else if (!(mode & GL_MAP_WRITE_BIT) && !mt->compressed && cpu_has_sse4_1) {
      intel_miptree_map_movntdqa(brw, mt, map, level, slice);
+#endif
   } else {
      intel_miptree_map_gtt(brw, mt, map, level, slice);
   }
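Note the two layers of protection: USE_SSE41 handles build time (can the compiler emit MOVNTDQA at all?), while cpu_has_sse4_1 handles run time (does this machine actually support it?). The excerpt does not show how cpu_has_sse4_1 is computed; one conventional way to write such a check with GCC/Clang's <cpuid.h> is sketched below (detect_sse4_1 is an illustrative name):

#include <cpuid.h>     /* GCC/Clang built-ins: __get_cpuid(), bit_SSE4_1 */
#include <stdbool.h>

static bool
detect_sse4_1(void)
{
   unsigned int eax, ebx, ecx, edx;

   /* CPUID leaf 1 reports SSE4.1 support in ECX bit 19 (bit_SSE4_1). */
   if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
      return false;
   return (ecx & bit_SSE4_1) != 0;
}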
...
      intel_miptree_unmap_depthstencil(brw, mt, map, level, slice);
   } else if (map->mt) {
      intel_miptree_unmap_blit(brw, mt, map, level, slice);
+#if defined(USE_SSE41)
   } else if (map->buffer && cpu_has_sse4_1) {
      intel_miptree_unmap_movntdqa(brw, mt, map, level, slice);
+#endif
   } else {
      intel_miptree_unmap_gtt(brw, mt, map, level, slice);
   }
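The unmap dispatch mirrors the map dispatch above: of the paths shown, only the movntdqa map leaves map->buffer pointing at a CPU-side temporary, so map->buffer plus the same run-time CPU check is enough to route teardown to intel_miptree_unmap_movntdqa, which frees that temporary. Keeping both call sites under the same USE_SSE41 guard as the function definitions is what keeps the driver building and linking when the compiler lacks -msse4.1 support.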