use movdqu instead of movdqa for unaligned load avoiding a segfault (bug 10265)
author Roland Scheidegger <sroland@tungstengraphics.com>
Tue, 13 Mar 2007 12:44:23 +0000 (13:44 +0100)
committer Roland Scheidegger <sroland@tungstengraphics.com>
Tue, 13 Mar 2007 12:44:23 +0000 (13:44 +0100)
src/mesa/x86/read_rgba_span_x86.S

index a69028381644ce93fd8f19a72d2fac7a8799afd1..3cbcd719960e13b7d8739e9cdb8a2404677411fa 100644 (file)
@@ -369,7 +369,7 @@ _generic_read_RGBA_span_BGRA8888_REV_SSE2:
        movdqa  mask, %xmm1
        movdqa  mask+16, %xmm2
  */
-       LOAD_MASK(movdqa,%xmm1,%xmm2)
+       LOAD_MASK(movdqu,%xmm1,%xmm2)
 
        movl    12(%esp), %ebx  /* source pointer */
        movl    20(%esp), %edx  /* number of pixels to copy */