etnaviv: enable texture upload memory throttling
diff --git a/src/gallium/drivers/etnaviv/etnaviv_format.c b/src/gallium/drivers/etnaviv/etnaviv_format.c
index 15ad7afd4c1004d770dfd9a216b6f965fcb6e9b4..1e1486de8c0b19218adc7760ca98677731dae738 100644
--- a/src/gallium/drivers/etnaviv/etnaviv_format.c
+++ b/src/gallium/drivers/etnaviv/etnaviv_format.c
 struct etna_format {
    unsigned vtx;
    unsigned tex;
-   unsigned rs;
+   unsigned pe;
    bool present;
-   const unsigned char tex_swiz[4];
 };
 
-#define RS_FORMAT_NONE ~0
+#define PE_FORMAT_NONE ~0
 
-#define RS_FORMAT_MASK        0xf
-#define RS_FORMAT(x)          ((x) & RS_FORMAT_MASK)
-#define RS_FORMAT_RB_SWAP     0x10
+#define PE_FORMAT_MASK        0x7f
+#define PE_FORMAT(x)          ((x) & PE_FORMAT_MASK)
+#define PE_FORMAT_RB_SWAP     0x80
 
-#define RS_FORMAT_X8B8G8R8    (RS_FORMAT_X8R8G8B8 | RS_FORMAT_RB_SWAP)
-#define RS_FORMAT_A8B8G8R8    (RS_FORMAT_A8R8G8B8 | RS_FORMAT_RB_SWAP)
+#define PE_FORMAT_X8B8G8R8    (PE_FORMAT_X8R8G8B8 | PE_FORMAT_RB_SWAP)
+#define PE_FORMAT_A8B8G8R8    (PE_FORMAT_A8R8G8B8 | PE_FORMAT_RB_SWAP)
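
To make the packing above concrete, here is a minimal sketch (reusing only the macros
defined here, and assuming the base PE format codes fit below bit 7, as the 7-bit mask
implies) of how a packed .pe value splits back into the hardware format and the
red/blue-swap flag:

   unsigned pe      = PE_FORMAT_A8B8G8R8;        /* PE_FORMAT_A8R8G8B8 | PE_FORMAT_RB_SWAP */
   unsigned hw_fmt  = PE_FORMAT(pe);             /* -> PE_FORMAT_A8R8G8B8 */
   bool     rb_swap = pe & PE_FORMAT_RB_SWAP;    /* -> true, channels need swapping */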
 
 #define TS_SAMPLER_FORMAT_NONE      ETNA_NO_MATCH
 
-#define SWIZ(x,y,z,w) {    \
-   PIPE_SWIZZLE_##x,       \
-   PIPE_SWIZZLE_##y,       \
-   PIPE_SWIZZLE_##z,       \
-   PIPE_SWIZZLE_##w        \
-}
-
 /* vertex + texture */
-#define VT(pipe, vtxfmt, texfmt, texswiz, rsfmt)          \
+#define VT(pipe, vtxfmt, texfmt, rsfmt)          \
    [PIPE_FORMAT_##pipe] = {                               \
       .vtx = FE_DATA_TYPE_##vtxfmt, \
       .tex = TEXTURE_FORMAT_##texfmt,                     \
-      .rs = RS_FORMAT_##rsfmt,                            \
+      .pe = PE_FORMAT_##rsfmt,                            \
       .present = 1,                                       \
-      .tex_swiz = texswiz,                                \
    }
 
 /* texture-only */
-#define _T(pipe, fmt, swiz, rsfmt) \
+#define _T(pipe, fmt, rsfmt) \
    [PIPE_FORMAT_##pipe] = {        \
       .vtx = ETNA_NO_MATCH,        \
       .tex = TEXTURE_FORMAT_##fmt, \
-      .rs = RS_FORMAT_##rsfmt,     \
+      .pe = PE_FORMAT_##rsfmt,     \
       .present = 1,                \
-      .tex_swiz = swiz,            \
    }
 
 /* vertex-only */
@@ -87,103 +77,107 @@ struct etna_format {
    [PIPE_FORMAT_##pipe] = {                            \
       .vtx = FE_DATA_TYPE_##fmt, \
       .tex = ETNA_NO_MATCH,                            \
-      .rs = RS_FORMAT_##rsfmt,                         \
+      .pe = PE_FORMAT_##rsfmt,                         \
       .present = 1,                                    \
    }
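
For reference, each of these entry macros just fills one designated initializer in the
formats[] table below; e.g. the VT() line for PIPE_FORMAT_R8_UNORM expands to roughly:

   [PIPE_FORMAT_R8_UNORM] = {
      .vtx = FE_DATA_TYPE_UNSIGNED_BYTE,
      .tex = TEXTURE_FORMAT_L8,
      .pe  = PE_FORMAT_R8,
      .present = 1,
   },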
 
 static struct etna_format formats[PIPE_FORMAT_COUNT] = {
    /* 8-bit */
-   VT(R8_UNORM,   UNSIGNED_BYTE, L8, SWIZ(X, 0, 0, 1), NONE),
-   V_(R8_SNORM,   BYTE,          NONE),
-   V_(R8_UINT,    UNSIGNED_BYTE, NONE),
-   V_(R8_SINT,    BYTE,          NONE),
+   VT(R8_UNORM,   UNSIGNED_BYTE, L8,                        R8),
+   VT(R8_SNORM,   BYTE,          EXT_R8_SNORM | EXT_FORMAT, NONE),
+   VT(R8_UINT,    BYTE_I,        EXT_R8I | EXT_FORMAT,      R8I),
+   VT(R8_SINT,    BYTE_I,        EXT_R8I | EXT_FORMAT,      R8I),
    V_(R8_USCALED, UNSIGNED_BYTE, NONE),
    V_(R8_SSCALED, BYTE,          NONE),
 
-   _T(A8_UNORM, A8, SWIZ(X, Y, Z, W), NONE),
-   _T(L8_UNORM, L8, SWIZ(X, Y, Z, W), NONE),
-   _T(I8_UNORM, I8, SWIZ(X, Y, Z, W), NONE),
+   _T(A8_UNORM, A8, NONE),
+   _T(L8_UNORM, L8, NONE),
+   _T(I8_UNORM, I8, NONE),
 
    /* 16-bit */
    V_(R16_UNORM,   UNSIGNED_SHORT, NONE),
    V_(R16_SNORM,   SHORT,          NONE),
-   V_(R16_UINT,    UNSIGNED_SHORT, NONE),
-   V_(R16_SINT,    SHORT,          NONE),
+   VT(R16_UINT,    SHORT_I,        EXT_R16I | EXT_FORMAT, R16I),
+   VT(R16_SINT,    SHORT_I,        EXT_R16I | EXT_FORMAT, R16I),
    V_(R16_USCALED, UNSIGNED_SHORT, NONE),
    V_(R16_SSCALED, SHORT,          NONE),
-   V_(R16_FLOAT,   HALF_FLOAT,     NONE),
+   VT(R16_FLOAT,   HALF_FLOAT,     EXT_R16F | EXT_FORMAT, R16F),
 
-   _T(B4G4R4A4_UNORM, A4R4G4B4, SWIZ(X, Y, Z, W), A4R4G4B4),
-   _T(B4G4R4X4_UNORM, X4R4G4B4, SWIZ(X, Y, Z, W), X4R4G4B4),
+   _T(B4G4R4A4_UNORM, A4R4G4B4, A4R4G4B4),
+   _T(B4G4R4X4_UNORM, X4R4G4B4, X4R4G4B4),
 
-   _T(L8A8_UNORM, A8L8, SWIZ(X, Y, Z, W), NONE),
+   _T(L8A8_UNORM, A8L8, NONE),
 
-   _T(Z16_UNORM,      D16,      SWIZ(X, Y, Z, W), A4R4G4B4),
-   _T(B5G6R5_UNORM,   R5G6B5,   SWIZ(X, Y, Z, W), R5G6B5),
-   _T(B5G5R5A1_UNORM, A1R5G5B5, SWIZ(X, Y, Z, W), A1R5G5B5),
-   _T(B5G5R5X1_UNORM, X1R5G5B5, SWIZ(X, Y, Z, W), X1R5G5B5),
+   _T(Z16_UNORM,      D16,      NONE),
+   _T(B5G6R5_UNORM,   R5G6B5,   R5G6B5),
+   _T(B5G5R5A1_UNORM, A1R5G5B5, A1R5G5B5),
+   _T(B5G5R5X1_UNORM, X1R5G5B5, X1R5G5B5),
 
-   VT(R8G8_UNORM,   UNSIGNED_BYTE,  EXT_G8R8 | EXT_FORMAT, SWIZ(X, Y, 0, 1), NONE),
-   V_(R8G8_SNORM,   BYTE,           NONE),
-   V_(R8G8_UINT,    UNSIGNED_BYTE,  NONE),
-   V_(R8G8_SINT,    BYTE,           NONE),
+   VT(R8G8_UNORM,   UNSIGNED_BYTE,  EXT_G8R8 | EXT_FORMAT,       G8R8),
+   VT(R8G8_SNORM,   BYTE,           EXT_G8R8_SNORM | EXT_FORMAT, NONE),
+   VT(R8G8_UINT,    BYTE_I,         EXT_G8R8I | EXT_FORMAT,      G8R8I),
+   VT(R8G8_SINT,    BYTE_I,         EXT_G8R8I | EXT_FORMAT,      G8R8I),
    V_(R8G8_USCALED, UNSIGNED_BYTE,  NONE),
    V_(R8G8_SSCALED, BYTE,           NONE),
 
    /* 24-bit */
    V_(R8G8B8_UNORM,   UNSIGNED_BYTE, NONE),
    V_(R8G8B8_SNORM,   BYTE,          NONE),
-   V_(R8G8B8_UINT,    UNSIGNED_BYTE, NONE),
-   V_(R8G8B8_SINT,    BYTE,          NONE),
+   V_(R8G8B8_UINT,    BYTE_I,        NONE),
+   V_(R8G8B8_SINT,    BYTE_I,        NONE),
    V_(R8G8B8_USCALED, UNSIGNED_BYTE, NONE),
    V_(R8G8B8_SSCALED, BYTE,          NONE),
 
    /* 32-bit */
    V_(R32_UNORM,   UNSIGNED_INT, NONE),
    V_(R32_SNORM,   INT,          NONE),
-   V_(R32_SINT,    INT,          NONE),
-   V_(R32_UINT,    UNSIGNED_INT, NONE),
+   VT(R32_SINT,    FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
+   VT(R32_UINT,    FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
    V_(R32_USCALED, UNSIGNED_INT, NONE),
    V_(R32_SSCALED, INT,          NONE),
-   V_(R32_FLOAT,   FLOAT,        NONE),
+   VT(R32_FLOAT,   FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
    V_(R32_FIXED,   FIXED,        NONE),
 
    V_(R16G16_UNORM,   UNSIGNED_SHORT, NONE),
    V_(R16G16_SNORM,   SHORT,          NONE),
-   V_(R16G16_UINT,    UNSIGNED_SHORT, NONE),
-   V_(R16G16_SINT,    SHORT,          NONE),
+   VT(R16G16_UINT,    SHORT_I,        EXT_G16R16I | EXT_FORMAT, G16R16I),
+   VT(R16G16_SINT,    SHORT_I,        EXT_G16R16I | EXT_FORMAT, G16R16I),
    V_(R16G16_USCALED, UNSIGNED_SHORT, NONE),
    V_(R16G16_SSCALED, SHORT,          NONE),
-   V_(R16G16_FLOAT,   HALF_FLOAT,     NONE),
+   VT(R16G16_FLOAT,   HALF_FLOAT,     EXT_G16R16F | EXT_FORMAT, G16R16F),
 
    V_(A8B8G8R8_UNORM,   UNSIGNED_BYTE, NONE),
 
-   VT(R8G8B8A8_UNORM,   UNSIGNED_BYTE, A8B8G8R8, SWIZ(X, Y, Z, W), A8B8G8R8),
-   V_(R8G8B8A8_SNORM,   BYTE,          A8B8G8R8),
-   _T(R8G8B8X8_UNORM,   X8B8G8R8,      SWIZ(X, Y, Z, W), X8B8G8R8),
-   V_(R8G8B8A8_UINT,    UNSIGNED_BYTE, A8B8G8R8),
-   V_(R8G8B8A8_SINT,    BYTE,          A8B8G8R8),
+   VT(R8G8B8A8_UNORM,   UNSIGNED_BYTE, A8B8G8R8, A8B8G8R8),
+   VT(R8G8B8A8_SNORM,   BYTE,          EXT_A8B8G8R8_SNORM | EXT_FORMAT, NONE),
+   _T(R8G8B8X8_UNORM,   X8B8G8R8,      X8B8G8R8),
+   _T(R8G8B8X8_SNORM,                  EXT_X8B8G8R8_SNORM | EXT_FORMAT, NONE),
+   VT(R8G8B8A8_UINT,    BYTE_I,        EXT_A8B8G8R8I | EXT_FORMAT,      A8B8G8R8I),
+   VT(R8G8B8A8_SINT,    BYTE_I,        EXT_A8B8G8R8I | EXT_FORMAT,      A8B8G8R8I),
    V_(R8G8B8A8_USCALED, UNSIGNED_BYTE, A8B8G8R8),
    V_(R8G8B8A8_SSCALED, BYTE,          A8B8G8R8),
 
-   _T(B8G8R8A8_UNORM, A8R8G8B8, SWIZ(X, Y, Z, W), A8R8G8B8),
-   _T(B8G8R8X8_UNORM, X8R8G8B8, SWIZ(X, Y, Z, W), X8R8G8B8),
-   _T(B8G8R8A8_SRGB,  A8R8G8B8, SWIZ(X, Y, Z, W), A8R8G8B8),
-   _T(B8G8R8X8_SRGB,  X8R8G8B8, SWIZ(X, Y, Z, W), X8R8G8B8),
+   _T(B8G8R8A8_UNORM, A8R8G8B8, A8R8G8B8),
+   _T(B8G8R8X8_UNORM, X8R8G8B8, X8R8G8B8),
 
-   V_(R10G10B10A2_UNORM,   UNSIGNED_INT_10_10_10_2, NONE),
-   V_(R10G10B10A2_SNORM,   INT_10_10_10_2,          NONE),
-   V_(R10G10B10A2_USCALED, UNSIGNED_INT_10_10_10_2, NONE),
-   V_(R10G10B10A2_SSCALED, INT_10_10_10_2,          NONE),
+   VT(R10G10B10A2_UNORM,   UNSIGNED_INT_2_10_10_10_REV, EXT_A2B10G10R10 | EXT_FORMAT, A2B10G10R10),
+   _T(R10G10B10X2_UNORM,                                EXT_A2B10G10R10 | EXT_FORMAT, A2B10G10R10),
+   V_(R10G10B10A2_SNORM,   INT_2_10_10_10_REV,          NONE),
+   _T(R10G10B10A2_UINT,                                 EXT_A2B10G10R10UI | EXT_FORMAT, A2B10G10R10UI),
+   V_(R10G10B10A2_USCALED, UNSIGNED_INT_2_10_10_10_REV, NONE),
+   V_(R10G10B10A2_SSCALED, INT_2_10_10_10_REV,          NONE),
 
-   _T(X8Z24_UNORM,       D24X8, SWIZ(X, Y, Z, W), A8R8G8B8),
-   _T(S8_UINT_Z24_UNORM, D24X8, SWIZ(X, Y, Z, W), A8R8G8B8),
+   _T(X8Z24_UNORM,       D24X8, NONE),
+   _T(S8_UINT_Z24_UNORM, D24X8, NONE),
+
+   _T(R9G9B9E5_FLOAT,  E5B9G9R9,                    NONE),
+   _T(R11G11B10_FLOAT, EXT_B10G11R11F | EXT_FORMAT, B10G11R11F),
 
    /* 48-bit */
    V_(R16G16B16_UNORM,   UNSIGNED_SHORT, NONE),
    V_(R16G16B16_SNORM,   SHORT,          NONE),
-   V_(R16G16B16_UINT,    UNSIGNED_SHORT, NONE),
-   V_(R16G16B16_SINT,    SHORT,          NONE),
+   V_(R16G16B16_UINT,    SHORT_I,        NONE),
+   V_(R16G16B16_SINT,    SHORT_I,        NONE),
    V_(R16G16B16_USCALED, UNSIGNED_SHORT, NONE),
    V_(R16G16B16_SSCALED, SHORT,          NONE),
    V_(R16G16B16_FLOAT,   HALF_FLOAT,     NONE),
@@ -191,26 +185,26 @@ static struct etna_format formats[PIPE_FORMAT_COUNT] = {
    /* 64-bit */
    V_(R16G16B16A16_UNORM,   UNSIGNED_SHORT, NONE),
    V_(R16G16B16A16_SNORM,   SHORT,          NONE),
-   V_(R16G16B16A16_UINT,    UNSIGNED_SHORT, NONE),
-   V_(R16G16B16A16_SINT,    SHORT,          NONE),
+   VT(R16G16B16A16_UINT,    SHORT_I,        EXT_A16B16G16R16I | EXT_FORMAT, A16B16G16R16I),
+   VT(R16G16B16A16_SINT,    SHORT_I,        EXT_A16B16G16R16I | EXT_FORMAT, A16B16G16R16I),
    V_(R16G16B16A16_USCALED, UNSIGNED_SHORT, NONE),
    V_(R16G16B16A16_SSCALED, SHORT,          NONE),
-   V_(R16G16B16A16_FLOAT,   HALF_FLOAT,     NONE),
+   VT(R16G16B16A16_FLOAT,   HALF_FLOAT,     EXT_A16B16G16R16F | EXT_FORMAT, A16B16G16R16F),
 
    V_(R32G32_UNORM,   UNSIGNED_INT, NONE),
    V_(R32G32_SNORM,   INT,          NONE),
-   V_(R32G32_UINT,    UNSIGNED_INT, NONE),
-   V_(R32G32_SINT,    INT,          NONE),
+   VT(R32G32_UINT,    FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
+   VT(R32G32_SINT,    FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
    V_(R32G32_USCALED, UNSIGNED_INT, NONE),
    V_(R32G32_SSCALED, INT,          NONE),
-   V_(R32G32_FLOAT,   FLOAT,        NONE),
+   VT(R32G32_FLOAT,   FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
    V_(R32G32_FIXED,   FIXED,        NONE),
 
    /* 96-bit */
    V_(R32G32B32_UNORM,   UNSIGNED_INT, NONE),
    V_(R32G32B32_SNORM,   INT,          NONE),
-   V_(R32G32B32_UINT,    UNSIGNED_INT, NONE),
-   V_(R32G32B32_SINT,    INT,          NONE),
+   V_(R32G32B32_UINT,    FLOAT,        NONE),
+   V_(R32G32B32_SINT,    FLOAT,        NONE),
    V_(R32G32B32_USCALED, UNSIGNED_INT, NONE),
    V_(R32G32B32_SSCALED, INT,          NONE),
    V_(R32G32B32_FLOAT,   FLOAT,        NONE),
@@ -219,73 +213,54 @@ static struct etna_format formats[PIPE_FORMAT_COUNT] = {
    /* 128-bit */
    V_(R32G32B32A32_UNORM,   UNSIGNED_INT, NONE),
    V_(R32G32B32A32_SNORM,   INT,          NONE),
-   V_(R32G32B32A32_UINT,    UNSIGNED_INT, NONE),
-   V_(R32G32B32A32_SINT,    INT,          NONE),
+   V_(R32G32B32A32_UINT,    FLOAT,        NONE),
+   V_(R32G32B32A32_SINT,    FLOAT,        NONE),
    V_(R32G32B32A32_USCALED, UNSIGNED_INT, NONE),
    V_(R32G32B32A32_SSCALED, INT,          NONE),
    V_(R32G32B32A32_FLOAT,   FLOAT,        NONE),
    V_(R32G32B32A32_FIXED,   FIXED,        NONE),
 
    /* compressed */
-   _T(ETC1_RGB8, ETC1, SWIZ(X, Y, Z, W), NONE),
-
-   _T(DXT1_RGB,  DXT1,      SWIZ(X, Y, Z, W), NONE),
-   _T(DXT1_SRGBA,DXT1,      SWIZ(X, Y, Z, W), NONE),
-   _T(DXT1_RGBA, DXT1,      SWIZ(X, Y, Z, W), NONE),
-   _T(DXT3_SRGBA,DXT2_DXT3, SWIZ(X, Y, Z, W), NONE),
-   _T(DXT3_RGBA, DXT2_DXT3, SWIZ(X, Y, Z, W), NONE),
-   _T(DXT5_SRGBA,DXT4_DXT5, SWIZ(X, Y, Z, W), NONE),
-   _T(DXT5_RGBA, DXT4_DXT5, SWIZ(X, Y, Z, W), NONE),
-
-   _T(ETC2_RGB8,       EXT_NONE | EXT_FORMAT,                          SWIZ(X, Y, Z, W), NONE), /* Extd. format NONE doubles as ETC2_RGB8 */
-   _T(ETC2_SRGB8,      EXT_NONE | EXT_FORMAT,                          SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_RGB8A1,     EXT_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 | EXT_FORMAT, SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_SRGB8A1,    EXT_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 | EXT_FORMAT, SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_RGBA8,      EXT_RGBA8_ETC2_EAC | EXT_FORMAT,                SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_SRGBA8,     EXT_RGBA8_ETC2_EAC | EXT_FORMAT,                SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_R11_UNORM,  EXT_R11_EAC | EXT_FORMAT,                       SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_R11_SNORM,  EXT_SIGNED_R11_EAC | EXT_FORMAT,                SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_RG11_UNORM, EXT_RG11_EAC | EXT_FORMAT,                      SWIZ(X, Y, Z, W), NONE),
-   _T(ETC2_RG11_SNORM, EXT_SIGNED_RG11_EAC | EXT_FORMAT,               SWIZ(X, Y, Z, W), NONE),
-
-   _T(ASTC_4x4,        ASTC_RGBA_4x4 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_5x4,        ASTC_RGBA_5x4 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_5x5,        ASTC_RGBA_5x5 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_6x5,        ASTC_RGBA_6x5 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_6x6,        ASTC_RGBA_6x6 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x5,        ASTC_RGBA_8x5 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x6,        ASTC_RGBA_8x6 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x8,        ASTC_RGBA_8x8 | ASTC_FORMAT,                    SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x5,       ASTC_RGBA_10x5 | ASTC_FORMAT,                   SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x6,       ASTC_RGBA_10x6 | ASTC_FORMAT,                   SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x8,       ASTC_RGBA_10x8 | ASTC_FORMAT,                   SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x10,      ASTC_RGBA_10x10 | ASTC_FORMAT,                  SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_12x10,      ASTC_RGBA_12x10 | ASTC_FORMAT,                  SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_12x12,      ASTC_RGBA_12x12 | ASTC_FORMAT,                  SWIZ(X, Y, Z, W), NONE),
-
-   _T(ASTC_4x4_SRGB,   ASTC_SRGB8_ALPHA8_4x4 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_5x4_SRGB,   ASTC_SRGB8_ALPHA8_5x4 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_5x5_SRGB,   ASTC_SRGB8_ALPHA8_5x5 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_6x5_SRGB,   ASTC_SRGB8_ALPHA8_6x5 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_6x6_SRGB,   ASTC_SRGB8_ALPHA8_6x6 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x5_SRGB,   ASTC_SRGB8_ALPHA8_8x5 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x6_SRGB,   ASTC_SRGB8_ALPHA8_8x6 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_8x8_SRGB,   ASTC_SRGB8_ALPHA8_8x8 | ASTC_FORMAT,            SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x5_SRGB,  ASTC_SRGB8_ALPHA8_10x5 | ASTC_FORMAT,           SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x6_SRGB,  ASTC_SRGB8_ALPHA8_10x6 | ASTC_FORMAT,           SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x8_SRGB,  ASTC_SRGB8_ALPHA8_10x8 | ASTC_FORMAT,           SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_10x10_SRGB, ASTC_SRGB8_ALPHA8_10x10 | ASTC_FORMAT,          SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_12x10_SRGB, ASTC_SRGB8_ALPHA8_12x10 | ASTC_FORMAT,          SWIZ(X, Y, Z, W), NONE),
-   _T(ASTC_12x12_SRGB, ASTC_SRGB8_ALPHA8_12x12 | ASTC_FORMAT,          SWIZ(X, Y, Z, W), NONE),
+   _T(ETC1_RGB8, ETC1, NONE),
+
+   _T(DXT1_RGB,  DXT1,      NONE),
+   _T(DXT1_RGBA, DXT1,      NONE),
+   _T(DXT3_RGBA, DXT2_DXT3, NONE),
+   _T(DXT5_RGBA, DXT4_DXT5, NONE),
+
+   _T(ETC2_RGB8,       EXT_NONE | EXT_FORMAT,                          NONE), /* Extd. format NONE doubles as ETC2_RGB8 */
+   _T(ETC2_RGB8A1,     EXT_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 | EXT_FORMAT, NONE),
+   _T(ETC2_RGBA8,      EXT_RGBA8_ETC2_EAC | EXT_FORMAT,                NONE),
+   _T(ETC2_R11_UNORM,  EXT_R11_EAC | EXT_FORMAT,                       NONE),
+   _T(ETC2_R11_SNORM,  EXT_SIGNED_R11_EAC | EXT_FORMAT,                NONE),
+   _T(ETC2_RG11_UNORM, EXT_RG11_EAC | EXT_FORMAT,                      NONE),
+   _T(ETC2_RG11_SNORM, EXT_SIGNED_RG11_EAC | EXT_FORMAT,               NONE),
+
+   _T(ASTC_4x4,        ASTC_RGBA_4x4 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_5x4,        ASTC_RGBA_5x4 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_5x5,        ASTC_RGBA_5x5 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_6x5,        ASTC_RGBA_6x5 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_6x6,        ASTC_RGBA_6x6 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_8x5,        ASTC_RGBA_8x5 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_8x6,        ASTC_RGBA_8x6 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_8x8,        ASTC_RGBA_8x8 | ASTC_FORMAT,                    NONE),
+   _T(ASTC_10x5,       ASTC_RGBA_10x5 | ASTC_FORMAT,                   NONE),
+   _T(ASTC_10x6,       ASTC_RGBA_10x6 | ASTC_FORMAT,                   NONE),
+   _T(ASTC_10x8,       ASTC_RGBA_10x8 | ASTC_FORMAT,                   NONE),
+   _T(ASTC_10x10,      ASTC_RGBA_10x10 | ASTC_FORMAT,                  NONE),
+   _T(ASTC_12x10,      ASTC_RGBA_12x10 | ASTC_FORMAT,                  NONE),
+   _T(ASTC_12x12,      ASTC_RGBA_12x12 | ASTC_FORMAT,                  NONE),
 
    /* YUV */
-   _T(YUYV, YUY2, SWIZ(X, Y, Z, W), YUY2),
-   _T(UYVY, UYVY, SWIZ(X, Y, Z, W), NONE),
+   _T(YUYV, YUY2, YUY2),
+   _T(UYVY, UYVY, NONE),
 };
 
 uint32_t
 translate_texture_format(enum pipe_format fmt)
 {
+   fmt = util_format_linear(fmt);
+
    if (!formats[fmt].present)
       return ETNA_NO_MATCH;
 
@@ -293,15 +268,43 @@ translate_texture_format(enum pipe_format fmt)
 }
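
The new util_format_linear() call folds sRGB views onto their linear table entries,
which is why the separate *_SRGB rows were dropped from formats[] above; as a sketch
of the intent, both of these now resolve through the same entry:

   translate_texture_format(PIPE_FORMAT_B8G8R8A8_UNORM);
   translate_texture_format(PIPE_FORMAT_B8G8R8A8_SRGB);  /* linearized to B8G8R8A8_UNORM first */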
 
 bool
-texture_format_needs_swiz(enum pipe_format fmt)
+texture_use_int_filter(const struct pipe_sampler_view *so, bool tex_desc)
 {
-   static const unsigned char def[4] = SWIZ(X, Y, Z, W);
-   bool swiz = false;
+   switch (so->target) {
+   case PIPE_TEXTURE_1D_ARRAY:
+   case PIPE_TEXTURE_2D_ARRAY:
+      if (tex_desc)
+         break;
+      /* !tex_desc: fall through and disallow the int filter for array targets */
+   case PIPE_TEXTURE_3D:
+      return false;
+   default:
+      break;
+   }
 
-   if (formats[fmt].present)
-      swiz = !!memcmp(def, formats[fmt].tex_swiz, sizeof(formats[fmt].tex_swiz));
+   /* only unorm formats can use int filter */
+   if (!util_format_is_unorm(so->format))
+      return false;
+
+   if (util_format_is_srgb(so->format))
+      return false;
+
+   switch (so->format) {
+   /* apparently D16 can't use int filter but D24 can */
+   case PIPE_FORMAT_Z16_UNORM:
+   case PIPE_FORMAT_R10G10B10A2_UNORM:
+   case PIPE_FORMAT_R10G10B10X2_UNORM:
+   case PIPE_FORMAT_ETC2_R11_UNORM:
+   case PIPE_FORMAT_ETC2_RG11_UNORM:
+      return false;
+   default:
+      return true;
+   }
+}
 
-   return swiz;
+bool
+texture_format_needs_swiz(enum pipe_format fmt)
+{
+   return util_format_linear(fmt) == PIPE_FORMAT_R8_UNORM;
 }
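
A small caller-side sketch of texture_use_int_filter() above (the view and the tex_desc
flag here are only illustrative; the result is presumably consumed by the sampler-state
code elsewhere in the driver):

   struct pipe_sampler_view view = {
      .target = PIPE_TEXTURE_2D,
      .format = PIPE_FORMAT_B8G8R8A8_UNORM,
   };
   bool int_filter = texture_use_int_filter(&view, false);  /* true: plain UNORM 2D view */
   /* an sRGB, Z16, 10/10/10/2 or EAC R11/RG11 format would return false instead */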
 
 uint32_t
@@ -310,10 +313,15 @@ get_texture_swiz(enum pipe_format fmt, unsigned swizzle_r,
 {
    unsigned char swiz[4] = {
       swizzle_r, swizzle_g, swizzle_b, swizzle_a,
-   }, rswiz[4];
-
-   assert(formats[fmt].present);
-   util_format_compose_swizzles(formats[fmt].tex_swiz, swiz, rswiz);
+   };
+
+   if (util_format_linear(fmt) == PIPE_FORMAT_R8_UNORM) {
+      /* R8 is emulated with L8, so the Y and Z channels must be forced to zero */
+      for (unsigned i = 0; i < 4; i++) {
+         if (swiz[i] == PIPE_SWIZZLE_Y || swiz[i] == PIPE_SWIZZLE_Z)
+            swiz[i] = PIPE_SWIZZLE_0;
+      }
+   }
 
    /* PIPE_SWIZZLE_ maps 1:1 to TEXTURE_SWIZZLE_ */
    STATIC_ASSERT(PIPE_SWIZZLE_X == TEXTURE_SWIZZLE_RED);
@@ -323,30 +331,33 @@ get_texture_swiz(enum pipe_format fmt, unsigned swizzle_r,
    STATIC_ASSERT(PIPE_SWIZZLE_0 == TEXTURE_SWIZZLE_ZERO);
    STATIC_ASSERT(PIPE_SWIZZLE_1 == TEXTURE_SWIZZLE_ONE);
 
-   return VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_R(rswiz[0]) |
-          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_G(rswiz[1]) |
-          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_B(rswiz[2]) |
-          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_A(rswiz[3]);
+   return VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_R(swiz[0]) |
+          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_G(swiz[1]) |
+          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_B(swiz[2]) |
+          VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_A(swiz[3]);
 }
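
To illustrate the rewrite above: for an R8_UNORM view sampled with the identity swizzle,
the green and blue selectors are replaced by constant zero before being packed into the
TE_SAMPLER_CONFIG1 fields (a sketch):

   get_texture_swiz(PIPE_FORMAT_R8_UNORM,
                    PIPE_SWIZZLE_X, PIPE_SWIZZLE_Y, PIPE_SWIZZLE_Z, PIPE_SWIZZLE_W);
   /* packs R=RED, G=ZERO, B=ZERO, A=ALPHA, hiding the L8 replication into green/blue */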
 
 uint32_t
-translate_rs_format(enum pipe_format fmt)
+translate_pe_format(enum pipe_format fmt)
 {
+   fmt = util_format_linear(fmt);
+
    if (!formats[fmt].present)
       return ETNA_NO_MATCH;
 
-   if (formats[fmt].rs == ETNA_NO_MATCH)
+   if (formats[fmt].pe == ETNA_NO_MATCH)
       return ETNA_NO_MATCH;
 
-   return RS_FORMAT(formats[fmt].rs);
+   return PE_FORMAT(formats[fmt].pe);
 }
 
 int
-translate_rs_format_rb_swap(enum pipe_format fmt)
+translate_pe_format_rb_swap(enum pipe_format fmt)
 {
+   fmt = util_format_linear(fmt);
    assert(formats[fmt].present);
 
-   return formats[fmt].rs & RS_FORMAT_RB_SWAP;
+   return formats[fmt].pe & PE_FORMAT_RB_SWAP;
 }
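
Taken together, these two helpers let render-target setup split a gallium format into
the hardware PE format plus a red/blue swap flag; roughly (a sketch, not the driver's
actual call site):

   uint32_t hw_fmt = translate_pe_format(PIPE_FORMAT_R8G8B8A8_UNORM);         /* PE_FORMAT_A8R8G8B8 */
   int rb_swap     = translate_pe_format_rb_swap(PIPE_FORMAT_R8G8B8A8_UNORM); /* nonzero: swap R/B */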
 
 /* Return type flags for vertex element format */