/*
 * Copyright (c) 2016 Etnaviv Project
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Christian Gmeiner <christian.gmeiner@gmail.com>
 */
#include "etnaviv_format.h"

#include "hw/state.xml.h"
#include "hw/state_3d.xml.h"

#include "pipe/p_defines.h"
/* Specifies the table of all the formats and their features. Also supplies
 * the helpers that look up various data in those tables.
 */

/* Value stored in the .rs field when a format has no resolve-engine format. */
#define RS_FORMAT_NONE ~0

/* Low nibble of .rs holds the RS_FORMAT_* hardware value. */
#define RS_FORMAT_MASK 0xf
#define RS_FORMAT(x) ((x) & RS_FORMAT_MASK)
/* Flag or-ed into .rs when the resolve engine must swap red and blue. */
#define RS_FORMAT_RB_SWAP 0x10

/* BGR-ordered resolve formats are their RGB counterpart plus an R/B swap. */
#define RS_FORMAT_X8B8G8R8 (RS_FORMAT_X8R8G8B8 | RS_FORMAT_RB_SWAP)
#define RS_FORMAT_A8B8G8R8 (RS_FORMAT_A8R8G8B8 | RS_FORMAT_RB_SWAP)

/* NOTE(review): the ".present = 1," member and the closing brace of each
 * macro were lost in a mangled copy of this file and are reconstructed
 * here; the translate_*() helpers below test .present, so every entry
 * emitted by these macros must set it. */

/* vertex + texture */
#define VT(pipe, vtxfmt, texfmt, rsfmt) \
   [PIPE_FORMAT_##pipe] = { \
      .vtx = VIVS_FE_VERTEX_ELEMENT_CONFIG_TYPE_##vtxfmt, \
      .tex = TEXTURE_FORMAT_##texfmt, \
      .rs = RS_FORMAT_##rsfmt, \
      .present = 1, \
   }

/* texture-only */
#define _T(pipe, fmt, rsfmt) \
   [PIPE_FORMAT_##pipe] = { \
      .vtx = ETNA_NO_MATCH, \
      .tex = TEXTURE_FORMAT_##fmt, \
      .rs = RS_FORMAT_##rsfmt, \
      .present = 1, \
   }

/* vertex-only */
#define V_(pipe, fmt, rsfmt) \
   [PIPE_FORMAT_##pipe] = { \
      .vtx = VIVS_FE_VERTEX_ELEMENT_CONFIG_TYPE_##fmt, \
      .tex = ETNA_NO_MATCH, \
      .rs = RS_FORMAT_##rsfmt, \
      .present = 1, \
   }
81 static struct etna_format formats
[PIPE_FORMAT_COUNT
] = {
83 V_(R8_UNORM
, UNSIGNED_BYTE
, NONE
),
84 V_(R8_SNORM
, BYTE
, NONE
),
85 V_(R8_UINT
, UNSIGNED_BYTE
, NONE
),
86 V_(R8_SINT
, BYTE
, NONE
),
87 V_(R8_USCALED
, UNSIGNED_BYTE
, NONE
),
88 V_(R8_SSCALED
, BYTE
, NONE
),
90 _T(A8_UNORM
, A8
, NONE
),
91 _T(L8_UNORM
, L8
, NONE
),
92 _T(I8_UNORM
, I8
, NONE
),
95 V_(R16_UNORM
, UNSIGNED_SHORT
, NONE
),
96 V_(R16_SNORM
, SHORT
, NONE
),
97 V_(R16_UINT
, UNSIGNED_SHORT
, NONE
),
98 V_(R16_SINT
, SHORT
, NONE
),
99 V_(R16_USCALED
, UNSIGNED_SHORT
, NONE
),
100 V_(R16_SSCALED
, SHORT
, NONE
),
101 V_(R16_FLOAT
, HALF_FLOAT
, NONE
),
103 _T(B4G4R4A4_UNORM
, A4R4G4B4
, A4R4G4B4
),
104 _T(B4G4R4X4_UNORM
, X4R4G4B4
, X4R4G4B4
),
106 _T(L8A8_UNORM
, A8L8
, NONE
),
108 _T(Z16_UNORM
, D16
, A4R4G4B4
),
109 _T(B5G6R5_UNORM
, R5G6B5
, R5G6B5
),
110 _T(B5G5R5A1_UNORM
, A1R5G5B5
, A1R5G5B5
),
111 _T(B5G5R5X1_UNORM
, X1R5G5B5
, X1R5G5B5
),
113 V_(R8G8_UNORM
, UNSIGNED_BYTE
, NONE
),
114 V_(R8G8_SNORM
, BYTE
, NONE
),
115 V_(R8G8_UINT
, UNSIGNED_BYTE
, NONE
),
116 V_(R8G8_SINT
, BYTE
, NONE
),
117 V_(R8G8_USCALED
, UNSIGNED_BYTE
, NONE
),
118 V_(R8G8_SSCALED
, BYTE
, NONE
),
121 V_(R8G8B8_UNORM
, UNSIGNED_BYTE
, NONE
),
122 V_(R8G8B8_SNORM
, BYTE
, NONE
),
123 V_(R8G8B8_UINT
, UNSIGNED_BYTE
, NONE
),
124 V_(R8G8B8_SINT
, BYTE
, NONE
),
125 V_(R8G8B8_USCALED
, UNSIGNED_BYTE
, NONE
),
126 V_(R8G8B8_SSCALED
, BYTE
, NONE
),
129 V_(R32_UNORM
, UNSIGNED_INT
, NONE
),
130 V_(R32_SNORM
, INT
, NONE
),
131 V_(R32_SINT
, INT
, NONE
),
132 V_(R32_UINT
, UNSIGNED_INT
, NONE
),
133 V_(R32_USCALED
, UNSIGNED_INT
, NONE
),
134 V_(R32_SSCALED
, INT
, NONE
),
135 V_(R32_FLOAT
, FLOAT
, NONE
),
136 V_(R32_FIXED
, FIXED
, NONE
),
138 V_(R16G16_UNORM
, UNSIGNED_SHORT
, NONE
),
139 V_(R16G16_SNORM
, SHORT
, NONE
),
140 V_(R16G16_UINT
, UNSIGNED_SHORT
, NONE
),
141 V_(R16G16_SINT
, SHORT
, NONE
),
142 V_(R16G16_USCALED
, UNSIGNED_SHORT
, NONE
),
143 V_(R16G16_SSCALED
, SHORT
, NONE
),
144 V_(R16G16_FLOAT
, HALF_FLOAT
, NONE
),
146 V_(A8B8G8R8_UNORM
, UNSIGNED_BYTE
, NONE
),
148 V_(R8G8B8A8_UNORM
, UNSIGNED_BYTE
, A8B8G8R8
),
149 V_(R8G8B8A8_SNORM
, BYTE
, A8B8G8R8
),
150 _T(R8G8B8X8_UNORM
, X8B8G8R8
, X8B8G8R8
),
151 V_(R8G8B8A8_UINT
, UNSIGNED_BYTE
, A8B8G8R8
),
152 V_(R8G8B8A8_SINT
, BYTE
, A8B8G8R8
),
153 V_(R8G8B8A8_USCALED
, UNSIGNED_BYTE
, A8B8G8R8
),
154 V_(R8G8B8A8_SSCALED
, BYTE
, A8B8G8R8
),
156 _T(R8G8B8A8_UNORM
, A8B8G8R8
, A8B8G8R8
),
157 _T(R8G8B8X8_UNORM
, X8B8G8R8
, X8B8G8R8
),
159 _T(B8G8R8A8_UNORM
, A8R8G8B8
, A8R8G8B8
),
160 _T(B8G8R8X8_UNORM
, X8R8G8B8
, X8R8G8B8
),
162 V_(R10G10B10A2_UNORM
, UNSIGNED_INT_10_10_10_2
, NONE
),
163 V_(R10G10B10A2_SNORM
, INT_10_10_10_2
, NONE
),
164 V_(R10G10B10A2_USCALED
, UNSIGNED_INT_10_10_10_2
, NONE
),
165 V_(R10G10B10A2_SSCALED
, INT_10_10_10_2
, NONE
),
167 _T(X8Z24_UNORM
, D24S8
, A8R8G8B8
),
168 _T(S8_UINT_Z24_UNORM
, D24S8
, A8R8G8B8
),
171 V_(R16G16B16_UNORM
, UNSIGNED_SHORT
, NONE
),
172 V_(R16G16B16_SNORM
, SHORT
, NONE
),
173 V_(R16G16B16_UINT
, UNSIGNED_SHORT
, NONE
),
174 V_(R16G16B16_SINT
, SHORT
, NONE
),
175 V_(R16G16B16_USCALED
, UNSIGNED_SHORT
, NONE
),
176 V_(R16G16B16_SSCALED
, SHORT
, NONE
),
177 V_(R16G16B16_FLOAT
, HALF_FLOAT
, NONE
),
180 V_(R16G16B16A16_UNORM
, UNSIGNED_SHORT
, NONE
),
181 V_(R16G16B16A16_SNORM
, SHORT
, NONE
),
182 V_(R16G16B16A16_UINT
, UNSIGNED_SHORT
, NONE
),
183 V_(R16G16B16A16_SINT
, SHORT
, NONE
),
184 V_(R16G16B16A16_USCALED
, UNSIGNED_SHORT
, NONE
),
185 V_(R16G16B16A16_SSCALED
, SHORT
, NONE
),
186 V_(R16G16B16A16_FLOAT
, HALF_FLOAT
, NONE
),
188 V_(R32G32_UNORM
, UNSIGNED_INT
, NONE
),
189 V_(R32G32_SNORM
, INT
, NONE
),
190 V_(R32G32_UINT
, UNSIGNED_INT
, NONE
),
191 V_(R32G32_SINT
, INT
, NONE
),
192 V_(R32G32_USCALED
, UNSIGNED_INT
, NONE
),
193 V_(R32G32_SSCALED
, INT
, NONE
),
194 V_(R32G32_FLOAT
, FLOAT
, NONE
),
195 V_(R32G32_FIXED
, FIXED
, NONE
),
198 V_(R32G32B32_UNORM
, UNSIGNED_INT
, NONE
),
199 V_(R32G32B32_SNORM
, INT
, NONE
),
200 V_(R32G32B32_UINT
, UNSIGNED_INT
, NONE
),
201 V_(R32G32B32_SINT
, INT
, NONE
),
202 V_(R32G32B32_USCALED
, UNSIGNED_INT
, NONE
),
203 V_(R32G32B32_SSCALED
, INT
, NONE
),
204 V_(R32G32B32_FLOAT
, FLOAT
, NONE
),
205 V_(R32G32B32_FIXED
, FIXED
, NONE
),
208 V_(R32G32B32A32_UNORM
, UNSIGNED_INT
, NONE
),
209 V_(R32G32B32A32_SNORM
, INT
, NONE
),
210 V_(R32G32B32A32_UINT
, UNSIGNED_INT
, NONE
),
211 V_(R32G32B32A32_SINT
, INT
, NONE
),
212 V_(R32G32B32A32_USCALED
, UNSIGNED_INT
, NONE
),
213 V_(R32G32B32A32_SSCALED
, INT
, NONE
),
214 V_(R32G32B32A32_FLOAT
, FLOAT
, NONE
),
215 V_(R32G32B32A32_FIXED
, FIXED
, NONE
),
218 _T(ETC1_RGB8
, ETC1
, NONE
),
220 _T(DXT1_RGB
, DXT1
, NONE
),
221 _T(DXT1_RGBA
, DXT1
, NONE
),
222 _T(DXT3_RGBA
, DXT2_DXT3
, NONE
),
223 _T(DXT3_RGBA
, DXT2_DXT3
, NONE
),
224 _T(DXT5_RGBA
, DXT4_DXT5
, NONE
),
227 _T(YUYV
, YUY2
, YUY2
),
228 _T(UYVY
, UYVY
, NONE
),
232 translate_texture_format(enum pipe_format fmt
)
234 /* XXX with TEXTURE_FORMAT_EXT and swizzle on newer chips we can
235 * support much more */
236 if (!formats
[fmt
].present
)
237 return ETNA_NO_MATCH
;
239 return formats
[fmt
].tex
;
243 translate_rs_format(enum pipe_format fmt
)
245 if (!formats
[fmt
].present
)
246 return ETNA_NO_MATCH
;
248 if (formats
[fmt
].rs
== ETNA_NO_MATCH
)
249 return ETNA_NO_MATCH
;
251 return RS_FORMAT(formats
[fmt
].rs
);
255 translate_rs_format_rb_swap(enum pipe_format fmt
)
257 assert(formats
[fmt
].present
);
259 return formats
[fmt
].rs
& RS_FORMAT_RB_SWAP
;
262 /* Return type flags for vertex element format */
264 translate_vertex_format_type(enum pipe_format fmt
)
266 if (!formats
[fmt
].present
)
267 return ETNA_NO_MATCH
;
269 return formats
[fmt
].vtx
;