/*
 * Copyright (c) 2016 Etnaviv Project
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Christian Gmeiner <christian.gmeiner@gmail.com>
 */
27 #include "etnaviv_format.h"
29 #include "hw/state.xml.h"
30 #include "hw/state_3d.xml.h"
32 #include "pipe/p_defines.h"
34 /* Specifies the table of all the formats and their features. Also supplies
35 * the helpers that look up various data in those tables.
45 #define RS_FORMAT_NONE ~0
47 #define RS_FORMAT_MASK 0xf
48 #define RS_FORMAT(x) ((x) & RS_FORMAT_MASK)
49 #define RS_FORMAT_RB_SWAP 0x10
51 #define RS_FORMAT_X8B8G8R8 (RS_FORMAT_X8R8G8B8 | RS_FORMAT_RB_SWAP)
52 #define RS_FORMAT_A8B8G8R8 (RS_FORMAT_A8R8G8B8 | RS_FORMAT_RB_SWAP)
/* Entry usable as both vertex and texture format.
 * NOTE(review): the ".present = 1," member and the closing brace were
 * missing from each of these three macros even though the lookup helpers
 * below test formats[fmt].present — restored so entries register as
 * present and the initializers are well-formed. */
#define VT(pipe, vtxfmt, texfmt, rsfmt)                      \
   [PIPE_FORMAT_##pipe] = {                                  \
      .vtx = VIVS_FE_VERTEX_ELEMENT_CONFIG_TYPE_##vtxfmt,    \
      .tex = TEXTURE_FORMAT_##texfmt,                        \
      .rs = RS_FORMAT_##rsfmt,                               \
      .present = 1,                                          \
   }

/* Texture-only entry (no matching vertex fetch format). */
#define _T(pipe, fmt, rsfmt)          \
   [PIPE_FORMAT_##pipe] = {           \
      .vtx = ETNA_NO_MATCH,           \
      .tex = TEXTURE_FORMAT_##fmt,    \
      .rs = RS_FORMAT_##rsfmt,        \
      .present = 1,                   \
   }

/* Vertex-only entry (no matching texture sample format). */
#define V_(pipe, fmt, rsfmt)                              \
   [PIPE_FORMAT_##pipe] = {                               \
      .vtx = VIVS_FE_VERTEX_ELEMENT_CONFIG_TYPE_##fmt,    \
      .tex = ETNA_NO_MATCH,                               \
      .rs = RS_FORMAT_##rsfmt,                            \
      .present = 1,                                       \
   }
81 static struct etna_format formats
[PIPE_FORMAT_COUNT
] = {
83 V_(R8_UNORM
, UNSIGNED_BYTE
, NONE
),
84 V_(R8_SNORM
, BYTE
, NONE
),
85 V_(R8_UINT
, UNSIGNED_BYTE
, NONE
),
86 V_(R8_SINT
, BYTE
, NONE
),
87 V_(R8_USCALED
, UNSIGNED_BYTE
, NONE
),
88 V_(R8_SSCALED
, BYTE
, NONE
),
90 _T(A8_UNORM
, A8
, NONE
),
91 _T(L8_UNORM
, L8
, NONE
),
92 _T(I8_UNORM
, I8
, NONE
),
95 V_(R16_UNORM
, UNSIGNED_SHORT
, NONE
),
96 V_(R16_SNORM
, SHORT
, NONE
),
97 V_(R16_UINT
, UNSIGNED_SHORT
, NONE
),
98 V_(R16_SINT
, SHORT
, NONE
),
99 V_(R16_USCALED
, UNSIGNED_SHORT
, NONE
),
100 V_(R16_SSCALED
, SHORT
, NONE
),
101 V_(R16_FLOAT
, HALF_FLOAT
, NONE
),
103 _T(B4G4R4A4_UNORM
, A4R4G4B4
, A4R4G4B4
),
104 _T(B4G4R4X4_UNORM
, X4R4G4B4
, X4R4G4B4
),
106 _T(Z16_UNORM
, D16
, A4R4G4B4
),
107 _T(B5G6R5_UNORM
, R5G6B5
, R5G6B5
),
108 _T(B5G5R5A1_UNORM
, A1R5G5B5
, A1R5G5B5
),
109 _T(B5G5R5X1_UNORM
, X1R5G5B5
, X1R5G5B5
),
111 V_(R8G8_UNORM
, UNSIGNED_BYTE
, NONE
),
112 V_(R8G8_SNORM
, BYTE
, NONE
),
113 V_(R8G8_UINT
, UNSIGNED_BYTE
, NONE
),
114 V_(R8G8_SINT
, BYTE
, NONE
),
115 V_(R8G8_USCALED
, UNSIGNED_BYTE
, NONE
),
116 V_(R8G8_SSCALED
, BYTE
, NONE
),
119 V_(R8G8B8_UNORM
, UNSIGNED_BYTE
, NONE
),
120 V_(R8G8B8_SNORM
, BYTE
, NONE
),
121 V_(R8G8B8_UINT
, UNSIGNED_BYTE
, NONE
),
122 V_(R8G8B8_SINT
, BYTE
, NONE
),
123 V_(R8G8B8_USCALED
, UNSIGNED_BYTE
, NONE
),
124 V_(R8G8B8_SSCALED
, BYTE
, NONE
),
127 V_(R32_UNORM
, UNSIGNED_INT
, NONE
),
128 V_(R32_SNORM
, INT
, NONE
),
129 V_(R32_SINT
, INT
, NONE
),
130 V_(R32_UINT
, UNSIGNED_INT
, NONE
),
131 V_(R32_USCALED
, UNSIGNED_INT
, NONE
),
132 V_(R32_SSCALED
, INT
, NONE
),
133 V_(R32_FLOAT
, FLOAT
, NONE
),
134 V_(R32_FIXED
, FIXED
, NONE
),
136 V_(R16G16_UNORM
, UNSIGNED_SHORT
, NONE
),
137 V_(R16G16_SNORM
, SHORT
, NONE
),
138 V_(R16G16_UINT
, UNSIGNED_SHORT
, NONE
),
139 V_(R16G16_SINT
, SHORT
, NONE
),
140 V_(R16G16_USCALED
, UNSIGNED_SHORT
, NONE
),
141 V_(R16G16_SSCALED
, SHORT
, NONE
),
142 V_(R16G16_FLOAT
, HALF_FLOAT
, NONE
),
144 V_(A8B8G8R8_UNORM
, UNSIGNED_BYTE
, NONE
),
146 V_(R8G8B8A8_UNORM
, UNSIGNED_BYTE
, A8B8G8R8
),
147 V_(R8G8B8A8_SNORM
, BYTE
, A8B8G8R8
),
148 _T(R8G8B8X8_UNORM
, X8B8G8R8
, X8B8G8R8
),
149 V_(R8G8B8A8_UINT
, UNSIGNED_BYTE
, A8B8G8R8
),
150 V_(R8G8B8A8_SINT
, BYTE
, A8B8G8R8
),
151 V_(R8G8B8A8_USCALED
, UNSIGNED_BYTE
, A8B8G8R8
),
152 V_(R8G8B8A8_SSCALED
, BYTE
, A8B8G8R8
),
154 _T(R8G8B8A8_UNORM
, A8B8G8R8
, A8B8G8R8
),
155 _T(R8G8B8X8_UNORM
, X8B8G8R8
, X8B8G8R8
),
157 _T(B8G8R8A8_UNORM
, A8R8G8B8
, A8R8G8B8
),
158 _T(B8G8R8X8_UNORM
, X8R8G8B8
, X8R8G8B8
),
160 V_(R10G10B10A2_UNORM
, UNSIGNED_INT_10_10_10_2
, NONE
),
161 V_(R10G10B10A2_SNORM
, INT_10_10_10_2
, NONE
),
162 V_(R10G10B10A2_USCALED
, UNSIGNED_INT_10_10_10_2
, NONE
),
163 V_(R10G10B10A2_SSCALED
, INT_10_10_10_2
, NONE
),
165 _T(X8Z24_UNORM
, D24S8
, A8R8G8B8
),
166 _T(S8_UINT_Z24_UNORM
, D24S8
, A8R8G8B8
),
169 V_(R16G16B16_UNORM
, UNSIGNED_SHORT
, NONE
),
170 V_(R16G16B16_SNORM
, SHORT
, NONE
),
171 V_(R16G16B16_UINT
, UNSIGNED_SHORT
, NONE
),
172 V_(R16G16B16_SINT
, SHORT
, NONE
),
173 V_(R16G16B16_USCALED
, UNSIGNED_SHORT
, NONE
),
174 V_(R16G16B16_SSCALED
, SHORT
, NONE
),
175 V_(R16G16B16_FLOAT
, HALF_FLOAT
, NONE
),
178 V_(R16G16B16A16_UNORM
, UNSIGNED_SHORT
, NONE
),
179 V_(R16G16B16A16_SNORM
, SHORT
, NONE
),
180 V_(R16G16B16A16_UINT
, UNSIGNED_SHORT
, NONE
),
181 V_(R16G16B16A16_SINT
, SHORT
, NONE
),
182 V_(R16G16B16A16_USCALED
, UNSIGNED_SHORT
, NONE
),
183 V_(R16G16B16A16_SSCALED
, SHORT
, NONE
),
184 V_(R16G16B16A16_FLOAT
, HALF_FLOAT
, NONE
),
186 V_(R32G32_UNORM
, UNSIGNED_INT
, NONE
),
187 V_(R32G32_SNORM
, INT
, NONE
),
188 V_(R32G32_UINT
, UNSIGNED_INT
, NONE
),
189 V_(R32G32_SINT
, INT
, NONE
),
190 V_(R32G32_USCALED
, UNSIGNED_INT
, NONE
),
191 V_(R32G32_SSCALED
, INT
, NONE
),
192 V_(R32G32_FLOAT
, FLOAT
, NONE
),
193 V_(R32G32_FIXED
, FIXED
, NONE
),
196 V_(R32G32B32_UNORM
, UNSIGNED_INT
, NONE
),
197 V_(R32G32B32_SNORM
, INT
, NONE
),
198 V_(R32G32B32_UINT
, UNSIGNED_INT
, NONE
),
199 V_(R32G32B32_SINT
, INT
, NONE
),
200 V_(R32G32B32_USCALED
, UNSIGNED_INT
, NONE
),
201 V_(R32G32B32_SSCALED
, INT
, NONE
),
202 V_(R32G32B32_FLOAT
, FLOAT
, NONE
),
203 V_(R32G32B32_FIXED
, FIXED
, NONE
),
206 V_(R32G32B32A32_UNORM
, UNSIGNED_INT
, NONE
),
207 V_(R32G32B32A32_SNORM
, INT
, NONE
),
208 V_(R32G32B32A32_UINT
, UNSIGNED_INT
, NONE
),
209 V_(R32G32B32A32_SINT
, INT
, NONE
),
210 V_(R32G32B32A32_USCALED
, UNSIGNED_INT
, NONE
),
211 V_(R32G32B32A32_SSCALED
, INT
, NONE
),
212 V_(R32G32B32A32_FLOAT
, FLOAT
, NONE
),
213 V_(R32G32B32A32_FIXED
, FIXED
, NONE
),
216 _T(ETC1_RGB8
, ETC1
, NONE
),
218 _T(DXT1_RGB
, DXT1
, NONE
),
219 _T(DXT1_RGBA
, DXT1
, NONE
),
220 _T(DXT3_RGBA
, DXT2_DXT3
, NONE
),
221 _T(DXT3_RGBA
, DXT2_DXT3
, NONE
),
222 _T(DXT5_RGBA
, DXT4_DXT5
, NONE
),
225 _T(YUYV
, YUY2
, YUY2
),
226 _T(UYVY
, UYVY
, NONE
),
230 translate_texture_format(enum pipe_format fmt
)
232 /* XXX with TEXTURE_FORMAT_EXT and swizzle on newer chips we can
233 * support much more */
234 if (!formats
[fmt
].present
)
235 return ETNA_NO_MATCH
;
237 return formats
[fmt
].tex
;
241 translate_rs_format(enum pipe_format fmt
)
243 if (!formats
[fmt
].present
)
244 return ETNA_NO_MATCH
;
246 if (formats
[fmt
].rs
== ETNA_NO_MATCH
)
247 return ETNA_NO_MATCH
;
249 return RS_FORMAT(formats
[fmt
].rs
);
253 translate_rs_format_rb_swap(enum pipe_format fmt
)
255 assert(formats
[fmt
].present
);
257 return formats
[fmt
].rs
& RS_FORMAT_RB_SWAP
;
260 /* Return type flags for vertex element format */
262 translate_vertex_format_type(enum pipe_format fmt
)
264 if (!formats
[fmt
].present
)
265 return ETNA_NO_MATCH
;
267 return formats
[fmt
].vtx
;