translate: typo in emit_B8G8R8A8_UNORM
mesa.git: src/gallium/auxiliary/translate/translate_generic.c
/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/*
 * Authors:
 *   Keith Whitwell <keith@tungstengraphics.com>
 */

#include "pipe/p_util.h"
#include "pipe/p_state.h"
#include "translate.h"


#define DRAW_DBG 0

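/* Per-format conversion helpers: a fetch_func reads one attribute from
 * its source vertex format into a float[4]; an emit_func writes a
 * float[4] back out in the destination vertex format.
 */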
typedef void (*fetch_func)(const void *ptr, float *attrib);
typedef void (*emit_func)(const float *attrib, void *ptr);

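/* One generic translate object.  Each active attribute records how its
 * source data is fetched (buffer index, offset within that buffer, the
 * resolved base pointer and stride) and how it is emitted (conversion
 * function and byte offset within the output vertex).
 */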
struct translate_generic {
   struct translate translate;

   struct {
      fetch_func fetch;
      unsigned buffer;
      unsigned input_offset;

      emit_func emit;
      unsigned output_offset;

      char *input_ptr;
      unsigned input_stride;

   } attrib[PIPE_MAX_ATTRIBS];

   unsigned nr_attrib;
   unsigned output_stride;
};


static struct translate_generic *translate_generic( struct translate *translate )
{
   return (struct translate_generic *)translate;
}

/**
 * Fetch a float[4] vertex attribute from memory, doing format/type
 * conversion as needed.
 *
 * This is probably needed/duplicated elsewhere, e.g. format
 * conversion, texture sampling etc.
 */
#define ATTRIB( NAME, SZ, TYPE, FROM, TO )           \
static void                                          \
fetch_##NAME(const void *ptr, float *attrib)         \
{                                                    \
   const float defaults[4] = { 0,0,0,1 };            \
   int i;                                            \
                                                     \
   for (i = 0; i < SZ; i++) {                        \
      attrib[i] = FROM(i);                           \
   }                                                 \
                                                     \
   for (; i < 4; i++) {                              \
      attrib[i] = defaults[i];                       \
   }                                                 \
}                                                    \
                                                     \
static void                                          \
emit_##NAME(const float *attrib, void *ptr)          \
{                                                    \
   unsigned i;                                       \
   TYPE *out = (TYPE *)ptr;                          \
                                                     \
   for (i = 0; i < SZ; i++) {                        \
      out[i] = TO(attrib[i]);                        \
   }                                                 \
}
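
/* For example, ATTRIB( R32G32_FLOAT, 2, float, FROM_32_FLOAT, TO_32_FLOAT )
 * expands to (roughly) this pair of functions:
 *
 *    static void fetch_R32G32_FLOAT(const void *ptr, float *attrib)
 *    {
 *       attrib[0] = ((float *) ptr)[0];
 *       attrib[1] = ((float *) ptr)[1];
 *       attrib[2] = 0.0f;
 *       attrib[3] = 1.0f;
 *    }
 *
 *    static void emit_R32G32_FLOAT(const float *attrib, void *ptr)
 *    {
 *       float *out = (float *)ptr;
 *       out[0] = attrib[0];
 *       out[1] = attrib[1];
 *    }
 *
 * Missing components are filled from the (0,0,0,1) defaults on fetch,
 * and only SZ components are written on emit.
 */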


#define FROM_64_FLOAT(i)   ((float) ((double *) ptr)[i])
#define FROM_32_FLOAT(i)   (((float *) ptr)[i])

#define FROM_8_USCALED(i)  ((float) ((unsigned char *) ptr)[i])
#define FROM_16_USCALED(i) ((float) ((unsigned short *) ptr)[i])
#define FROM_32_USCALED(i) ((float) ((unsigned int *) ptr)[i])

#define FROM_8_SSCALED(i)  ((float) ((char *) ptr)[i])
#define FROM_16_SSCALED(i) ((float) ((short *) ptr)[i])
#define FROM_32_SSCALED(i) ((float) ((int *) ptr)[i])

#define FROM_8_UNORM(i)    ((float) ((unsigned char *) ptr)[i] / 255.0f)
#define FROM_16_UNORM(i)   ((float) ((unsigned short *) ptr)[i] / 65535.0f)
#define FROM_32_UNORM(i)   ((float) ((unsigned int *) ptr)[i] / 4294967295.0f)

#define FROM_8_SNORM(i)    ((float) ((char *) ptr)[i] / 127.0f)
#define FROM_16_SNORM(i)   ((float) ((short *) ptr)[i] / 32767.0f)
#define FROM_32_SNORM(i)   ((float) ((int *) ptr)[i] / 2147483647.0f)

#define TO_64_FLOAT(f)   ((double) f)
#define TO_32_FLOAT(f)   (f)

#define TO_8_USCALED(f)  ((unsigned char) f)
#define TO_16_USCALED(f) ((unsigned short) f)
#define TO_32_USCALED(f) ((unsigned int) f)

#define TO_8_SSCALED(f)  ((char) f)
#define TO_16_SSCALED(f) ((short) f)
#define TO_32_SSCALED(f) ((int) f)

#define TO_8_UNORM(f)    ((unsigned char) (f * 255.0f))
#define TO_16_UNORM(f)   ((unsigned short) (f * 65535.0f))
#define TO_32_UNORM(f)   ((unsigned int) (f * 4294967295.0f))

#define TO_8_SNORM(f)    ((char) (f * 127.0f))
#define TO_16_SNORM(f)   ((short) (f * 32767.0f))
#define TO_32_SNORM(f)   ((int) (f * 2147483647.0f))

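/* The UNORM/SNORM variants rescale between the integer range and
 * [0,1] / [-1,1]: e.g. FROM_8_UNORM reads 255 as 1.0f and 0 as 0.0f,
 * and TO_8_UNORM(1.0f) gives back 255.  The TO_* conversions are plain
 * casts of the scaled value; they do not round or clamp.
 */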


ATTRIB( R64G64B64A64_FLOAT, 4, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64B64_FLOAT,    3, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64_FLOAT,       2, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64_FLOAT,          1, double, FROM_64_FLOAT, TO_64_FLOAT )

ATTRIB( R32G32B32A32_FLOAT, 4, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32B32_FLOAT,    3, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32_FLOAT,       2, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32_FLOAT,          1, float, FROM_32_FLOAT, TO_32_FLOAT )

ATTRIB( R32G32B32A32_USCALED, 4, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32B32_USCALED,    3, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32_USCALED,       2, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32_USCALED,          1, unsigned, FROM_32_USCALED, TO_32_USCALED )

ATTRIB( R32G32B32A32_SSCALED, 4, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32B32_SSCALED,    3, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32_SSCALED,       2, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32_SSCALED,          1, int, FROM_32_SSCALED, TO_32_SSCALED )

ATTRIB( R32G32B32A32_UNORM, 4, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32B32_UNORM,    3, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32_UNORM,       2, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32_UNORM,          1, unsigned, FROM_32_UNORM, TO_32_UNORM )

ATTRIB( R32G32B32A32_SNORM, 4, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32B32_SNORM,    3, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32_SNORM,       2, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32_SNORM,          1, int, FROM_32_SNORM, TO_32_SNORM )

ATTRIB( R16G16B16A16_USCALED, 4, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16B16_USCALED,    3, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16_USCALED,       2, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16_USCALED,          1, ushort, FROM_16_USCALED, TO_16_USCALED )

ATTRIB( R16G16B16A16_SSCALED, 4, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16B16_SSCALED,    3, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16_SSCALED,       2, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16_SSCALED,          1, short, FROM_16_SSCALED, TO_16_SSCALED )

ATTRIB( R16G16B16A16_UNORM, 4, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16B16_UNORM,    3, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16_UNORM,       2, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16_UNORM,          1, ushort, FROM_16_UNORM, TO_16_UNORM )

ATTRIB( R16G16B16A16_SNORM, 4, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16B16_SNORM,    3, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16_SNORM,       2, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16_SNORM,          1, short, FROM_16_SNORM, TO_16_SNORM )

ATTRIB( R8G8B8A8_USCALED, 4, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8B8_USCALED,   3, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8_USCALED,     2, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8_USCALED,       1, ubyte, FROM_8_USCALED, TO_8_USCALED )

ATTRIB( R8G8B8A8_SSCALED, 4, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8B8_SSCALED,   3, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8_SSCALED,     2, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8_SSCALED,       1, char, FROM_8_SSCALED, TO_8_SSCALED )

ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8B8_UNORM,   3, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8_UNORM,     2, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8_UNORM,       1, ubyte, FROM_8_UNORM, TO_8_UNORM )

ATTRIB( R8G8B8A8_SNORM, 4, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8B8_SNORM,   3, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8_SNORM,     2, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8_SNORM,       1, char, FROM_8_SNORM, TO_8_SNORM )

ATTRIB( A8R8G8B8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
//ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )


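/* B8G8R8A8: the fetch/emit pair below treats byte 0 as blue, byte 1 as
 * green, byte 2 as red and byte 3 as alpha, swizzling to/from the
 * float[4] RGBA attribute rather than going through the ATTRIB macro.
 */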
static void
fetch_B8G8R8A8_UNORM(const void *ptr, float *attrib)
{
   attrib[2] = FROM_8_UNORM(0);
   attrib[1] = FROM_8_UNORM(1);
   attrib[0] = FROM_8_UNORM(2);
   attrib[3] = FROM_8_UNORM(3);
}

static void
emit_B8G8R8A8_UNORM( const float *attrib, void *ptr)
{
   ubyte *out = (ubyte *)ptr;
   out[2] = TO_8_UNORM(attrib[0]);
   out[1] = TO_8_UNORM(attrib[1]);
   out[0] = TO_8_UNORM(attrib[2]);
   out[3] = TO_8_UNORM(attrib[3]);
}

static void
fetch_NULL( const void *ptr, float *attrib )
{
   attrib[0] = 0;
   attrib[1] = 0;
   attrib[2] = 0;
   attrib[3] = 1;
}

static void
emit_NULL( const float *attrib, void *ptr )
{
   /* do nothing is the only sensible option */
}

static fetch_func get_fetch_func( enum pipe_format format )
{
   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return fetch_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return fetch_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return fetch_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return fetch_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return fetch_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return fetch_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return fetch_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return fetch_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return fetch_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return fetch_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return fetch_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return fetch_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return fetch_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return fetch_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return fetch_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return fetch_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return fetch_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return fetch_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return fetch_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return fetch_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return fetch_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return fetch_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return fetch_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return fetch_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return fetch_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return fetch_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return fetch_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return fetch_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return fetch_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return fetch_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return fetch_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return fetch_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return fetch_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return fetch_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return fetch_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return fetch_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return fetch_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return fetch_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return fetch_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return fetch_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return fetch_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return fetch_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return fetch_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return fetch_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return fetch_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return fetch_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return fetch_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return fetch_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return fetch_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return fetch_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return fetch_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return fetch_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return fetch_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return fetch_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return fetch_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return fetch_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return fetch_A8R8G8B8_UNORM;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return fetch_B8G8R8A8_UNORM;

   default:
      assert(0);
      return fetch_NULL;
   }
}


static emit_func get_emit_func( enum pipe_format format )
{
   switch (format) {
   case PIPE_FORMAT_R64_FLOAT:
      return emit_R64_FLOAT;
   case PIPE_FORMAT_R64G64_FLOAT:
      return emit_R64G64_FLOAT;
   case PIPE_FORMAT_R64G64B64_FLOAT:
      return emit_R64G64B64_FLOAT;
   case PIPE_FORMAT_R64G64B64A64_FLOAT:
      return emit_R64G64B64A64_FLOAT;

   case PIPE_FORMAT_R32_FLOAT:
      return emit_R32_FLOAT;
   case PIPE_FORMAT_R32G32_FLOAT:
      return emit_R32G32_FLOAT;
   case PIPE_FORMAT_R32G32B32_FLOAT:
      return emit_R32G32B32_FLOAT;
   case PIPE_FORMAT_R32G32B32A32_FLOAT:
      return emit_R32G32B32A32_FLOAT;

   case PIPE_FORMAT_R32_UNORM:
      return emit_R32_UNORM;
   case PIPE_FORMAT_R32G32_UNORM:
      return emit_R32G32_UNORM;
   case PIPE_FORMAT_R32G32B32_UNORM:
      return emit_R32G32B32_UNORM;
   case PIPE_FORMAT_R32G32B32A32_UNORM:
      return emit_R32G32B32A32_UNORM;

   case PIPE_FORMAT_R32_USCALED:
      return emit_R32_USCALED;
   case PIPE_FORMAT_R32G32_USCALED:
      return emit_R32G32_USCALED;
   case PIPE_FORMAT_R32G32B32_USCALED:
      return emit_R32G32B32_USCALED;
   case PIPE_FORMAT_R32G32B32A32_USCALED:
      return emit_R32G32B32A32_USCALED;

   case PIPE_FORMAT_R32_SNORM:
      return emit_R32_SNORM;
   case PIPE_FORMAT_R32G32_SNORM:
      return emit_R32G32_SNORM;
   case PIPE_FORMAT_R32G32B32_SNORM:
      return emit_R32G32B32_SNORM;
   case PIPE_FORMAT_R32G32B32A32_SNORM:
      return emit_R32G32B32A32_SNORM;

   case PIPE_FORMAT_R32_SSCALED:
      return emit_R32_SSCALED;
   case PIPE_FORMAT_R32G32_SSCALED:
      return emit_R32G32_SSCALED;
   case PIPE_FORMAT_R32G32B32_SSCALED:
      return emit_R32G32B32_SSCALED;
   case PIPE_FORMAT_R32G32B32A32_SSCALED:
      return emit_R32G32B32A32_SSCALED;

   case PIPE_FORMAT_R16_UNORM:
      return emit_R16_UNORM;
   case PIPE_FORMAT_R16G16_UNORM:
      return emit_R16G16_UNORM;
   case PIPE_FORMAT_R16G16B16_UNORM:
      return emit_R16G16B16_UNORM;
   case PIPE_FORMAT_R16G16B16A16_UNORM:
      return emit_R16G16B16A16_UNORM;

   case PIPE_FORMAT_R16_USCALED:
      return emit_R16_USCALED;
   case PIPE_FORMAT_R16G16_USCALED:
      return emit_R16G16_USCALED;
   case PIPE_FORMAT_R16G16B16_USCALED:
      return emit_R16G16B16_USCALED;
   case PIPE_FORMAT_R16G16B16A16_USCALED:
      return emit_R16G16B16A16_USCALED;

   case PIPE_FORMAT_R16_SNORM:
      return emit_R16_SNORM;
   case PIPE_FORMAT_R16G16_SNORM:
      return emit_R16G16_SNORM;
   case PIPE_FORMAT_R16G16B16_SNORM:
      return emit_R16G16B16_SNORM;
   case PIPE_FORMAT_R16G16B16A16_SNORM:
      return emit_R16G16B16A16_SNORM;

   case PIPE_FORMAT_R16_SSCALED:
      return emit_R16_SSCALED;
   case PIPE_FORMAT_R16G16_SSCALED:
      return emit_R16G16_SSCALED;
   case PIPE_FORMAT_R16G16B16_SSCALED:
      return emit_R16G16B16_SSCALED;
   case PIPE_FORMAT_R16G16B16A16_SSCALED:
      return emit_R16G16B16A16_SSCALED;

   case PIPE_FORMAT_R8_UNORM:
      return emit_R8_UNORM;
   case PIPE_FORMAT_R8G8_UNORM:
      return emit_R8G8_UNORM;
   case PIPE_FORMAT_R8G8B8_UNORM:
      return emit_R8G8B8_UNORM;
   case PIPE_FORMAT_R8G8B8A8_UNORM:
      return emit_R8G8B8A8_UNORM;

   case PIPE_FORMAT_R8_USCALED:
      return emit_R8_USCALED;
   case PIPE_FORMAT_R8G8_USCALED:
      return emit_R8G8_USCALED;
   case PIPE_FORMAT_R8G8B8_USCALED:
      return emit_R8G8B8_USCALED;
   case PIPE_FORMAT_R8G8B8A8_USCALED:
      return emit_R8G8B8A8_USCALED;

   case PIPE_FORMAT_R8_SNORM:
      return emit_R8_SNORM;
   case PIPE_FORMAT_R8G8_SNORM:
      return emit_R8G8_SNORM;
   case PIPE_FORMAT_R8G8B8_SNORM:
      return emit_R8G8B8_SNORM;
   case PIPE_FORMAT_R8G8B8A8_SNORM:
      return emit_R8G8B8A8_SNORM;

   case PIPE_FORMAT_R8_SSCALED:
      return emit_R8_SSCALED;
   case PIPE_FORMAT_R8G8_SSCALED:
      return emit_R8G8_SSCALED;
   case PIPE_FORMAT_R8G8B8_SSCALED:
      return emit_R8G8B8_SSCALED;
   case PIPE_FORMAT_R8G8B8A8_SSCALED:
      return emit_R8G8B8A8_SSCALED;

   case PIPE_FORMAT_A8R8G8B8_UNORM:
      return emit_A8R8G8B8_UNORM;

   case PIPE_FORMAT_B8G8R8A8_UNORM:
      return emit_B8G8R8A8_UNORM;

   default:
      assert(0);
      return emit_NULL;
   }
}


/**
 * Translate 'count' vertices: fetch each indexed vertex from the bound
 * buffers, convert every attribute and emit it into the output buffer.
 */
static void generic_run_elts( struct translate *translate,
                              const unsigned *elts,
                              unsigned count,
                              void *output_buffer )
{
   struct translate_generic *tg = translate_generic(translate);
   char *vert = output_buffer;
   unsigned nr_attrs = tg->nr_attrib;
   unsigned attr;
   unsigned i;

   /* loop over the vertices; for each one, convert all of its
    * attributes (vertex shader inputs)
    */
   for (i = 0; i < count; i++) {
      unsigned elt = *elts++;

      for (attr = 0; attr < nr_attrs; attr++) {
         float data[4];

         const char *src = (tg->attrib[attr].input_ptr +
                            tg->attrib[attr].input_stride * elt);

         char *dst = (vert +
                      tg->attrib[attr].output_offset);

         tg->attrib[attr].fetch( src, data );
         tg->attrib[attr].emit( data, dst );
      }

      vert += tg->output_stride;
   }
}


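/* Attach (or re-attach) a source vertex buffer: every attribute that
 * fetches from buffer 'buf' gets its input pointer (base plus its own
 * input_offset) and stride updated.  This must happen before run_elts
 * reads from that buffer.
 */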
static void generic_set_buffer( struct translate *translate,
                                unsigned buf,
                                const void *ptr,
                                unsigned stride )
{
   struct translate_generic *tg = translate_generic(translate);
   unsigned i;

   for (i = 0; i < tg->nr_attrib; i++) {
      if (tg->attrib[i].buffer == buf) {
         tg->attrib[i].input_ptr = ((char *)ptr +
                                    tg->attrib[i].input_offset);
         tg->attrib[i].input_stride = stride;
      }
   }
}


static void generic_destroy( struct translate *translate )
{
   FREE(translate);
}

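/**
 * Create a generic translate object.  Each element describes one output
 * attribute: which buffer/offset/format it is fetched from and which
 * offset/format it is emitted at; 'output_stride' is the size of one
 * output vertex.
 *
 * A minimal usage sketch (vertex_data, vertex_stride and output_vertices
 * are assumed to be supplied by the caller):
 *
 *    struct translate_element el;
 *    struct translate *t;
 *    unsigned elts[3] = { 0, 1, 2 };
 *
 *    el.input_format  = PIPE_FORMAT_R8G8B8A8_UNORM;
 *    el.input_buffer  = 0;
 *    el.input_offset  = 0;
 *    el.output_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
 *    el.output_offset = 0;
 *
 *    t = translate_generic_create( 4 * sizeof(float), &el, 1 );
 *    t->set_buffer( t, 0, vertex_data, vertex_stride );
 *    t->run_elts( t, elts, 3, output_vertices );
 *    t->destroy( t );
 */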
struct translate *translate_generic_create( unsigned output_stride,
                                            const struct translate_element *elements,
                                            unsigned nr_elements )
{
   struct translate_generic *tg = CALLOC_STRUCT(translate_generic);
   unsigned i;

   if (tg == NULL)
      return NULL;

   tg->translate.destroy = generic_destroy;
   tg->translate.set_buffer = generic_set_buffer;
   tg->translate.run_elts = generic_run_elts;

   for (i = 0; i < nr_elements; i++) {
      tg->attrib[i].fetch = get_fetch_func(elements[i].input_format);
      tg->attrib[i].buffer = elements[i].input_buffer;
      tg->attrib[i].input_offset = elements[i].input_offset;

      tg->attrib[i].emit = get_emit_func(elements[i].output_format);
      tg->attrib[i].output_offset = elements[i].output_offset;
   }

   tg->nr_attrib = nr_elements;
   tg->output_stride = output_stride;

   return &tg->translate;
}