742f03b503183875ca68c8d32dac399ce1ca8727
[mesa.git] / src / gallium / auxiliary / translate / translate_generic.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /*
29 * Authors:
30 * Keith Whitwell <keith@tungstengraphics.com>
31 */
32
33 #include "util/u_memory.h"
34 #include "pipe/p_state.h"
35 #include "translate.h"
36
37
38 #define DRAW_DBG 0
39
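/* A fetch function reads one attribute from the input vertex and expands it
 * to a float[4]; an emit function converts a float[4] back into the output
 * format.  Components missing from the source are defaulted to (0, 0, 0, 1)
 * on fetch.
 */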
40 typedef void (*fetch_func)(const void *ptr, float *attrib);
41 typedef void (*emit_func)(const float *attrib, void *ptr);
42
43
44
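/* Private translate subclass: per-attribute fetch/emit functions plus the
 * input buffer binding (pointer, stride, offset, instance divisor) and the
 * destination offset within each output vertex.
 */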
45 struct translate_generic {
46 struct translate translate;
47
48 struct {
49 fetch_func fetch;
50 unsigned buffer;
51 unsigned input_offset;
52 unsigned instance_divisor;
53
54 emit_func emit;
55 unsigned output_offset;
56
57 char *input_ptr;
58 unsigned input_stride;
59
60 } attrib[PIPE_MAX_ATTRIBS];
61
62 unsigned nr_attrib;
63 };
64
65
66 static struct translate_generic *translate_generic( struct translate *translate )
67 {
68 return (struct translate_generic *)translate;
69 }
70
71 /**
72 * Fetch a float[4] vertex attribute from memory, doing format/type
73 * conversion as needed.
74 *
75 * This is probably needed/duplicated elsewhere, e.g. format
76 * conversion, texture sampling etc.
77 */
78 #define ATTRIB( NAME, SZ, TYPE, FROM, TO ) \
79 static void \
80 fetch_##NAME(const void *ptr, float *attrib) \
81 { \
82 const float defaults[4] = { 0.0f,0.0f,0.0f,1.0f }; \
83 unsigned i; \
84 \
85 for (i = 0; i < SZ; i++) { \
86 attrib[i] = FROM(i); \
87 } \
88 \
89 for (; i < 4; i++) { \
90 attrib[i] = defaults[i]; \
91 } \
92 } \
93 \
94 static void \
95 emit_##NAME(const float *attrib, void *ptr) \
96 { \
97 unsigned i; \
98 TYPE *out = (TYPE *)ptr; \
99 \
100 for (i = 0; i < SZ; i++) { \
101 out[i] = TO(attrib[i]); \
102 } \
103 }
104
105
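/* Per-type conversion helpers used by the ATTRIB macro above: FROM_* expands
 * component 'i' of the source pointer 'ptr' to a float, TO_* converts a float
 * back to the destination type.  UNORM/SNORM values are scaled to and from
 * the [0,1] / [-1,1] ranges; FIXED is 16.16 fixed point.
 */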
106 #define FROM_64_FLOAT(i) ((float) ((double *) ptr)[i])
107 #define FROM_32_FLOAT(i) (((float *) ptr)[i])
108
109 #define FROM_8_USCALED(i) ((float) ((unsigned char *) ptr)[i])
110 #define FROM_16_USCALED(i) ((float) ((unsigned short *) ptr)[i])
111 #define FROM_32_USCALED(i) ((float) ((unsigned int *) ptr)[i])
112
113 #define FROM_8_SSCALED(i) ((float) ((char *) ptr)[i])
114 #define FROM_16_SSCALED(i) ((float) ((short *) ptr)[i])
115 #define FROM_32_SSCALED(i) ((float) ((int *) ptr)[i])
116
117 #define FROM_8_UNORM(i) ((float) ((unsigned char *) ptr)[i] / 255.0f)
118 #define FROM_16_UNORM(i) ((float) ((unsigned short *) ptr)[i] / 65535.0f)
119 #define FROM_32_UNORM(i) ((float) ((unsigned int *) ptr)[i] / 4294967295.0f)
120
121 #define FROM_8_SNORM(i) ((float) ((char *) ptr)[i] / 127.0f)
122 #define FROM_16_SNORM(i) ((float) ((short *) ptr)[i] / 32767.0f)
123 #define FROM_32_SNORM(i) ((float) ((int *) ptr)[i] / 2147483647.0f)
124
125 #define FROM_32_FIXED(i) (((int *) ptr)[i] / 65536.0f)
126
127 #define TO_64_FLOAT(x) ((double) (x))
128 #define TO_32_FLOAT(x) (x)
129
130 #define TO_8_USCALED(x) ((unsigned char) (x))
131 #define TO_16_USCALED(x) ((unsigned short) (x))
132 #define TO_32_USCALED(x) ((unsigned int) (x))
133
134 #define TO_8_SSCALED(x) ((char) (x))
135 #define TO_16_SSCALED(x) ((short) (x))
136 #define TO_32_SSCALED(x) ((int) (x))
137
138 #define TO_8_UNORM(x) ((unsigned char) ((x) * 255.0f))
139 #define TO_16_UNORM(x) ((unsigned short) ((x) * 65535.0f))
140 #define TO_32_UNORM(x) ((unsigned int) ((x) * 4294967295.0f))
141
142 #define TO_8_SNORM(x) ((char) ((x) * 127.0f))
143 #define TO_16_SNORM(x) ((short) ((x) * 32767.0f))
144 #define TO_32_SNORM(x) ((int) ((x) * 2147483647.0f))
145
146 #define TO_32_FIXED(x) ((int) ((x) * 65536.0f))
147
148
149
150 ATTRIB( R64G64B64A64_FLOAT, 4, double, FROM_64_FLOAT, TO_64_FLOAT )
151 ATTRIB( R64G64B64_FLOAT, 3, double, FROM_64_FLOAT, TO_64_FLOAT )
152 ATTRIB( R64G64_FLOAT, 2, double, FROM_64_FLOAT, TO_64_FLOAT )
153 ATTRIB( R64_FLOAT, 1, double, FROM_64_FLOAT, TO_64_FLOAT )
154
155 ATTRIB( R32G32B32A32_FLOAT, 4, float, FROM_32_FLOAT, TO_32_FLOAT )
156 ATTRIB( R32G32B32_FLOAT, 3, float, FROM_32_FLOAT, TO_32_FLOAT )
157 ATTRIB( R32G32_FLOAT, 2, float, FROM_32_FLOAT, TO_32_FLOAT )
158 ATTRIB( R32_FLOAT, 1, float, FROM_32_FLOAT, TO_32_FLOAT )
159
160 ATTRIB( R32G32B32A32_USCALED, 4, unsigned, FROM_32_USCALED, TO_32_USCALED )
161 ATTRIB( R32G32B32_USCALED, 3, unsigned, FROM_32_USCALED, TO_32_USCALED )
162 ATTRIB( R32G32_USCALED, 2, unsigned, FROM_32_USCALED, TO_32_USCALED )
163 ATTRIB( R32_USCALED, 1, unsigned, FROM_32_USCALED, TO_32_USCALED )
164
165 ATTRIB( R32G32B32A32_SSCALED, 4, int, FROM_32_SSCALED, TO_32_SSCALED )
166 ATTRIB( R32G32B32_SSCALED, 3, int, FROM_32_SSCALED, TO_32_SSCALED )
167 ATTRIB( R32G32_SSCALED, 2, int, FROM_32_SSCALED, TO_32_SSCALED )
168 ATTRIB( R32_SSCALED, 1, int, FROM_32_SSCALED, TO_32_SSCALED )
169
170 ATTRIB( R32G32B32A32_UNORM, 4, unsigned, FROM_32_UNORM, TO_32_UNORM )
171 ATTRIB( R32G32B32_UNORM, 3, unsigned, FROM_32_UNORM, TO_32_UNORM )
172 ATTRIB( R32G32_UNORM, 2, unsigned, FROM_32_UNORM, TO_32_UNORM )
173 ATTRIB( R32_UNORM, 1, unsigned, FROM_32_UNORM, TO_32_UNORM )
174
175 ATTRIB( R32G32B32A32_SNORM, 4, int, FROM_32_SNORM, TO_32_SNORM )
176 ATTRIB( R32G32B32_SNORM, 3, int, FROM_32_SNORM, TO_32_SNORM )
177 ATTRIB( R32G32_SNORM, 2, int, FROM_32_SNORM, TO_32_SNORM )
178 ATTRIB( R32_SNORM, 1, int, FROM_32_SNORM, TO_32_SNORM )
179
180 ATTRIB( R16G16B16A16_USCALED, 4, ushort, FROM_16_USCALED, TO_16_USCALED )
181 ATTRIB( R16G16B16_USCALED, 3, ushort, FROM_16_USCALED, TO_16_USCALED )
182 ATTRIB( R16G16_USCALED, 2, ushort, FROM_16_USCALED, TO_16_USCALED )
183 ATTRIB( R16_USCALED, 1, ushort, FROM_16_USCALED, TO_16_USCALED )
184
185 ATTRIB( R16G16B16A16_SSCALED, 4, short, FROM_16_SSCALED, TO_16_SSCALED )
186 ATTRIB( R16G16B16_SSCALED, 3, short, FROM_16_SSCALED, TO_16_SSCALED )
187 ATTRIB( R16G16_SSCALED, 2, short, FROM_16_SSCALED, TO_16_SSCALED )
188 ATTRIB( R16_SSCALED, 1, short, FROM_16_SSCALED, TO_16_SSCALED )
189
190 ATTRIB( R16G16B16A16_UNORM, 4, ushort, FROM_16_UNORM, TO_16_UNORM )
191 ATTRIB( R16G16B16_UNORM, 3, ushort, FROM_16_UNORM, TO_16_UNORM )
192 ATTRIB( R16G16_UNORM, 2, ushort, FROM_16_UNORM, TO_16_UNORM )
193 ATTRIB( R16_UNORM, 1, ushort, FROM_16_UNORM, TO_16_UNORM )
194
195 ATTRIB( R16G16B16A16_SNORM, 4, short, FROM_16_SNORM, TO_16_SNORM )
196 ATTRIB( R16G16B16_SNORM, 3, short, FROM_16_SNORM, TO_16_SNORM )
197 ATTRIB( R16G16_SNORM, 2, short, FROM_16_SNORM, TO_16_SNORM )
198 ATTRIB( R16_SNORM, 1, short, FROM_16_SNORM, TO_16_SNORM )
199
200 ATTRIB( R8G8B8A8_USCALED, 4, ubyte, FROM_8_USCALED, TO_8_USCALED )
201 ATTRIB( R8G8B8_USCALED, 3, ubyte, FROM_8_USCALED, TO_8_USCALED )
202 ATTRIB( R8G8_USCALED, 2, ubyte, FROM_8_USCALED, TO_8_USCALED )
203 ATTRIB( R8_USCALED, 1, ubyte, FROM_8_USCALED, TO_8_USCALED )
204
205 ATTRIB( R8G8B8A8_SSCALED, 4, char, FROM_8_SSCALED, TO_8_SSCALED )
206 ATTRIB( R8G8B8_SSCALED, 3, char, FROM_8_SSCALED, TO_8_SSCALED )
207 ATTRIB( R8G8_SSCALED, 2, char, FROM_8_SSCALED, TO_8_SSCALED )
208 ATTRIB( R8_SSCALED, 1, char, FROM_8_SSCALED, TO_8_SSCALED )
209
210 ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
211 ATTRIB( R8G8B8_UNORM, 3, ubyte, FROM_8_UNORM, TO_8_UNORM )
212 ATTRIB( R8G8_UNORM, 2, ubyte, FROM_8_UNORM, TO_8_UNORM )
213 ATTRIB( R8_UNORM, 1, ubyte, FROM_8_UNORM, TO_8_UNORM )
214
215 ATTRIB( R8G8B8A8_SNORM, 4, char, FROM_8_SNORM, TO_8_SNORM )
216 ATTRIB( R8G8B8_SNORM, 3, char, FROM_8_SNORM, TO_8_SNORM )
217 ATTRIB( R8G8_SNORM, 2, char, FROM_8_SNORM, TO_8_SNORM )
218 ATTRIB( R8_SNORM, 1, char, FROM_8_SNORM, TO_8_SNORM )
219
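/* Note: A8R8G8B8 is fetched through the generic macro, so no component
 * swizzle is applied here; only B8G8R8A8 below gets an explicit swizzle.
 */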
220 ATTRIB( A8R8G8B8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
221 /*ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )*/
222
223 ATTRIB( R32G32B32A32_FIXED, 4, int, FROM_32_FIXED, TO_32_FIXED )
224 ATTRIB( R32G32B32_FIXED, 3, int, FROM_32_FIXED, TO_32_FIXED )
225 ATTRIB( R32G32_FIXED, 2, int, FROM_32_FIXED, TO_32_FIXED )
226 ATTRIB( R32_FIXED, 1, int, FROM_32_FIXED, TO_32_FIXED )
227
228
229
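/* B8G8R8A8 stores blue first in memory, so fetch/emit swap the red and blue
 * channels explicitly instead of using the generic ATTRIB macro.
 */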
230 static void
231 fetch_B8G8R8A8_UNORM(const void *ptr, float *attrib)
232 {
233 attrib[2] = FROM_8_UNORM(0);
234 attrib[1] = FROM_8_UNORM(1);
235 attrib[0] = FROM_8_UNORM(2);
236 attrib[3] = FROM_8_UNORM(3);
237 }
238
239 static void
240 emit_B8G8R8A8_UNORM( const float *attrib, void *ptr)
241 {
242 ubyte *out = (ubyte *)ptr;
243 out[2] = TO_8_UNORM(attrib[0]);
244 out[1] = TO_8_UNORM(attrib[1]);
245 out[0] = TO_8_UNORM(attrib[2]);
246 out[3] = TO_8_UNORM(attrib[3]);
247 }
248
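/* Fallbacks for unrecognized formats: fetch returns the default attribute
 * (0, 0, 0, 1) and emit writes nothing.
 */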
249 static void
250 fetch_NULL( const void *ptr, float *attrib )
251 {
252 attrib[0] = 0;
253 attrib[1] = 0;
254 attrib[2] = 0;
255 attrib[3] = 1;
256 }
257
258 static void
259 emit_NULL( const float *attrib, void *ptr )
260 {
261 /* doing nothing is the only sensible option */
262 }
263
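/**
 * Return the fetch function for the given input vertex format.
 */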
264 static fetch_func get_fetch_func( enum pipe_format format )
265 {
266 switch (format) {
267 case PIPE_FORMAT_R64_FLOAT:
268 return &fetch_R64_FLOAT;
269 case PIPE_FORMAT_R64G64_FLOAT:
270 return &fetch_R64G64_FLOAT;
271 case PIPE_FORMAT_R64G64B64_FLOAT:
272 return &fetch_R64G64B64_FLOAT;
273 case PIPE_FORMAT_R64G64B64A64_FLOAT:
274 return &fetch_R64G64B64A64_FLOAT;
275
276 case PIPE_FORMAT_R32_FLOAT:
277 return &fetch_R32_FLOAT;
278 case PIPE_FORMAT_R32G32_FLOAT:
279 return &fetch_R32G32_FLOAT;
280 case PIPE_FORMAT_R32G32B32_FLOAT:
281 return &fetch_R32G32B32_FLOAT;
282 case PIPE_FORMAT_R32G32B32A32_FLOAT:
283 return &fetch_R32G32B32A32_FLOAT;
284
285 case PIPE_FORMAT_R32_UNORM:
286 return &fetch_R32_UNORM;
287 case PIPE_FORMAT_R32G32_UNORM:
288 return &fetch_R32G32_UNORM;
289 case PIPE_FORMAT_R32G32B32_UNORM:
290 return &fetch_R32G32B32_UNORM;
291 case PIPE_FORMAT_R32G32B32A32_UNORM:
292 return &fetch_R32G32B32A32_UNORM;
293
294 case PIPE_FORMAT_R32_USCALED:
295 return &fetch_R32_USCALED;
296 case PIPE_FORMAT_R32G32_USCALED:
297 return &fetch_R32G32_USCALED;
298 case PIPE_FORMAT_R32G32B32_USCALED:
299 return &fetch_R32G32B32_USCALED;
300 case PIPE_FORMAT_R32G32B32A32_USCALED:
301 return &fetch_R32G32B32A32_USCALED;
302
303 case PIPE_FORMAT_R32_SNORM:
304 return &fetch_R32_SNORM;
305 case PIPE_FORMAT_R32G32_SNORM:
306 return &fetch_R32G32_SNORM;
307 case PIPE_FORMAT_R32G32B32_SNORM:
308 return &fetch_R32G32B32_SNORM;
309 case PIPE_FORMAT_R32G32B32A32_SNORM:
310 return &fetch_R32G32B32A32_SNORM;
311
312 case PIPE_FORMAT_R32_SSCALED:
313 return &fetch_R32_SSCALED;
314 case PIPE_FORMAT_R32G32_SSCALED:
315 return &fetch_R32G32_SSCALED;
316 case PIPE_FORMAT_R32G32B32_SSCALED:
317 return &fetch_R32G32B32_SSCALED;
318 case PIPE_FORMAT_R32G32B32A32_SSCALED:
319 return &fetch_R32G32B32A32_SSCALED;
320
321 case PIPE_FORMAT_R16_UNORM:
322 return &fetch_R16_UNORM;
323 case PIPE_FORMAT_R16G16_UNORM:
324 return &fetch_R16G16_UNORM;
325 case PIPE_FORMAT_R16G16B16_UNORM:
326 return &fetch_R16G16B16_UNORM;
327 case PIPE_FORMAT_R16G16B16A16_UNORM:
328 return &fetch_R16G16B16A16_UNORM;
329
330 case PIPE_FORMAT_R16_USCALED:
331 return &fetch_R16_USCALED;
332 case PIPE_FORMAT_R16G16_USCALED:
333 return &fetch_R16G16_USCALED;
334 case PIPE_FORMAT_R16G16B16_USCALED:
335 return &fetch_R16G16B16_USCALED;
336 case PIPE_FORMAT_R16G16B16A16_USCALED:
337 return &fetch_R16G16B16A16_USCALED;
338
339 case PIPE_FORMAT_R16_SNORM:
340 return &fetch_R16_SNORM;
341 case PIPE_FORMAT_R16G16_SNORM:
342 return &fetch_R16G16_SNORM;
343 case PIPE_FORMAT_R16G16B16_SNORM:
344 return &fetch_R16G16B16_SNORM;
345 case PIPE_FORMAT_R16G16B16A16_SNORM:
346 return &fetch_R16G16B16A16_SNORM;
347
348 case PIPE_FORMAT_R16_SSCALED:
349 return &fetch_R16_SSCALED;
350 case PIPE_FORMAT_R16G16_SSCALED:
351 return &fetch_R16G16_SSCALED;
352 case PIPE_FORMAT_R16G16B16_SSCALED:
353 return &fetch_R16G16B16_SSCALED;
354 case PIPE_FORMAT_R16G16B16A16_SSCALED:
355 return &fetch_R16G16B16A16_SSCALED;
356
357 case PIPE_FORMAT_R8_UNORM:
358 return &fetch_R8_UNORM;
359 case PIPE_FORMAT_R8G8_UNORM:
360 return &fetch_R8G8_UNORM;
361 case PIPE_FORMAT_R8G8B8_UNORM:
362 return &fetch_R8G8B8_UNORM;
363 case PIPE_FORMAT_R8G8B8A8_UNORM:
364 return &fetch_R8G8B8A8_UNORM;
365
366 case PIPE_FORMAT_R8_USCALED:
367 return &fetch_R8_USCALED;
368 case PIPE_FORMAT_R8G8_USCALED:
369 return &fetch_R8G8_USCALED;
370 case PIPE_FORMAT_R8G8B8_USCALED:
371 return &fetch_R8G8B8_USCALED;
372 case PIPE_FORMAT_R8G8B8A8_USCALED:
373 return &fetch_R8G8B8A8_USCALED;
374
375 case PIPE_FORMAT_R8_SNORM:
376 return &fetch_R8_SNORM;
377 case PIPE_FORMAT_R8G8_SNORM:
378 return &fetch_R8G8_SNORM;
379 case PIPE_FORMAT_R8G8B8_SNORM:
380 return &fetch_R8G8B8_SNORM;
381 case PIPE_FORMAT_R8G8B8A8_SNORM:
382 return &fetch_R8G8B8A8_SNORM;
383
384 case PIPE_FORMAT_R8_SSCALED:
385 return &fetch_R8_SSCALED;
386 case PIPE_FORMAT_R8G8_SSCALED:
387 return &fetch_R8G8_SSCALED;
388 case PIPE_FORMAT_R8G8B8_SSCALED:
389 return &fetch_R8G8B8_SSCALED;
390 case PIPE_FORMAT_R8G8B8A8_SSCALED:
391 return &fetch_R8G8B8A8_SSCALED;
392
393 case PIPE_FORMAT_A8R8G8B8_UNORM:
394 return &fetch_A8R8G8B8_UNORM;
395
396 case PIPE_FORMAT_B8G8R8A8_UNORM:
397 return &fetch_B8G8R8A8_UNORM;
398
399 case PIPE_FORMAT_R32_FIXED:
400 return &fetch_R32_FIXED;
401 case PIPE_FORMAT_R32G32_FIXED:
402 return &fetch_R32G32_FIXED;
403 case PIPE_FORMAT_R32G32B32_FIXED:
404 return &fetch_R32G32B32_FIXED;
405 case PIPE_FORMAT_R32G32B32A32_FIXED:
406 return &fetch_R32G32B32A32_FIXED;
407
408 default:
409 assert(0);
410 return &fetch_NULL;
411 }
412 }
413
414
415
416
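/**
 * Return the emit function for the given output vertex format.
 */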
417 static emit_func get_emit_func( enum pipe_format format )
418 {
419 /* silence warnings */
420 (void) emit_R32G32B32A32_FIXED;
421 (void) emit_R32G32B32_FIXED;
422 (void) emit_R32G32_FIXED;
423 (void) emit_R32_FIXED;
424
425 switch (format) {
426 case PIPE_FORMAT_R64_FLOAT:
427 return &emit_R64_FLOAT;
428 case PIPE_FORMAT_R64G64_FLOAT:
429 return &emit_R64G64_FLOAT;
430 case PIPE_FORMAT_R64G64B64_FLOAT:
431 return &emit_R64G64B64_FLOAT;
432 case PIPE_FORMAT_R64G64B64A64_FLOAT:
433 return &emit_R64G64B64A64_FLOAT;
434
435 case PIPE_FORMAT_R32_FLOAT:
436 return &emit_R32_FLOAT;
437 case PIPE_FORMAT_R32G32_FLOAT:
438 return &emit_R32G32_FLOAT;
439 case PIPE_FORMAT_R32G32B32_FLOAT:
440 return &emit_R32G32B32_FLOAT;
441 case PIPE_FORMAT_R32G32B32A32_FLOAT:
442 return &emit_R32G32B32A32_FLOAT;
443
444 case PIPE_FORMAT_R32_UNORM:
445 return &emit_R32_UNORM;
446 case PIPE_FORMAT_R32G32_UNORM:
447 return &emit_R32G32_UNORM;
448 case PIPE_FORMAT_R32G32B32_UNORM:
449 return &emit_R32G32B32_UNORM;
450 case PIPE_FORMAT_R32G32B32A32_UNORM:
451 return &emit_R32G32B32A32_UNORM;
452
453 case PIPE_FORMAT_R32_USCALED:
454 return &emit_R32_USCALED;
455 case PIPE_FORMAT_R32G32_USCALED:
456 return &emit_R32G32_USCALED;
457 case PIPE_FORMAT_R32G32B32_USCALED:
458 return &emit_R32G32B32_USCALED;
459 case PIPE_FORMAT_R32G32B32A32_USCALED:
460 return &emit_R32G32B32A32_USCALED;
461
462 case PIPE_FORMAT_R32_SNORM:
463 return &emit_R32_SNORM;
464 case PIPE_FORMAT_R32G32_SNORM:
465 return &emit_R32G32_SNORM;
466 case PIPE_FORMAT_R32G32B32_SNORM:
467 return &emit_R32G32B32_SNORM;
468 case PIPE_FORMAT_R32G32B32A32_SNORM:
469 return &emit_R32G32B32A32_SNORM;
470
471 case PIPE_FORMAT_R32_SSCALED:
472 return &emit_R32_SSCALED;
473 case PIPE_FORMAT_R32G32_SSCALED:
474 return &emit_R32G32_SSCALED;
475 case PIPE_FORMAT_R32G32B32_SSCALED:
476 return &emit_R32G32B32_SSCALED;
477 case PIPE_FORMAT_R32G32B32A32_SSCALED:
478 return &emit_R32G32B32A32_SSCALED;
479
480 case PIPE_FORMAT_R16_UNORM:
481 return &emit_R16_UNORM;
482 case PIPE_FORMAT_R16G16_UNORM:
483 return &emit_R16G16_UNORM;
484 case PIPE_FORMAT_R16G16B16_UNORM:
485 return &emit_R16G16B16_UNORM;
486 case PIPE_FORMAT_R16G16B16A16_UNORM:
487 return &emit_R16G16B16A16_UNORM;
488
489 case PIPE_FORMAT_R16_USCALED:
490 return &emit_R16_USCALED;
491 case PIPE_FORMAT_R16G16_USCALED:
492 return &emit_R16G16_USCALED;
493 case PIPE_FORMAT_R16G16B16_USCALED:
494 return &emit_R16G16B16_USCALED;
495 case PIPE_FORMAT_R16G16B16A16_USCALED:
496 return &emit_R16G16B16A16_USCALED;
497
498 case PIPE_FORMAT_R16_SNORM:
499 return &emit_R16_SNORM;
500 case PIPE_FORMAT_R16G16_SNORM:
501 return &emit_R16G16_SNORM;
502 case PIPE_FORMAT_R16G16B16_SNORM:
503 return &emit_R16G16B16_SNORM;
504 case PIPE_FORMAT_R16G16B16A16_SNORM:
505 return &emit_R16G16B16A16_SNORM;
506
507 case PIPE_FORMAT_R16_SSCALED:
508 return &emit_R16_SSCALED;
509 case PIPE_FORMAT_R16G16_SSCALED:
510 return &emit_R16G16_SSCALED;
511 case PIPE_FORMAT_R16G16B16_SSCALED:
512 return &emit_R16G16B16_SSCALED;
513 case PIPE_FORMAT_R16G16B16A16_SSCALED:
514 return &emit_R16G16B16A16_SSCALED;
515
516 case PIPE_FORMAT_R8_UNORM:
517 return &emit_R8_UNORM;
518 case PIPE_FORMAT_R8G8_UNORM:
519 return &emit_R8G8_UNORM;
520 case PIPE_FORMAT_R8G8B8_UNORM:
521 return &emit_R8G8B8_UNORM;
522 case PIPE_FORMAT_R8G8B8A8_UNORM:
523 return &emit_R8G8B8A8_UNORM;
524
525 case PIPE_FORMAT_R8_USCALED:
526 return &emit_R8_USCALED;
527 case PIPE_FORMAT_R8G8_USCALED:
528 return &emit_R8G8_USCALED;
529 case PIPE_FORMAT_R8G8B8_USCALED:
530 return &emit_R8G8B8_USCALED;
531 case PIPE_FORMAT_R8G8B8A8_USCALED:
532 return &emit_R8G8B8A8_USCALED;
533
534 case PIPE_FORMAT_R8_SNORM:
535 return &emit_R8_SNORM;
536 case PIPE_FORMAT_R8G8_SNORM:
537 return &emit_R8G8_SNORM;
538 case PIPE_FORMAT_R8G8B8_SNORM:
539 return &emit_R8G8B8_SNORM;
540 case PIPE_FORMAT_R8G8B8A8_SNORM:
541 return &emit_R8G8B8A8_SNORM;
542
543 case PIPE_FORMAT_R8_SSCALED:
544 return &emit_R8_SSCALED;
545 case PIPE_FORMAT_R8G8_SSCALED:
546 return &emit_R8G8_SSCALED;
547 case PIPE_FORMAT_R8G8B8_SSCALED:
548 return &emit_R8G8B8_SSCALED;
549 case PIPE_FORMAT_R8G8B8A8_SSCALED:
550 return &emit_R8G8B8A8_SSCALED;
551
552 case PIPE_FORMAT_A8R8G8B8_UNORM:
553 return &emit_A8R8G8B8_UNORM;
554
555 case PIPE_FORMAT_B8G8R8A8_UNORM:
556 return &emit_B8G8R8A8_UNORM;
557
558 default:
559 assert(0);
560 return &emit_NULL;
561 }
562 }
563
564
565
566 /**
567 * Fetch, convert and emit attributes for 'count' vertices indexed by 'elts'.
568 */
569 static void PIPE_CDECL generic_run_elts( struct translate *translate,
570 const unsigned *elts,
571 unsigned count,
572 unsigned instance_id,
573 void *output_buffer )
574 {
575 struct translate_generic *tg = translate_generic(translate);
576 char *vert = output_buffer;
577 unsigned nr_attrs = tg->nr_attrib;
578 unsigned attr;
579 unsigned i;
580
581 /* loop over the vertices; for each, fetch/convert/emit every attribute
582 */
583 for (i = 0; i < count; i++) {
584 unsigned elt = *elts++;
585
586 for (attr = 0; attr < nr_attrs; attr++) {
587 float data[4];
588 const char *src;
589
590 char *dst = (vert +
591 tg->attrib[attr].output_offset);
592
593 if (tg->attrib[attr].instance_divisor) {
594 src = tg->attrib[attr].input_ptr +
595 tg->attrib[attr].input_stride *
596 (instance_id / tg->attrib[attr].instance_divisor);
597 } else {
598 src = tg->attrib[attr].input_ptr +
599 tg->attrib[attr].input_stride * elt;
600 }
601
602 tg->attrib[attr].fetch( src, data );
603
604 if (0) debug_printf("vert %d/%d attr %d: %f %f %f %f\n",
605 i, elt, attr, data[0], data[1], data[2], data[3]);
606
607 tg->attrib[attr].emit( data, dst );
608 }
609
610 vert += tg->translate.key.output_stride;
611 }
612 }
613
614
615
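/**
 * As generic_run_elts(), but for the linear range of vertices
 * [start, start + count).
 */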
616 static void PIPE_CDECL generic_run( struct translate *translate,
617 unsigned start,
618 unsigned count,
619 unsigned instance_id,
620 void *output_buffer )
621 {
622 struct translate_generic *tg = translate_generic(translate);
623 char *vert = output_buffer;
624 unsigned nr_attrs = tg->nr_attrib;
625 unsigned attr;
626 unsigned i;
627
629 /* loop over the vertices; for each, fetch/convert/emit every attribute
630 */
630 for (i = 0; i < count; i++) {
631 unsigned elt = start + i;
632
633 for (attr = 0; attr < nr_attrs; attr++) {
634 float data[4];
635 const char *src;
636
637 char *dst = (vert +
638 tg->attrib[attr].output_offset);
639
640 if (tg->attrib[attr].instance_divisor) {
641 src = tg->attrib[attr].input_ptr +
642 tg->attrib[attr].input_stride *
643 (instance_id / tg->attrib[attr].instance_divisor);
644 } else {
645 src = tg->attrib[attr].input_ptr +
646 tg->attrib[attr].input_stride * elt;
647 }
648
649 tg->attrib[attr].fetch( src, data );
650
651 if (0) debug_printf("vert %d attr %d: %f %f %f %f\n",
652 i, attr, data[0], data[1], data[2], data[3]);
653
654 tg->attrib[attr].emit( data, dst );
655 }
656
657 vert += tg->translate.key.output_stride;
658 }
659 }
660
661
662
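/**
 * Bind a vertex buffer: record the base pointer and stride for every
 * attribute that sources from buffer 'buf'.
 */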
663 static void generic_set_buffer( struct translate *translate,
664 unsigned buf,
665 const void *ptr,
666 unsigned stride )
667 {
668 struct translate_generic *tg = translate_generic(translate);
669 unsigned i;
670
671 for (i = 0; i < tg->nr_attrib; i++) {
672 if (tg->attrib[i].buffer == buf) {
673 tg->attrib[i].input_ptr = ((char *)ptr +
674 tg->attrib[i].input_offset);
675 tg->attrib[i].input_stride = stride;
676 }
677 }
678 }
679
680
681 static void generic_release( struct translate *translate )
682 {
683 /* Refcount?
684 */
685 FREE(translate);
686 }
687
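/**
 * Create a translate object that fetches, converts and interleaves vertex
 * attributes as described by 'key', using the per-format C paths above.
 */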
688 struct translate *translate_generic_create( const struct translate_key *key )
689 {
690 struct translate_generic *tg = CALLOC_STRUCT(translate_generic);
691 unsigned i;
692
693 if (tg == NULL)
694 return NULL;
695
696 tg->translate.key = *key;
697 tg->translate.release = generic_release;
698 tg->translate.set_buffer = generic_set_buffer;
699 tg->translate.run_elts = generic_run_elts;
700 tg->translate.run = generic_run;
701
702 for (i = 0; i < key->nr_elements; i++) {
703
704 tg->attrib[i].fetch = get_fetch_func(key->element[i].input_format);
705 tg->attrib[i].buffer = key->element[i].input_buffer;
706 tg->attrib[i].input_offset = key->element[i].input_offset;
707 tg->attrib[i].instance_divisor = key->element[i].instance_divisor;
708
709 tg->attrib[i].emit = get_emit_func(key->element[i].output_format);
710 tg->attrib[i].output_offset = key->element[i].output_offset;
711
712 }
713
714 tg->nr_attrib = key->nr_elements;
715
716
717 return &tg->translate;
718 }
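/* Illustrative sketch only (not part of the original file): one plausible way
 * a caller might drive this module, using just the interface visible above
 * (translate_generic_create, set_buffer, run, release).  The function name,
 * parameters and data layout below are hypothetical; the block is guarded by
 * #if 0 so it serves purely as documentation.
 */
#if 0
static void example_usage(const float *positions, unsigned vertex_count,
                          void *output_buffer)
{
   struct translate_key key;
   struct translate *t;

   /* one element: fetch float3 positions from buffer 0, emit them as
    * float4 at offset 0 of each 16-byte output vertex
    */
   key.nr_elements = 1;
   key.element[0].input_format = PIPE_FORMAT_R32G32B32_FLOAT;
   key.element[0].input_buffer = 0;
   key.element[0].input_offset = 0;
   key.element[0].instance_divisor = 0;
   key.element[0].output_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
   key.element[0].output_offset = 0;
   key.output_stride = 4 * sizeof(float);

   t = translate_generic_create(&key);
   if (!t)
      return;

   /* bind buffer 0, then translate a linear run of vertices */
   t->set_buffer(t, 0, positions, 3 * sizeof(float));
   t->run(t, 0, vertex_count, 0, output_buffer);

   t->release(t);
}
#endif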