Merge branch '7.8'
[mesa.git] / src / gallium / auxiliary / translate / translate_generic.c
1 /**************************************************************************
2 *
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28 /*
29 * Authors:
30 * Keith Whitwell <keith@tungstengraphics.com>
31 */
32
33 #include "util/u_memory.h"
34 #include "util/u_math.h"
35 #include "pipe/p_state.h"
36 #include "translate.h"
37
38
39 #define DRAW_DBG 0
40
41 typedef void (*fetch_func)(const void *ptr, float *attrib);
42 typedef void (*emit_func)(const float *attrib, void *ptr);
43
44
45
/**
 * Generic (non-JIT) implementation of the translate interface.
 * Holds a per-attribute table describing how to fetch each input
 * element, convert it through float[4], and emit it to the output vertex.
 */
struct translate_generic {
   struct translate translate;          /* base object; must be first (see cast helper) */

   struct {
      enum translate_element_type type; /* TRANSLATE_ELEMENT_NORMAL or instance-id element */

      /* input side */
      fetch_func fetch;                 /* converts source format -> float[4] */
      unsigned buffer;                  /* index of the source vertex buffer */
      unsigned input_offset;            /* byte offset of this attrib within a source vertex */
      unsigned instance_divisor;        /* 0 = per-vertex; else advance once per N instances */

      /* output side */
      emit_func emit;                   /* converts float[4] -> output format */
      unsigned output_offset;           /* byte offset of this attrib within an output vertex */

      /* filled in by generic_set_buffer() */
      char *input_ptr;                  /* buffer base pointer + input_offset */
      unsigned input_stride;            /* byte stride between source vertices */
      unsigned max_index;               /* highest fetchable index (clamp in run_elts) */

   } attrib[PIPE_MAX_ATTRIBS];

   unsigned nr_attrib;                  /* number of valid entries in attrib[] */
};
68
69
/* Downcast from the public interface to this implementation; valid because
 * struct translate is the first member of struct translate_generic.
 */
static struct translate_generic *translate_generic( struct translate *translate )
{
   return (struct translate_generic *)translate;
}
74
75 /**
76 * Fetch a float[4] vertex attribute from memory, doing format/type
77 * conversion as needed.
78 *
 * This is probably needed/duplicated elsewhere, e.g. format
80 * conversion, texture sampling etc.
81 */
/*
 * ATTRIB(NAME, SZ, TYPE, FROM, TO) expands to a matched fetch/emit pair:
 *
 *  - fetch_NAME: reads SZ components via the FROM(i) accessor into a
 *    float[4], padding the unread components with the (0,0,0,1) defaults.
 *
 *  - emit_NAME: writes the first SZ components of a float[4] through the
 *    TO(x) converter into an array of TYPE.
 */
#define ATTRIB( NAME, SZ, TYPE, FROM, TO )	\
static void						\
fetch_##NAME(const void *ptr, float *attrib)	\
{							\
   const float defaults[4] = { 0.0f,0.0f,0.0f,1.0f };	\
   unsigned i;						\
							\
   for (i = 0; i < SZ; i++) {				\
      attrib[i] = FROM(i);				\
   }							\
							\
   for (; i < 4; i++) {				\
      attrib[i] = defaults[i];			\
   }							\
}							\
							\
static void						\
emit_##NAME(const float *attrib, void *ptr)		\
{							\
   unsigned i;						\
   TYPE *out = (TYPE *)ptr;				\
							\
   for (i = 0; i < SZ; i++) {				\
      out[i] = TO(attrib[i]);				\
   }							\
}
108
109
/* Fetch-side converters: read component i of the source attribute through
 * 'ptr' (captured from the enclosing fetch function) and widen to float.
 * UNORM/SNORM variants map the full integer range onto [0,1] / [-1,1];
 * FIXED is 16.16 fixed point.
 */
#define FROM_64_FLOAT(i)   ((float) ((double *) ptr)[i])
#define FROM_32_FLOAT(i)   (((float *) ptr)[i])

#define FROM_8_USCALED(i)  ((float) ((unsigned char *) ptr)[i])
#define FROM_16_USCALED(i) ((float) ((unsigned short *) ptr)[i])
#define FROM_32_USCALED(i) ((float) ((unsigned int *) ptr)[i])

#define FROM_8_SSCALED(i)  ((float) ((char *) ptr)[i])
#define FROM_16_SSCALED(i) ((float) ((short *) ptr)[i])
#define FROM_32_SSCALED(i) ((float) ((int *) ptr)[i])

#define FROM_8_UNORM(i)    ((float) ((unsigned char *) ptr)[i] / 255.0f)
#define FROM_16_UNORM(i)   ((float) ((unsigned short *) ptr)[i] / 65535.0f)
#define FROM_32_UNORM(i)   ((float) ((unsigned int *) ptr)[i] / 4294967295.0f)

#define FROM_8_SNORM(i)    ((float) ((char *) ptr)[i] / 127.0f)
#define FROM_16_SNORM(i)   ((float) ((short *) ptr)[i] / 32767.0f)
#define FROM_32_SNORM(i)   ((float) ((int *) ptr)[i] / 2147483647.0f)

#define FROM_32_FIXED(i)   (((int *) ptr)[i] / 65536.0f)

/* Emit-side converters: narrow a float component back to the output type.
 * NOTE(review): no clamping is performed, so out-of-range inputs truncate
 * or wrap in the integer cast — confirm callers guarantee the range.
 */
#define TO_64_FLOAT(x)   ((double) x)
#define TO_32_FLOAT(x)   (x)

#define TO_8_USCALED(x)  ((unsigned char) x)
#define TO_16_USCALED(x) ((unsigned short) x)
#define TO_32_USCALED(x) ((unsigned int) x)

#define TO_8_SSCALED(x)  ((char) x)
#define TO_16_SSCALED(x) ((short) x)
#define TO_32_SSCALED(x) ((int) x)

#define TO_8_UNORM(x)    ((unsigned char) (x * 255.0f))
#define TO_16_UNORM(x)   ((unsigned short) (x * 65535.0f))
#define TO_32_UNORM(x)   ((unsigned int) (x * 4294967295.0f))

#define TO_8_SNORM(x)    ((char) (x * 127.0f))
#define TO_16_SNORM(x)   ((short) (x * 32767.0f))
#define TO_32_SNORM(x)   ((int) (x * 2147483647.0f))

#define TO_32_FIXED(x)   ((int) (x * 65536.0f))
151
152
153
/* Instantiate a fetch/emit pair for every supported straight-through
 * vertex format (components stored in R,G,B,A memory order).
 */
ATTRIB( R64G64B64A64_FLOAT, 4, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64B64_FLOAT,    3, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64G64_FLOAT,       2, double, FROM_64_FLOAT, TO_64_FLOAT )
ATTRIB( R64_FLOAT,          1, double, FROM_64_FLOAT, TO_64_FLOAT )

ATTRIB( R32G32B32A32_FLOAT, 4, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32B32_FLOAT,    3, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32G32_FLOAT,       2, float, FROM_32_FLOAT, TO_32_FLOAT )
ATTRIB( R32_FLOAT,          1, float, FROM_32_FLOAT, TO_32_FLOAT )

ATTRIB( R32G32B32A32_USCALED, 4, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32B32_USCALED,    3, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32G32_USCALED,       2, unsigned, FROM_32_USCALED, TO_32_USCALED )
ATTRIB( R32_USCALED,          1, unsigned, FROM_32_USCALED, TO_32_USCALED )

ATTRIB( R32G32B32A32_SSCALED, 4, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32B32_SSCALED,    3, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32G32_SSCALED,       2, int, FROM_32_SSCALED, TO_32_SSCALED )
ATTRIB( R32_SSCALED,          1, int, FROM_32_SSCALED, TO_32_SSCALED )

ATTRIB( R32G32B32A32_UNORM, 4, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32B32_UNORM,    3, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32G32_UNORM,       2, unsigned, FROM_32_UNORM, TO_32_UNORM )
ATTRIB( R32_UNORM,          1, unsigned, FROM_32_UNORM, TO_32_UNORM )

ATTRIB( R32G32B32A32_SNORM, 4, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32B32_SNORM,    3, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32G32_SNORM,       2, int, FROM_32_SNORM, TO_32_SNORM )
ATTRIB( R32_SNORM,          1, int, FROM_32_SNORM, TO_32_SNORM )

ATTRIB( R16G16B16A16_USCALED, 4, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16B16_USCALED,    3, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16G16_USCALED,       2, ushort, FROM_16_USCALED, TO_16_USCALED )
ATTRIB( R16_USCALED,          1, ushort, FROM_16_USCALED, TO_16_USCALED )

ATTRIB( R16G16B16A16_SSCALED, 4, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16B16_SSCALED,    3, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16G16_SSCALED,       2, short, FROM_16_SSCALED, TO_16_SSCALED )
ATTRIB( R16_SSCALED,          1, short, FROM_16_SSCALED, TO_16_SSCALED )

ATTRIB( R16G16B16A16_UNORM, 4, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16B16_UNORM,    3, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16G16_UNORM,       2, ushort, FROM_16_UNORM, TO_16_UNORM )
ATTRIB( R16_UNORM,          1, ushort, FROM_16_UNORM, TO_16_UNORM )

ATTRIB( R16G16B16A16_SNORM, 4, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16B16_SNORM,    3, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16G16_SNORM,       2, short, FROM_16_SNORM, TO_16_SNORM )
ATTRIB( R16_SNORM,          1, short, FROM_16_SNORM, TO_16_SNORM )

ATTRIB( R8G8B8A8_USCALED, 4, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8B8_USCALED,   3, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8G8_USCALED,     2, ubyte, FROM_8_USCALED, TO_8_USCALED )
ATTRIB( R8_USCALED,       1, ubyte, FROM_8_USCALED, TO_8_USCALED )

ATTRIB( R8G8B8A8_SSCALED, 4, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8B8_SSCALED,   3, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8G8_SSCALED,     2, char, FROM_8_SSCALED, TO_8_SSCALED )
ATTRIB( R8_SSCALED,       1, char, FROM_8_SSCALED, TO_8_SSCALED )

ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8B8_UNORM,   3, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8G8_UNORM,     2, ubyte, FROM_8_UNORM, TO_8_UNORM )
ATTRIB( R8_UNORM,       1, ubyte, FROM_8_UNORM, TO_8_UNORM )

ATTRIB( R8G8B8A8_SNORM, 4, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8B8_SNORM,   3, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8G8_SNORM,     2, char, FROM_8_SNORM, TO_8_SNORM )
ATTRIB( R8_SNORM,       1, char, FROM_8_SNORM, TO_8_SNORM )

/* NOTE(review): unlike fetch_B8G8R8A8_UNORM below, no channel swizzle is
 * applied here — components are fetched in memory order. Confirm this is
 * the intended interpretation of A8R8G8B8 for this module.
 */
ATTRIB( A8R8G8B8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )
/*ATTRIB( R8G8B8A8_UNORM, 4, ubyte, FROM_8_UNORM, TO_8_UNORM )*/

ATTRIB( R32G32B32A32_FIXED, 4, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32B32_FIXED,    3, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32G32_FIXED,       2, int, FROM_32_FIXED, TO_32_FIXED )
ATTRIB( R32_FIXED,          1, int, FROM_32_FIXED, TO_32_FIXED )
231
232
233
/* Fetch a BGRA-ordered unorm8 pixel, swizzling into RGBA float order. */
static void
fetch_B8G8R8A8_UNORM(const void *ptr, float *attrib)
{
   attrib[0] = FROM_8_UNORM(2);   /* R lives in byte 2 */
   attrib[1] = FROM_8_UNORM(1);   /* G */
   attrib[2] = FROM_8_UNORM(0);   /* B lives in byte 0 */
   attrib[3] = FROM_8_UNORM(3);   /* A */
}
242
243 static void
244 emit_B8G8R8A8_UNORM( const float *attrib, void *ptr)
245 {
246 ubyte *out = (ubyte *)ptr;
247 out[2] = TO_8_UNORM(attrib[0]);
248 out[1] = TO_8_UNORM(attrib[1]);
249 out[0] = TO_8_UNORM(attrib[2]);
250 out[3] = TO_8_UNORM(attrib[3]);
251 }
252
/* Fallback fetch for unsupported formats: yield the (0,0,0,1) defaults. */
static void
fetch_NULL( const void *ptr, float *attrib )
{
   static const float defaults[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
   unsigned i;

   for (i = 0; i < 4; i++)
      attrib[i] = defaults[i];
}
261
/* Fallback emit for unsupported formats: writing nothing is the only
 * sensible option.
 */
static void
emit_NULL( const float *attrib, void *ptr )
{
   (void) attrib;
   (void) ptr;
}
267
268 static fetch_func get_fetch_func( enum pipe_format format )
269 {
270 switch (format) {
271 case PIPE_FORMAT_R64_FLOAT:
272 return &fetch_R64_FLOAT;
273 case PIPE_FORMAT_R64G64_FLOAT:
274 return &fetch_R64G64_FLOAT;
275 case PIPE_FORMAT_R64G64B64_FLOAT:
276 return &fetch_R64G64B64_FLOAT;
277 case PIPE_FORMAT_R64G64B64A64_FLOAT:
278 return &fetch_R64G64B64A64_FLOAT;
279
280 case PIPE_FORMAT_R32_FLOAT:
281 return &fetch_R32_FLOAT;
282 case PIPE_FORMAT_R32G32_FLOAT:
283 return &fetch_R32G32_FLOAT;
284 case PIPE_FORMAT_R32G32B32_FLOAT:
285 return &fetch_R32G32B32_FLOAT;
286 case PIPE_FORMAT_R32G32B32A32_FLOAT:
287 return &fetch_R32G32B32A32_FLOAT;
288
289 case PIPE_FORMAT_R32_UNORM:
290 return &fetch_R32_UNORM;
291 case PIPE_FORMAT_R32G32_UNORM:
292 return &fetch_R32G32_UNORM;
293 case PIPE_FORMAT_R32G32B32_UNORM:
294 return &fetch_R32G32B32_UNORM;
295 case PIPE_FORMAT_R32G32B32A32_UNORM:
296 return &fetch_R32G32B32A32_UNORM;
297
298 case PIPE_FORMAT_R32_USCALED:
299 return &fetch_R32_USCALED;
300 case PIPE_FORMAT_R32G32_USCALED:
301 return &fetch_R32G32_USCALED;
302 case PIPE_FORMAT_R32G32B32_USCALED:
303 return &fetch_R32G32B32_USCALED;
304 case PIPE_FORMAT_R32G32B32A32_USCALED:
305 return &fetch_R32G32B32A32_USCALED;
306
307 case PIPE_FORMAT_R32_SNORM:
308 return &fetch_R32_SNORM;
309 case PIPE_FORMAT_R32G32_SNORM:
310 return &fetch_R32G32_SNORM;
311 case PIPE_FORMAT_R32G32B32_SNORM:
312 return &fetch_R32G32B32_SNORM;
313 case PIPE_FORMAT_R32G32B32A32_SNORM:
314 return &fetch_R32G32B32A32_SNORM;
315
316 case PIPE_FORMAT_R32_SSCALED:
317 return &fetch_R32_SSCALED;
318 case PIPE_FORMAT_R32G32_SSCALED:
319 return &fetch_R32G32_SSCALED;
320 case PIPE_FORMAT_R32G32B32_SSCALED:
321 return &fetch_R32G32B32_SSCALED;
322 case PIPE_FORMAT_R32G32B32A32_SSCALED:
323 return &fetch_R32G32B32A32_SSCALED;
324
325 case PIPE_FORMAT_R16_UNORM:
326 return &fetch_R16_UNORM;
327 case PIPE_FORMAT_R16G16_UNORM:
328 return &fetch_R16G16_UNORM;
329 case PIPE_FORMAT_R16G16B16_UNORM:
330 return &fetch_R16G16B16_UNORM;
331 case PIPE_FORMAT_R16G16B16A16_UNORM:
332 return &fetch_R16G16B16A16_UNORM;
333
334 case PIPE_FORMAT_R16_USCALED:
335 return &fetch_R16_USCALED;
336 case PIPE_FORMAT_R16G16_USCALED:
337 return &fetch_R16G16_USCALED;
338 case PIPE_FORMAT_R16G16B16_USCALED:
339 return &fetch_R16G16B16_USCALED;
340 case PIPE_FORMAT_R16G16B16A16_USCALED:
341 return &fetch_R16G16B16A16_USCALED;
342
343 case PIPE_FORMAT_R16_SNORM:
344 return &fetch_R16_SNORM;
345 case PIPE_FORMAT_R16G16_SNORM:
346 return &fetch_R16G16_SNORM;
347 case PIPE_FORMAT_R16G16B16_SNORM:
348 return &fetch_R16G16B16_SNORM;
349 case PIPE_FORMAT_R16G16B16A16_SNORM:
350 return &fetch_R16G16B16A16_SNORM;
351
352 case PIPE_FORMAT_R16_SSCALED:
353 return &fetch_R16_SSCALED;
354 case PIPE_FORMAT_R16G16_SSCALED:
355 return &fetch_R16G16_SSCALED;
356 case PIPE_FORMAT_R16G16B16_SSCALED:
357 return &fetch_R16G16B16_SSCALED;
358 case PIPE_FORMAT_R16G16B16A16_SSCALED:
359 return &fetch_R16G16B16A16_SSCALED;
360
361 case PIPE_FORMAT_R8_UNORM:
362 return &fetch_R8_UNORM;
363 case PIPE_FORMAT_R8G8_UNORM:
364 return &fetch_R8G8_UNORM;
365 case PIPE_FORMAT_R8G8B8_UNORM:
366 return &fetch_R8G8B8_UNORM;
367 case PIPE_FORMAT_R8G8B8A8_UNORM:
368 return &fetch_R8G8B8A8_UNORM;
369
370 case PIPE_FORMAT_R8_USCALED:
371 return &fetch_R8_USCALED;
372 case PIPE_FORMAT_R8G8_USCALED:
373 return &fetch_R8G8_USCALED;
374 case PIPE_FORMAT_R8G8B8_USCALED:
375 return &fetch_R8G8B8_USCALED;
376 case PIPE_FORMAT_R8G8B8A8_USCALED:
377 return &fetch_R8G8B8A8_USCALED;
378
379 case PIPE_FORMAT_R8_SNORM:
380 return &fetch_R8_SNORM;
381 case PIPE_FORMAT_R8G8_SNORM:
382 return &fetch_R8G8_SNORM;
383 case PIPE_FORMAT_R8G8B8_SNORM:
384 return &fetch_R8G8B8_SNORM;
385 case PIPE_FORMAT_R8G8B8A8_SNORM:
386 return &fetch_R8G8B8A8_SNORM;
387
388 case PIPE_FORMAT_R8_SSCALED:
389 return &fetch_R8_SSCALED;
390 case PIPE_FORMAT_R8G8_SSCALED:
391 return &fetch_R8G8_SSCALED;
392 case PIPE_FORMAT_R8G8B8_SSCALED:
393 return &fetch_R8G8B8_SSCALED;
394 case PIPE_FORMAT_R8G8B8A8_SSCALED:
395 return &fetch_R8G8B8A8_SSCALED;
396
397 case PIPE_FORMAT_B8G8R8A8_UNORM:
398 return &fetch_B8G8R8A8_UNORM;
399
400 case PIPE_FORMAT_A8R8G8B8_UNORM:
401 return &fetch_A8R8G8B8_UNORM;
402
403 case PIPE_FORMAT_R32_FIXED:
404 return &fetch_R32_FIXED;
405 case PIPE_FORMAT_R32G32_FIXED:
406 return &fetch_R32G32_FIXED;
407 case PIPE_FORMAT_R32G32B32_FIXED:
408 return &fetch_R32G32B32_FIXED;
409 case PIPE_FORMAT_R32G32B32A32_FIXED:
410 return &fetch_R32G32B32A32_FIXED;
411
412 default:
413 assert(0);
414 return &fetch_NULL;
415 }
416 }
417
418
419
420
421 static emit_func get_emit_func( enum pipe_format format )
422 {
423 /* silence warnings */
424 (void) emit_R32G32B32A32_FIXED;
425 (void) emit_R32G32B32_FIXED;
426 (void) emit_R32G32_FIXED;
427 (void) emit_R32_FIXED;
428
429 switch (format) {
430 case PIPE_FORMAT_R64_FLOAT:
431 return &emit_R64_FLOAT;
432 case PIPE_FORMAT_R64G64_FLOAT:
433 return &emit_R64G64_FLOAT;
434 case PIPE_FORMAT_R64G64B64_FLOAT:
435 return &emit_R64G64B64_FLOAT;
436 case PIPE_FORMAT_R64G64B64A64_FLOAT:
437 return &emit_R64G64B64A64_FLOAT;
438
439 case PIPE_FORMAT_R32_FLOAT:
440 return &emit_R32_FLOAT;
441 case PIPE_FORMAT_R32G32_FLOAT:
442 return &emit_R32G32_FLOAT;
443 case PIPE_FORMAT_R32G32B32_FLOAT:
444 return &emit_R32G32B32_FLOAT;
445 case PIPE_FORMAT_R32G32B32A32_FLOAT:
446 return &emit_R32G32B32A32_FLOAT;
447
448 case PIPE_FORMAT_R32_UNORM:
449 return &emit_R32_UNORM;
450 case PIPE_FORMAT_R32G32_UNORM:
451 return &emit_R32G32_UNORM;
452 case PIPE_FORMAT_R32G32B32_UNORM:
453 return &emit_R32G32B32_UNORM;
454 case PIPE_FORMAT_R32G32B32A32_UNORM:
455 return &emit_R32G32B32A32_UNORM;
456
457 case PIPE_FORMAT_R32_USCALED:
458 return &emit_R32_USCALED;
459 case PIPE_FORMAT_R32G32_USCALED:
460 return &emit_R32G32_USCALED;
461 case PIPE_FORMAT_R32G32B32_USCALED:
462 return &emit_R32G32B32_USCALED;
463 case PIPE_FORMAT_R32G32B32A32_USCALED:
464 return &emit_R32G32B32A32_USCALED;
465
466 case PIPE_FORMAT_R32_SNORM:
467 return &emit_R32_SNORM;
468 case PIPE_FORMAT_R32G32_SNORM:
469 return &emit_R32G32_SNORM;
470 case PIPE_FORMAT_R32G32B32_SNORM:
471 return &emit_R32G32B32_SNORM;
472 case PIPE_FORMAT_R32G32B32A32_SNORM:
473 return &emit_R32G32B32A32_SNORM;
474
475 case PIPE_FORMAT_R32_SSCALED:
476 return &emit_R32_SSCALED;
477 case PIPE_FORMAT_R32G32_SSCALED:
478 return &emit_R32G32_SSCALED;
479 case PIPE_FORMAT_R32G32B32_SSCALED:
480 return &emit_R32G32B32_SSCALED;
481 case PIPE_FORMAT_R32G32B32A32_SSCALED:
482 return &emit_R32G32B32A32_SSCALED;
483
484 case PIPE_FORMAT_R16_UNORM:
485 return &emit_R16_UNORM;
486 case PIPE_FORMAT_R16G16_UNORM:
487 return &emit_R16G16_UNORM;
488 case PIPE_FORMAT_R16G16B16_UNORM:
489 return &emit_R16G16B16_UNORM;
490 case PIPE_FORMAT_R16G16B16A16_UNORM:
491 return &emit_R16G16B16A16_UNORM;
492
493 case PIPE_FORMAT_R16_USCALED:
494 return &emit_R16_USCALED;
495 case PIPE_FORMAT_R16G16_USCALED:
496 return &emit_R16G16_USCALED;
497 case PIPE_FORMAT_R16G16B16_USCALED:
498 return &emit_R16G16B16_USCALED;
499 case PIPE_FORMAT_R16G16B16A16_USCALED:
500 return &emit_R16G16B16A16_USCALED;
501
502 case PIPE_FORMAT_R16_SNORM:
503 return &emit_R16_SNORM;
504 case PIPE_FORMAT_R16G16_SNORM:
505 return &emit_R16G16_SNORM;
506 case PIPE_FORMAT_R16G16B16_SNORM:
507 return &emit_R16G16B16_SNORM;
508 case PIPE_FORMAT_R16G16B16A16_SNORM:
509 return &emit_R16G16B16A16_SNORM;
510
511 case PIPE_FORMAT_R16_SSCALED:
512 return &emit_R16_SSCALED;
513 case PIPE_FORMAT_R16G16_SSCALED:
514 return &emit_R16G16_SSCALED;
515 case PIPE_FORMAT_R16G16B16_SSCALED:
516 return &emit_R16G16B16_SSCALED;
517 case PIPE_FORMAT_R16G16B16A16_SSCALED:
518 return &emit_R16G16B16A16_SSCALED;
519
520 case PIPE_FORMAT_R8_UNORM:
521 return &emit_R8_UNORM;
522 case PIPE_FORMAT_R8G8_UNORM:
523 return &emit_R8G8_UNORM;
524 case PIPE_FORMAT_R8G8B8_UNORM:
525 return &emit_R8G8B8_UNORM;
526 case PIPE_FORMAT_R8G8B8A8_UNORM:
527 return &emit_R8G8B8A8_UNORM;
528
529 case PIPE_FORMAT_R8_USCALED:
530 return &emit_R8_USCALED;
531 case PIPE_FORMAT_R8G8_USCALED:
532 return &emit_R8G8_USCALED;
533 case PIPE_FORMAT_R8G8B8_USCALED:
534 return &emit_R8G8B8_USCALED;
535 case PIPE_FORMAT_R8G8B8A8_USCALED:
536 return &emit_R8G8B8A8_USCALED;
537
538 case PIPE_FORMAT_R8_SNORM:
539 return &emit_R8_SNORM;
540 case PIPE_FORMAT_R8G8_SNORM:
541 return &emit_R8G8_SNORM;
542 case PIPE_FORMAT_R8G8B8_SNORM:
543 return &emit_R8G8B8_SNORM;
544 case PIPE_FORMAT_R8G8B8A8_SNORM:
545 return &emit_R8G8B8A8_SNORM;
546
547 case PIPE_FORMAT_R8_SSCALED:
548 return &emit_R8_SSCALED;
549 case PIPE_FORMAT_R8G8_SSCALED:
550 return &emit_R8G8_SSCALED;
551 case PIPE_FORMAT_R8G8B8_SSCALED:
552 return &emit_R8G8B8_SSCALED;
553 case PIPE_FORMAT_R8G8B8A8_SSCALED:
554 return &emit_R8G8B8A8_SSCALED;
555
556 case PIPE_FORMAT_B8G8R8A8_UNORM:
557 return &emit_B8G8R8A8_UNORM;
558
559 case PIPE_FORMAT_A8R8G8B8_UNORM:
560 return &emit_A8R8G8B8_UNORM;
561
562 default:
563 assert(0);
564 return &emit_NULL;
565 }
566 }
567
568
569
570 /**
571 * Fetch vertex attributes for 'count' vertices.
572 */
573 static void PIPE_CDECL generic_run_elts( struct translate *translate,
574 const unsigned *elts,
575 unsigned count,
576 unsigned instance_id,
577 void *output_buffer )
578 {
579 struct translate_generic *tg = translate_generic(translate);
580 char *vert = output_buffer;
581 unsigned nr_attrs = tg->nr_attrib;
582 unsigned attr;
583 unsigned i;
584
585 /* loop over vertex attributes (vertex shader inputs)
586 */
587 for (i = 0; i < count; i++) {
588 unsigned elt = *elts++;
589
590 for (attr = 0; attr < nr_attrs; attr++) {
591 float data[4];
592 const char *src;
593 unsigned index;
594
595 char *dst = (vert +
596 tg->attrib[attr].output_offset);
597
598 if (tg->attrib[attr].instance_divisor) {
599 index = instance_id / tg->attrib[attr].instance_divisor;
600 } else {
601 index = elt;
602 }
603
604 index = MIN2(index, tg->attrib[attr].max_index);
605
606 src = tg->attrib[attr].input_ptr +
607 tg->attrib[attr].input_stride * index;
608
609 tg->attrib[attr].fetch( src, data );
610
611 if (0) debug_printf("vert %d/%d attr %d: %f %f %f %f\n",
612 i, elt, attr, data[0], data[1], data[2], data[3]);
613
614 tg->attrib[attr].emit( data, dst );
615 }
616
617 vert += tg->translate.key.output_stride;
618 }
619 }
620
621
622
623 static void PIPE_CDECL generic_run( struct translate *translate,
624 unsigned start,
625 unsigned count,
626 unsigned instance_id,
627 void *output_buffer )
628 {
629 struct translate_generic *tg = translate_generic(translate);
630 char *vert = output_buffer;
631 unsigned nr_attrs = tg->nr_attrib;
632 unsigned attr;
633 unsigned i;
634
635 /* loop over vertex attributes (vertex shader inputs)
636 */
637 for (i = 0; i < count; i++) {
638 unsigned elt = start + i;
639
640 for (attr = 0; attr < nr_attrs; attr++) {
641 float data[4];
642
643 char *dst = (vert +
644 tg->attrib[attr].output_offset);
645
646 if (tg->attrib[attr].type == TRANSLATE_ELEMENT_NORMAL) {
647 const char *src;
648
649 if (tg->attrib[attr].instance_divisor) {
650 src = tg->attrib[attr].input_ptr +
651 tg->attrib[attr].input_stride *
652 (instance_id / tg->attrib[attr].instance_divisor);
653 } else {
654 src = tg->attrib[attr].input_ptr +
655 tg->attrib[attr].input_stride * elt;
656 }
657
658 tg->attrib[attr].fetch( src, data );
659 } else {
660 data[0] = (float)instance_id;
661 }
662
663 if (0) debug_printf("vert %d attr %d: %f %f %f %f\n",
664 i, attr, data[0], data[1], data[2], data[3]);
665
666 tg->attrib[attr].emit( data, dst );
667 }
668
669 vert += tg->translate.key.output_stride;
670 }
671 }
672
673
674
675 static void generic_set_buffer( struct translate *translate,
676 unsigned buf,
677 const void *ptr,
678 unsigned stride,
679 unsigned max_index )
680 {
681 struct translate_generic *tg = translate_generic(translate);
682 unsigned i;
683
684 for (i = 0; i < tg->nr_attrib; i++) {
685 if (tg->attrib[i].buffer == buf) {
686 tg->attrib[i].input_ptr = ((char *)ptr +
687 tg->attrib[i].input_offset);
688 tg->attrib[i].input_stride = stride;
689 tg->attrib[i].max_index = max_index;
690 }
691 }
692 }
693
694
/* Destroy a generic translate object.  No refcounting is implemented:
 * the sole owner created by translate_generic_create() frees it here.
 */
static void generic_release( struct translate *translate )
{
   FREE(translate);
}
701
702 struct translate *translate_generic_create( const struct translate_key *key )
703 {
704 struct translate_generic *tg = CALLOC_STRUCT(translate_generic);
705 unsigned i;
706
707 if (tg == NULL)
708 return NULL;
709
710 tg->translate.key = *key;
711 tg->translate.release = generic_release;
712 tg->translate.set_buffer = generic_set_buffer;
713 tg->translate.run_elts = generic_run_elts;
714 tg->translate.run = generic_run;
715
716 for (i = 0; i < key->nr_elements; i++) {
717 tg->attrib[i].type = key->element[i].type;
718
719 tg->attrib[i].fetch = get_fetch_func(key->element[i].input_format);
720 tg->attrib[i].buffer = key->element[i].input_buffer;
721 tg->attrib[i].input_offset = key->element[i].input_offset;
722 tg->attrib[i].instance_divisor = key->element[i].instance_divisor;
723
724 tg->attrib[i].emit = get_emit_func(key->element[i].output_format);
725 tg->attrib[i].output_offset = key->element[i].output_offset;
726
727 }
728
729 tg->nr_attrib = key->nr_elements;
730
731
732 return &tg->translate;
733 }