#include "util/u_debug.h"
#include "pipe/p_inlines.h"
#include "pipe/internal/p_winsys_screen.h"
#include "pipe/p_compiler.h"

#include "draw/draw_vbuf.h"

#include "nv04_context.h"
#include "nv04_state.h"

#define VERTEX_SIZE 40
#define VERTEX_BUFFER_SIZE (4096*VERTEX_SIZE) // 4096 vertices of 40 bytes each

/**
 * Primitive renderer for nv04.
 */
struct nv04_vbuf_render {
	struct vbuf_render base;

	struct nv04_context *nv04;

	/** Vertex buffer */
	unsigned char* buffer;

	/** Vertex size in bytes */
	unsigned vertex_size;

	/** Current primitive */
	unsigned prim;
};


/**
 * Basically a cast wrapper.
 */
static INLINE struct nv04_vbuf_render *
nv04_vbuf_render( struct vbuf_render *render )
{
	assert(render);
	return (struct nv04_vbuf_render *)render;
}


static const struct vertex_info *
nv04_vbuf_render_get_vertex_info( struct vbuf_render *render )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);
	struct nv04_context *nv04 = nv04_render->nv04;
	return &nv04->vertex_info;
}

static boolean
nv04_vbuf_render_allocate_vertices( struct vbuf_render *render,
                                    ushort vertex_size,
                                    ushort nr_vertices )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);

	/* there should be no leftover buffer from a previous draw */
	assert(!nv04_render->buffer);
	nv04_render->buffer = (unsigned char*) MALLOC(VERTEX_BUFFER_SIZE);

	return nv04_render->buffer ? TRUE : FALSE;
}

static void *
nv04_vbuf_render_map_vertices( struct vbuf_render *render )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);
	return nv04_render->buffer;
}

static void
nv04_vbuf_render_unmap_vertices( struct vbuf_render *render,
                                 ushort min_index,
                                 ushort max_index )
{
}

static boolean
nv04_vbuf_render_set_primitive( struct vbuf_render *render,
                                unsigned prim )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);

	/* points and lines are not handled by this renderer */
	if (prim <= PIPE_PRIM_LINE_STRIP)
		return FALSE;

	nv04_render->prim = prim;
	return TRUE;
}

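/*
 * The emit helpers below load vertices into the DX5 textured-triangle
 * object's TLVERTEX slots (16 slots of 8 dwords each, selected by the
 * argument to TLVERTEX_SX()) and then write one packed word into the
 * DRAWPRIMITIVE method that follows the vertex registers, hence the
 * "vertices * 8 + 1" BEGIN_RING sizes.  Each triangle is encoded as three
 * 4-bit slot indices, two triangles per word; 0xFEDCBA, for example, draws
 * the triangles (0xA,0xB,0xC) and (0xD,0xE,0xF).  Note that only the first
 * 8 dwords of each VERTEX_SIZE-byte vertex are sent.
 */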
static INLINE void nv04_2triangles(struct nv04_context* nv04, unsigned char* buffer, ushort v0, ushort v1, ushort v2, ushort v3, ushort v4, ushort v5)
{
	BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0xA), 49);
	OUT_RINGp(buffer + VERTEX_SIZE * v0, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v1, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v2, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v3, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v4, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v5, 8);
	OUT_RING(0xFEDCBA);
}

static INLINE void nv04_1triangle(struct nv04_context* nv04, unsigned char* buffer, ushort v0, ushort v1, ushort v2)
{
	BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0xD), 25);
	OUT_RINGp(buffer + VERTEX_SIZE * v0, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v1, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v2, 8);
	OUT_RING(0xFED);
}

static INLINE void nv04_1quad(struct nv04_context* nv04, unsigned char* buffer, ushort v0, ushort v1, ushort v2, ushort v3)
{
	BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0xC), 33);
	OUT_RINGp(buffer + VERTEX_SIZE * v0, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v1, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v2, 8);
	OUT_RINGp(buffer + VERTEX_SIZE * v3, 8);
	OUT_RING(0xFECEDC);
}

static void nv04_vbuf_render_triangles_elts(struct nv04_vbuf_render * render, const ushort * indices, uint nr_indices)
{
	unsigned char* buffer = render->buffer;
	struct nv04_context* nv04 = render->nv04;
	int i;

	/* emit two triangles at a time, then a single leftover triangle if needed */
	for (i = 0; i + 6 <= nr_indices; i += 6)
		nv04_2triangles(nv04,
				buffer,
				indices[i+0],
				indices[i+1],
				indices[i+2],
				indices[i+3],
				indices[i+4],
				indices[i+5]
				);
	if (i != nr_indices)
	{
		nv04_1triangle(nv04,
			       buffer,
			       indices[i+0],
			       indices[i+1],
			       indices[i+2]
			       );
		i += 3;
	}
	if (i != nr_indices)
		NOUVEAU_ERR("Houston, we have lost some vertices\n");
}

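/*
 * Triangle strips are emitted in batches: up to 16 vertices are loaded into
 * TLVERTEX slots 0-15, each word of striptbl[] packs two consecutive strip
 * triangles as slot indices, and the loop advances by 14 indices so the last
 * two vertices of one batch are loaded again as the first two of the next.
 */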
static void nv04_vbuf_render_tri_strip_elts(struct nv04_vbuf_render* render, const ushort* indices, uint nr_indices)
{
	const uint32_t striptbl[] = {0x321210, 0x543432, 0x765654, 0x987876, 0xBA9A98, 0xDCBCBA, 0xFEDEDC};
	unsigned char* buffer = render->buffer;
	struct nv04_context* nv04 = render->nv04;
	int i, j;

	for (i = 0; i < nr_indices; i += 14)
	{
		int numvert = MIN2(16, nr_indices - i);
		int numtri = numvert - 2;
		if (numvert < 3)
			break;

		BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0x0), numvert*8);
		for (j = 0; j < numvert; j++)
			OUT_RINGp(buffer + VERTEX_SIZE * indices[i+j], 8);

		BEGIN_RING_NI(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_DRAWPRIMITIVE(0), (numtri+1)/2);
		for (j = 0; j < numtri/2; j++)
			OUT_RING(striptbl[j]);
		if (numtri % 2)
			OUT_RING(striptbl[numtri/2] & 0xFFF);
	}
}

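/*
 * Triangle fans (and polygons) keep the shared vertex in TLVERTEX slot 0 for
 * the whole draw; each batch reloads up to 15 rim vertices into slots 1-15,
 * and every triangle packed in fantbl[] references slot 0.
 */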
static void nv04_vbuf_render_tri_fan_elts(struct nv04_vbuf_render* render, const ushort* indices, uint nr_indices)
{
	const uint32_t fantbl[] = {0x320210, 0x540430, 0x760650, 0x980870, 0xBA0A90, 0xDC0CB0, 0xFE0ED0};
	unsigned char* buffer = render->buffer;
	struct nv04_context* nv04 = render->nv04;
	int i, j;

	/* load the shared fan vertex into slot 0 */
	BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0x0), 8);
	OUT_RINGp(buffer + VERTEX_SIZE * indices[0], 8);

	for (i = 1; i < nr_indices; i += 14)
	{
		/* numvert rim vertices plus the shared vertex give numvert - 1 triangles */
		int numvert = MIN2(15, nr_indices - i);
		int numtri = numvert - 1;
		if (numvert < 2)
			break;

		BEGIN_RING(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_SX(0x1), numvert*8);
		for (j = 0; j < numvert; j++)
			OUT_RINGp(buffer + VERTEX_SIZE * indices[i+j], 8);

		BEGIN_RING_NI(fahrenheit, NV04_DX5_TEXTURED_TRIANGLE_TLVERTEX_DRAWPRIMITIVE(0), (numtri+1)/2);
		for (j = 0; j < numtri/2; j++)
			OUT_RING(fantbl[j]);
		if (numtri % 2)
			OUT_RING(fantbl[numtri/2] & 0xFFF);
	}
}

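/*
 * Quads: each group of four indices is split by nv04_1quad into two
 * triangles sharing the v0-v2 diagonal.
 */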
static void nv04_vbuf_render_quads_elts(struct nv04_vbuf_render* render, const ushort* indices, uint nr_indices)
{
	unsigned char* buffer = render->buffer;
	struct nv04_context* nv04 = render->nv04;
	int i;

	for (i = 0; i < nr_indices; i += 4)
		nv04_1quad(nv04,
			   buffer,
			   indices[i+0],
			   indices[i+1],
			   indices[i+2],
			   indices[i+3]
			   );
}


static void
nv04_vbuf_render_draw( struct vbuf_render *render,
                       const ushort *indices,
                       uint nr_indices)
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);

	// emit the indices
	switch( nv04_render->prim )
	{
		case PIPE_PRIM_TRIANGLES:
			nv04_vbuf_render_triangles_elts(nv04_render, indices, nr_indices);
			break;
		case PIPE_PRIM_QUAD_STRIP:
		case PIPE_PRIM_TRIANGLE_STRIP:
			nv04_vbuf_render_tri_strip_elts(nv04_render, indices, nr_indices);
			break;
		case PIPE_PRIM_TRIANGLE_FAN:
		case PIPE_PRIM_POLYGON:
			nv04_vbuf_render_tri_fan_elts(nv04_render, indices, nr_indices);
			break;
		case PIPE_PRIM_QUADS:
			nv04_vbuf_render_quads_elts(nv04_render, indices, nr_indices);
			break;
		default:
			NOUVEAU_ERR("You have to implement primitive %d, young padawan\n", nv04_render->prim);
			break;
	}
}

static void
nv04_vbuf_render_release_vertices( struct vbuf_render *render )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);

	/* the buffer was allocated with MALLOC, so release it with FREE */
	FREE(nv04_render->buffer);
	nv04_render->buffer = NULL;
}

static void
nv04_vbuf_render_destroy( struct vbuf_render *render )
{
	struct nv04_vbuf_render *nv04_render = nv04_vbuf_render(render);
	FREE(nv04_render);
}

/**
 * Create a new primitive renderer.
 */
static struct vbuf_render *
nv04_vbuf_render_create( struct nv04_context *nv04 )
{
	struct nv04_vbuf_render *nv04_render = CALLOC_STRUCT(nv04_vbuf_render);
	if (!nv04_render)
		return NULL;

	nv04_render->nv04 = nv04;

	nv04_render->base.max_vertex_buffer_bytes = VERTEX_BUFFER_SIZE;
	nv04_render->base.max_indices = 65536;
	nv04_render->base.get_vertex_info = nv04_vbuf_render_get_vertex_info;
	nv04_render->base.allocate_vertices = nv04_vbuf_render_allocate_vertices;
	nv04_render->base.map_vertices = nv04_vbuf_render_map_vertices;
	nv04_render->base.unmap_vertices = nv04_vbuf_render_unmap_vertices;
	nv04_render->base.set_primitive = nv04_vbuf_render_set_primitive;
	nv04_render->base.draw = nv04_vbuf_render_draw;
	nv04_render->base.release_vertices = nv04_vbuf_render_release_vertices;
	nv04_render->base.destroy = nv04_vbuf_render_destroy;

	return &nv04_render->base;
}

/**
 * Create a new primitive vbuf/render stage.
 */
struct draw_stage *nv04_draw_vbuf_stage( struct nv04_context *nv04 )
{
	struct vbuf_render *render;
	struct draw_stage *stage;

	render = nv04_vbuf_render_create(nv04);
	if (!render)
		return NULL;

	stage = draw_vbuf_stage( nv04->draw, render );
	if (!stage) {
		render->destroy(render);
		return NULL;
	}

	return stage;
}
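
/*
 * Usage sketch (an assumption, not part of this file): the nv04 context
 * creation code would typically install this stage as the rasterize stage
 * of its draw module, roughly:
 *
 *	struct draw_stage *stage = nv04_draw_vbuf_stage(nv04);
 *	if (stage)
 *		draw_set_rasterize_stage(nv04->draw, stage);
 */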