gallium: adapt nv drivers to interface cleanups
[mesa.git] src/gallium/drivers/nv20/nv20_miptree.c
#include "pipe/p_state.h"
#include "pipe/p_defines.h"
#include "pipe/p_inlines.h"
#include "util/u_math.h"

#include "nv20_context.h"
#include "nv20_screen.h"

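/* Compute each mipmap level's pitch and each face's image offset within
 * the backing buffer, and the total size that buffer needs.  Wide-pitch
 * linear textures (sampler, render/display target, depth/stencil,
 * primary) use one 64-byte aligned pitch for all levels; otherwise the
 * pitch is the packed stride of each level.
 */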
static void
nv20_miptree_layout(struct nv20_miptree *nv20mt)
{
	struct pipe_texture *pt = &nv20mt->base;
	uint width = pt->width0;
	uint offset = 0;
	int nr_faces, l, f;
	uint wide_pitch = pt->tex_usage & (PIPE_TEXTURE_USAGE_SAMPLER |
	                                   PIPE_TEXTURE_USAGE_DEPTH_STENCIL |
	                                   PIPE_TEXTURE_USAGE_RENDER_TARGET |
	                                   PIPE_TEXTURE_USAGE_DISPLAY_TARGET |
	                                   PIPE_TEXTURE_USAGE_PRIMARY);

	if (pt->target == PIPE_TEXTURE_CUBE) {
		nr_faces = 6;
	} else {
		nr_faces = 1;
	}

	for (l = 0; l <= pt->last_level; l++) {
		if (wide_pitch && (pt->tex_usage & NOUVEAU_TEXTURE_USAGE_LINEAR))
			nv20mt->level[l].pitch = align(pf_get_stride(pt->format, pt->width0), 64);
		else
			nv20mt->level[l].pitch = pf_get_stride(pt->format, width);

		nv20mt->level[l].image_offset =
			CALLOC(nr_faces, sizeof(unsigned));

		width = u_minify(width, 1);
	}

	for (f = 0; f < nr_faces; f++) {
		for (l = 0; l < pt->last_level; l++) {
			nv20mt->level[l].image_offset[f] = offset;

			if (!(pt->tex_usage & NOUVEAU_TEXTURE_USAGE_LINEAR) &&
			    u_minify(pt->width0, l + 1) > 1 && u_minify(pt->height0, l + 1) > 1)
				offset += align(nv20mt->level[l].pitch * u_minify(pt->height0, l), 64);
			else
				offset += nv20mt->level[l].pitch * u_minify(pt->height0, l);
		}

		nv20mt->level[l].image_offset[f] = offset;
		offset += nv20mt->level[l].pitch * u_minify(pt->height0, l);
	}

	nv20mt->total_size = offset;
}

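/* Wrap an existing pipe_buffer in a miptree so it can be treated as a
 * texture, without allocating new storage.
 */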
static struct pipe_texture *
nv20_miptree_blanket(struct pipe_screen *pscreen, const struct pipe_texture *pt,
                     const unsigned *stride, struct pipe_buffer *pb)
{
	struct nv20_miptree *mt;

	/* Only supports 2D, non-mipmapped textures for the moment */
	if (pt->target != PIPE_TEXTURE_2D || pt->last_level != 0 ||
	    pt->depth0 != 1)
		return NULL;

	mt = CALLOC_STRUCT(nv20_miptree);
	if (!mt)
		return NULL;

	mt->base = *pt;
	pipe_reference_init(&mt->base.reference, 1);
	mt->base.screen = pscreen;
	mt->level[0].pitch = stride[0];
	mt->level[0].image_offset = CALLOC(1, sizeof(unsigned));

	pipe_buffer_reference(&mt->buffer, pb);
	return &mt->base;
}

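/* Allocate storage for a new texture.  Non-power-of-two sizes, scanout,
 * depth/stencil or dynamic usage, and formats not known to swizzle
 * correctly all force a linear layout before the levels are laid out and
 * the backing buffer is allocated.
 */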
static struct pipe_texture *
nv20_miptree_create(struct pipe_screen *screen, const struct pipe_texture *pt)
{
	struct nv20_miptree *mt;
	unsigned buf_usage = PIPE_BUFFER_USAGE_PIXEL |
	                     NOUVEAU_BUFFER_USAGE_TEXTURE;

	mt = MALLOC(sizeof(struct nv20_miptree));
	if (!mt)
		return NULL;
	mt->base = *pt;
	pipe_reference_init(&mt->base.reference, 1);
	mt->base.screen = screen;

	/* Swizzled textures must be POT */
	if (pt->width0 & (pt->width0 - 1) ||
	    pt->height0 & (pt->height0 - 1))
		mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
	else
	if (pt->tex_usage & (PIPE_TEXTURE_USAGE_PRIMARY |
	                     PIPE_TEXTURE_USAGE_DISPLAY_TARGET |
	                     PIPE_TEXTURE_USAGE_DEPTH_STENCIL))
		mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
	else
	if (pt->tex_usage & PIPE_TEXTURE_USAGE_DYNAMIC)
		mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
	else {
		switch (pt->format) {
		/* TODO: Figure out which formats can be swizzled */
		case PIPE_FORMAT_A8R8G8B8_UNORM:
		case PIPE_FORMAT_X8R8G8B8_UNORM:
		case PIPE_FORMAT_R16_SNORM:
		{
			if (debug_get_bool_option("NOUVEAU_NO_SWIZZLE", FALSE))
				mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
			break;
		}
		default:
			mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
		}
	}

	if (pt->tex_usage & PIPE_TEXTURE_USAGE_DYNAMIC)
		buf_usage |= PIPE_BUFFER_USAGE_CPU_READ_WRITE;

	nv20_miptree_layout(mt);

	mt->buffer = screen->buffer_create(screen, 256, buf_usage, mt->total_size);
	if (!mt->buffer) {
		FREE(mt);
		return NULL;
	}

	return &mt->base;
}

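/* Release the backing buffer reference and the per-level offset arrays,
 * then free the miptree itself.
 */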
static void
nv20_miptree_destroy(struct pipe_texture *pt)
{
	struct nv20_miptree *nv20mt = (struct nv20_miptree *)pt;
	int l;

	pipe_buffer_reference(&nv20mt->buffer, NULL);
	for (l = 0; l <= pt->last_level; l++) {
		if (nv20mt->level[l].image_offset)
			FREE(nv20mt->level[l].image_offset);
	}
	FREE(nv20mt);
}

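/* Build a pipe_surface describing one face/level/zslice of the texture,
 * with its pitch and byte offset into the miptree's buffer.
 */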
static struct pipe_surface *
nv20_miptree_surface_get(struct pipe_screen *screen, struct pipe_texture *pt,
                         unsigned face, unsigned level, unsigned zslice,
                         unsigned flags)
{
	struct nv20_miptree *nv20mt = (struct nv20_miptree *)pt;
	struct nv04_surface *ns;

	ns = CALLOC_STRUCT(nv04_surface);
	if (!ns)
		return NULL;
	pipe_texture_reference(&ns->base.texture, pt);
	ns->base.format = pt->format;
	ns->base.width = u_minify(pt->width0, level);
	ns->base.height = u_minify(pt->height0, level);
	ns->base.usage = flags;
	pipe_reference_init(&ns->base.reference, 1);
	ns->base.face = face;
	ns->base.level = level;
	ns->base.zslice = zslice;
	ns->pitch = nv20mt->level[level].pitch;

	if (pt->target == PIPE_TEXTURE_CUBE) {
		ns->base.offset = nv20mt->level[level].image_offset[face];
	} else
	if (pt->target == PIPE_TEXTURE_3D) {
		ns->base.offset = nv20mt->level[level].image_offset[zslice];
	} else {
		ns->base.offset = nv20mt->level[level].image_offset[0];
	}

	return &ns->base;
}

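/* Drop the surface's reference on its texture and free the wrapper. */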
static void
nv20_miptree_surface_destroy(struct pipe_surface *ps)
{
	pipe_texture_reference(&ps->texture, NULL);
	FREE(ps);
}

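/* Plug the miptree implementation into the screen's texture hooks. */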
void nv20_screen_init_miptree_functions(struct pipe_screen *pscreen)
{
	pscreen->texture_create = nv20_miptree_create;
	pscreen->texture_blanket = nv20_miptree_blanket;
	pscreen->texture_destroy = nv20_miptree_destroy;
	pscreen->get_tex_surface = nv20_miptree_surface_get;
	pscreen->tex_surface_destroy = nv20_miptree_surface_destroy;
}