mesa.git: src/gallium/drivers/nv20/nv20_miptree.c
#include "pipe/p_state.h"
#include "pipe/p_defines.h"
#include "pipe/p_inlines.h"
#include "util/u_math.h"

#include "nv20_context.h"
#include "nv20_screen.h"

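/* Compute the storage layout of a miptree: a pitch per mipmap level and an
 * image offset per face, accumulated into nv20mt->total_size.  Linear
 * textures that can be sampled or scanned out get a 64-byte aligned pitch
 * based on the base-level width; otherwise the pitch follows the minified
 * width of each level.  For swizzled textures, images larger than 1x1 are
 * padded to a 64-byte boundary.
 */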
static void
nv20_miptree_layout(struct nv20_miptree *nv20mt)
{
        struct pipe_texture *pt = &nv20mt->base;
        uint width = pt->width0;
        uint offset = 0;
        int nr_faces, l, f;
        uint wide_pitch = pt->tex_usage & (PIPE_TEXTURE_USAGE_SAMPLER |
                                           PIPE_TEXTURE_USAGE_DEPTH_STENCIL |
                                           PIPE_TEXTURE_USAGE_RENDER_TARGET |
                                           PIPE_TEXTURE_USAGE_DISPLAY_TARGET |
                                           PIPE_TEXTURE_USAGE_PRIMARY);

        if (pt->target == PIPE_TEXTURE_CUBE) {
                nr_faces = 6;
        } else {
                nr_faces = 1;
        }

        for (l = 0; l <= pt->last_level; l++) {
                if (wide_pitch && (pt->tex_usage & NOUVEAU_TEXTURE_USAGE_LINEAR))
                        nv20mt->level[l].pitch = align(pf_get_stride(pt->format, pt->width0), 64);
                else
                        nv20mt->level[l].pitch = pf_get_stride(pt->format, width);

                nv20mt->level[l].image_offset =
                        CALLOC(nr_faces, sizeof(unsigned));

                width = u_minify(width, 1);
        }

        for (f = 0; f < nr_faces; f++) {
                for (l = 0; l < pt->last_level; l++) {
                        nv20mt->level[l].image_offset[f] = offset;

                        if (!(pt->tex_usage & NOUVEAU_TEXTURE_USAGE_LINEAR) &&
                            u_minify(pt->width0, l + 1) > 1 && u_minify(pt->height0, l + 1) > 1)
                                offset += align(nv20mt->level[l].pitch * u_minify(pt->height0, l), 64);
                        else
                                offset += nv20mt->level[l].pitch * u_minify(pt->height0, l);
                }

                nv20mt->level[l].image_offset[f] = offset;
                offset += nv20mt->level[l].pitch * u_minify(pt->height0, l);
        }

        nv20mt->total_size = offset;
}

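/* "Blanket" an existing pipe_buffer with a texture: wrap a buffer allocated
 * elsewhere (typically handed in by the winsys, e.g. a scanout buffer) in a
 * pipe_texture without copying its contents.
 */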
static struct pipe_texture *
nv20_miptree_blanket(struct pipe_screen *pscreen, const struct pipe_texture *pt,
                     const unsigned *stride, struct pipe_buffer *pb)
{
        struct nv20_miptree *mt;

        /* Only supports 2D, non-mipmapped textures for the moment */
        if (pt->target != PIPE_TEXTURE_2D || pt->last_level != 0 ||
            pt->depth0 != 1)
                return NULL;

        mt = CALLOC_STRUCT(nv20_miptree);
        if (!mt)
                return NULL;

        mt->base = *pt;
        pipe_reference_init(&mt->base.reference, 1);
        mt->base.screen = pscreen;
        mt->level[0].pitch = stride[0];
        mt->level[0].image_offset = CALLOC(1, sizeof(unsigned));

        pipe_buffer_reference(&mt->buffer, pb);
        mt->bo = nouveau_bo(mt->buffer);
        return &mt->base;
}

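/* Create a new miptree: decide between a linear and a swizzled layout based
 * on the texture's size, usage and format, compute the layout, then allocate
 * the backing buffer (256-byte aligned) from the screen.
 */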
static struct pipe_texture *
nv20_miptree_create(struct pipe_screen *screen, const struct pipe_texture *pt)
{
        struct nv20_miptree *mt;
        unsigned buf_usage = PIPE_BUFFER_USAGE_PIXEL |
                             NOUVEAU_BUFFER_USAGE_TEXTURE;

        mt = MALLOC(sizeof(struct nv20_miptree));
        if (!mt)
                return NULL;
        mt->base = *pt;
        pipe_reference_init(&mt->base.reference, 1);
        mt->base.screen = screen;

        /* Swizzled textures must be POT */
        if (pt->width0 & (pt->width0 - 1) ||
            pt->height0 & (pt->height0 - 1))
                mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
        else
        if (pt->tex_usage & (PIPE_TEXTURE_USAGE_PRIMARY |
                             PIPE_TEXTURE_USAGE_DISPLAY_TARGET |
                             PIPE_TEXTURE_USAGE_DEPTH_STENCIL))
                mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
        else
        if (pt->tex_usage & PIPE_TEXTURE_USAGE_DYNAMIC)
                mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
        else {
                switch (pt->format) {
                /* TODO: Figure out which formats can be swizzled */
                case PIPE_FORMAT_A8R8G8B8_UNORM:
                case PIPE_FORMAT_X8R8G8B8_UNORM:
                case PIPE_FORMAT_R16_SNORM:
                {
                        if (debug_get_bool_option("NOUVEAU_NO_SWIZZLE", FALSE))
                                mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
                        break;
                }
                default:
                        mt->base.tex_usage |= NOUVEAU_TEXTURE_USAGE_LINEAR;
                }
        }

        if (pt->tex_usage & PIPE_TEXTURE_USAGE_DYNAMIC)
                buf_usage |= PIPE_BUFFER_USAGE_CPU_READ_WRITE;

        nv20_miptree_layout(mt);

        mt->buffer = screen->buffer_create(screen, 256, buf_usage, mt->total_size);
        if (!mt->buffer) {
                FREE(mt);
                return NULL;
        }
        mt->bo = nouveau_bo(mt->buffer);

        return &mt->base;
}

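/* Release the backing buffer reference and the per-level offset arrays,
 * then free the miptree itself.
 */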
static void
nv20_miptree_destroy(struct pipe_texture *pt)
{
        struct nv20_miptree *nv20mt = (struct nv20_miptree *)pt;
        int l;

        pipe_buffer_reference(&nv20mt->buffer, NULL);
        for (l = 0; l <= pt->last_level; l++) {
                if (nv20mt->level[l].image_offset)
                        FREE(nv20mt->level[l].image_offset);
        }
        FREE(nv20mt);
}

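/* Create a pipe_surface view of a single face/level/zslice of the miptree,
 * pointing at the offset computed by nv20_miptree_layout().
 */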
static struct pipe_surface *
nv20_miptree_surface_get(struct pipe_screen *screen, struct pipe_texture *pt,
                         unsigned face, unsigned level, unsigned zslice,
                         unsigned flags)
{
        struct nv20_miptree *nv20mt = (struct nv20_miptree *)pt;
        struct nv04_surface *ns;

        ns = CALLOC_STRUCT(nv04_surface);
        if (!ns)
                return NULL;
        pipe_texture_reference(&ns->base.texture, pt);
        ns->base.format = pt->format;
        ns->base.width = u_minify(pt->width0, level);
        ns->base.height = u_minify(pt->height0, level);
        ns->base.usage = flags;
        pipe_reference_init(&ns->base.reference, 1);
        ns->base.face = face;
        ns->base.level = level;
        ns->base.zslice = zslice;
        ns->pitch = nv20mt->level[level].pitch;

        if (pt->target == PIPE_TEXTURE_CUBE) {
                ns->base.offset = nv20mt->level[level].image_offset[face];
        } else
        if (pt->target == PIPE_TEXTURE_3D) {
                ns->base.offset = nv20mt->level[level].image_offset[zslice];
        } else {
                ns->base.offset = nv20mt->level[level].image_offset[0];
        }

        return &ns->base;
}

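/* Drop the surface's texture reference and free the surface wrapper. */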
static void
nv20_miptree_surface_destroy(struct pipe_surface *ps)
{
        pipe_texture_reference(&ps->texture, NULL);
        FREE(ps);
}

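/* Hook the miptree functions into the screen's texture interface. */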
void nv20_screen_init_miptree_functions(struct pipe_screen *pscreen)
{
        pscreen->texture_create = nv20_miptree_create;
        pscreen->texture_blanket = nv20_miptree_blanket;
        pscreen->texture_destroy = nv20_miptree_destroy;
        pscreen->get_tex_surface = nv20_miptree_surface_get;
        pscreen->tex_surface_destroy = nv20_miptree_surface_destroy;
}