1 #ifndef __NOUVEAU_BUFFER_H__
2 #define __NOUVEAU_BUFFER_H__
4 #include "util/u_range.h"
5 #include "util/u_transfer.h"
9 struct nouveau_context
;
12 /* DIRTY: buffer was (or will be after the next flush) written to by GPU and
13 * resource->data has not been updated to reflect modified VRAM contents
15 * USER_MEMORY: resource->data is a pointer to client memory and may change
18 * USER_PTR: bo is backed by user memory mapped into the GPUs VM
20 #define NOUVEAU_BUFFER_STATUS_GPU_READING (1 << 0)
21 #define NOUVEAU_BUFFER_STATUS_GPU_WRITING (1 << 1)
22 #define NOUVEAU_BUFFER_STATUS_DIRTY (1 << 2)
23 #define NOUVEAU_BUFFER_STATUS_USER_PTR (1 << 6)
24 #define NOUVEAU_BUFFER_STATUS_USER_MEMORY (1 << 7)
26 #define NOUVEAU_BUFFER_STATUS_REALLOC_MASK NOUVEAU_BUFFER_STATUS_USER_MEMORY
28 /* Resources, if mapped into the GPU's address space, are guaranteed to
29 * have constant virtual addresses (nv50+).
31 * The address of a resource will lie within the nouveau_bo referenced,
32 * and this bo should be added to the memory manager's validation list.
34 struct nv04_resource
{
35 struct pipe_resource base
;
36 const struct u_resource_vtbl
*vtbl
;
38 uint64_t address
; /* virtual address (nv50+) */
40 uint8_t *data
; /* resource's contents, if domain == 0, or cached */
41 struct nouveau_bo
*bo
;
42 uint32_t offset
; /* offset into the data/bo */
47 uint16_t cb_bindings
[6]; /* per-shader per-slot bindings */
49 struct nouveau_fence
*fence
;
50 struct nouveau_fence
*fence_wr
;
52 struct nouveau_mm_allocation
*mm
;
54 /* buffer range that has been initialized */
55 struct util_range valid_buffer_range
;
/* Release the GPU-side storage (bo / suballocation) backing the resource. */
void
nouveau_buffer_release_gpu_storage(struct nv04_resource *);
/* Copy `size` bytes from src at src_pos into dst at dst_pos. */
void
nouveau_copy_buffer(struct nouveau_context *,
                    struct nv04_resource *dst, unsigned dst_pos,
                    struct nv04_resource *src, unsigned src_pos, unsigned size);
/* Move the buffer's storage into the given memory domain.
 * NOTE(review): return type restored as bool (success flag) — confirm against
 * the implementation.
 */
bool
nouveau_buffer_migrate(struct nouveau_context *,
                       struct nv04_resource *, unsigned domain);
/* Map the resource for CPU access at `offset`; pairs with
 * nouveau_resource_unmap(). Return type restored as void * (CPU pointer);
 * NOTE(review): confirm against the implementation.
 */
void *
nouveau_resource_map_offset(struct nouveau_context *, struct nv04_resource *,
                            uint32_t offset, uint32_t flags);
/* Counterpart to nouveau_resource_map_offset().
 * NOTE(review): restored as a static inline no-op — mappings apparently need
 * no explicit teardown here; confirm against the original.
 */
static inline void
nouveau_resource_unmap(struct nv04_resource *res)
{
   /* no-op */
}
/* Downcast a generic pipe_resource to the driver's nv04_resource wrapper. */
static inline struct nv04_resource *
nv04_resource(struct pipe_resource *resource)
{
   return (struct nv04_resource *)resource;
}
86 /* is resource mapped into the GPU's address space (i.e. VRAM or GART) ? */
88 nouveau_resource_mapped_by_gpu(struct pipe_resource
*resource
)
90 return nv04_resource(resource
)->domain
!= 0;
/* Create a buffer resource from the given template. */
struct pipe_resource *
nouveau_buffer_create(struct pipe_screen *pscreen,
                      const struct pipe_resource *templ);
/* Create a buffer resource backed by client-provided memory.
 * NOTE(review): the trailing parameters were lost in this copy of the header
 * (the declaration ended with a comma); restored as (user_ptr, usage) —
 * confirm against the original.
 */
struct pipe_resource *
nouveau_buffer_create_from_user(struct pipe_screen *pscreen,
                                const struct pipe_resource *templ,
                                void *user_ptr, unsigned usage);
/* Wrap `bytes` bytes of client memory at `ptr` in a user-buffer resource. */
struct pipe_resource *
nouveau_user_buffer_create(struct pipe_screen *screen, void *ptr,
                           unsigned bytes, unsigned usage);
/* Upload the [base, base+size) range of a user buffer to GPU storage.
 * NOTE(review): return type restored as bool (success flag) — confirm against
 * the implementation.
 */
bool
nouveau_user_buffer_upload(struct nouveau_context *, struct nv04_resource *,
                           unsigned base, unsigned size);
/* Discard the buffer's current contents (pipe invalidate_resource hook). */
void
nouveau_buffer_invalidate(struct pipe_context *pipe,
                          struct pipe_resource *resource);
/* Copy data to a scratch buffer and return address & bo the data resides in.
 * Returns 0 on failure.
 * (Return type restored as uint64_t to match nv04_resource's 64-bit virtual
 * address and the "Returns 0 on failure" contract above.)
 */
uint64_t
nouveau_scratch_data(struct nouveau_context *,
                     const void *data, unsigned base, unsigned size,
                     struct nouveau_bo **);