{
   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_device_memory, mem, ranges[i].memory);
-      void *p = mem->map + (ranges[i].offset & ~CACHELINE_MASK);
-      void *end;
+      if (ranges[i].offset >= mem->map_size)
+         continue;
-      if (ranges[i].offset + ranges[i].size > mem->map_size)
-         end = mem->map + mem->map_size;
-      else
-         end = mem->map + ranges[i].offset + ranges[i].size;
-
-      while (p < end) {
-         __builtin_ia32_clflush(p);
-         p += CACHELINE_SIZE;
-      }
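+      /* Flush only the part of the range that lies within the mapping. */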
+      anv_clflush_range(mem->map + ranges[i].offset,
+                        MIN2(ranges[i].size, mem->map_size - ranges[i].offset));
   }
}
#define CACHELINE_MASK 63
static inline void
-anv_flush_range(void *start, size_t size)
+anv_clflush_range(void *start, size_t size)
{
   void *p = (void *) (((uintptr_t) start) & ~CACHELINE_MASK);
   void *end = start + size;
-   __builtin_ia32_mfence();
   while (p < end) {
      __builtin_ia32_clflush(p);
      p += CACHELINE_SIZE;
   }
}

static inline void
-anv_invalidate_range(void *start, size_t size)
+anv_flush_range(void *start, size_t size)
{
-   void *p = (void *) (((uintptr_t) start) & ~CACHELINE_MASK);
-   void *end = start + size;
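+   /* Fence first so the CPU writes we are about to flush have landed. */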
+   __builtin_ia32_mfence();
+   anv_clflush_range(start, size);
+}
+
-   while (p < end) {
-      __builtin_ia32_clflush(p);
-      p += CACHELINE_SIZE;
-   }
+static inline void
+anv_invalidate_range(void *start, size_t size)
+{
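+   /* Flush the stale lines first, then fence so later reads see fresh data. */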
+   anv_clflush_range(start, size);
   __builtin_ia32_mfence();
}
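
With the patch applied, the header helpers read roughly as follows. This is assembled
from the context and added lines above; the CACHELINE_SIZE define is an assumption
(64, as implied by CACHELINE_MASK), and the comments are explanatory only.

#define CACHELINE_SIZE 64   /* assumed; implied by CACHELINE_MASK */
#define CACHELINE_MASK 63

static inline void
anv_clflush_range(void *start, size_t size)
{
   void *p = (void *) (((uintptr_t) start) & ~CACHELINE_MASK);
   void *end = start + size;

   while (p < end) {
      __builtin_ia32_clflush(p);
      p += CACHELINE_SIZE;
   }
}

static inline void
anv_flush_range(void *start, size_t size)
{
   /* Fence first so the CPU writes we are about to flush have landed. */
   __builtin_ia32_mfence();
   anv_clflush_range(start, size);
}

static inline void
anv_invalidate_range(void *start, size_t size)
{
   /* Flush the stale lines first, then fence so later reads see fresh data. */
   anv_clflush_range(start, size);
   __builtin_ia32_mfence();
}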