When set to a truthy value, this globally disables the minmax cache for all
buffer objects.
There are no #ifdef DEBUG guards because this option can be useful for
benchmarking.
Reviewed-by: Marek Olšák <marek.olsak@amd.com>
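The "truthy" interpretation comes from env_var_as_boolean() in util/debug.h,
which the patch calls below. As a rough, illustrative sketch only (the helper
name parse_boolean_env and the exact set of accepted strings are assumptions,
not taken from this patch), the parsing behaves roughly like this:

    /* Illustrative sketch: approximately how a boolean environment
     * variable is interpreted.  The accepted strings shown here are an
     * assumption for illustration, not lifted from this patch. */
    #include <stdbool.h>
    #include <stdlib.h>
    #include <string.h>
    #include <strings.h>

    static bool
    parse_boolean_env(const char *name, bool default_value)
    {
       const char *str = getenv(name);

       if (str == NULL)
          return default_value;

       /* Values such as "1", "true" or "yes" enable the option... */
       if (strcmp(str, "1") == 0 || strcasecmp(str, "true") == 0 ||
           strcasecmp(str, "yes") == 0)
          return true;

       /* ...and "0", "false" or "no" disable it. */
       if (strcmp(str, "0") == 0 || strcasecmp(str, "false") == 0 ||
           strcasecmp(str, "no") == 0)
          return false;

       return default_value;
    }

With parsing along those lines, launching an application with
MESA_NO_MINMAX_CACHE=true set in the environment disables the cache for every
buffer object created by that process.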
"130". Mesa will not really implement all the features of the given language version
if it's higher than what's normally reported. (for developers only)
<li>MESA_GLSL - <a href="shading.html#envvars">shading language compiler options</a>
+<li>MESA_NO_MINMAX_CACHE - when set, the minmax index cache is globally disabled.
</ul>
#include <stdbool.h>
#include <inttypes.h> /* for PRId64 macro */
+#include "util/debug.h"
#include "glheader.h"
#include "enums.h"
#include "hash.h"
}
+/**
+ * Get the value of MESA_NO_MINMAX_CACHE.
+ */
+static bool
+get_no_minmax_cache(void)
+{
+ static bool read = false;
+ static bool disable = false;
+
+ if (!read) {
+ disable = env_var_as_boolean("MESA_NO_MINMAX_CACHE", false);
+ read = true;
+ }
+
+ return disable;
+}
+
+
/**
* Initialize a buffer object to default values.
*/
obj->RefCount = 1;
obj->Name = name;
obj->Usage = GL_STATIC_DRAW_ARB;
+
+ if (get_no_minmax_cache())
+ obj->UsageHistory |= USAGE_DISABLE_MINMAX_CACHE;
}
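The hunk above only records the disable bit in the new buffer object's
UsageHistory; the code that honors it lies outside this patch. A minimal
sketch of how the min/max index path might consult the flag, assuming the
gl_buffer_object type and the USAGE_DISABLE_MINMAX_CACHE bit from mtypes.h
(the helper name use_minmax_cache is hypothetical):

    /* Sketch (assumption, not part of this patch): how the min/max index
     * code might check the flag before using or building its cache. */
    static bool
    use_minmax_cache(const struct gl_buffer_object *bufferObj)
    {
       /* Any buffer created while MESA_NO_MINMAX_CACHE was set carries the
        * disable bit in its usage history, so the cache is bypassed. */
       if (bufferObj->UsageHistory & USAGE_DISABLE_MINMAX_CACHE)
          return false;

       return true;
    }

Storing the decision in UsageHistory at buffer creation, rather than
re-reading the environment later, keeps the check per buffer object and avoids
repeated getenv() calls on hot paths.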