Add a dri config option to enable the max texture level hack
author	Dave Airlie <airlied@freedesktop.org>
Sat, 26 Feb 2005 05:24:04 +0000 (05:24 +0000)
committer	Dave Airlie <airlied@freedesktop.org>
Sat, 26 Feb 2005 05:24:04 +0000 (05:24 +0000)
Make YCbCr texture support depend on a chipset feature flag (R200_CHIPSET_YCBCR_BROKEN); the per-chipset flag assignments still need to be filled in.

src/mesa/drivers/dri/common/xmlpool.h
src/mesa/drivers/dri/r200/r200_context.c
src/mesa/drivers/dri/r200/r200_screen.c
src/mesa/drivers/dri/r200/r200_screen.h
src/mesa/drivers/dri/radeon/radeon_context.c
src/mesa/drivers/dri/radeon/radeon_screen.c

index 29b5bf597d99e4bbf90ac7951713f6ba62d97453..dfa5f49524b3de700fe144a01f18d7b40969e7d1 100644 (file)
@@ -293,6 +293,11 @@ DRI_CONF_OPT_BEGIN_V(texture_units,int,def, # min ":" # max ) \
         DRI_CONF_DESC(de,"Anzahl der Textureinheiten") \
 DRI_CONF_OPT_END
 
+#define DRI_CONF_TEXTURE_LEVEL_HACK(def) \
+DRI_CONF_OPT_BEGIN(texture_level_hack,bool,def) \
+  DRI_CONF_DESC(en,"Enable texture level hack for radeon/r200 for playing games with compressed textures") \
+DRI_CONF_OPT_END
+
 #define DRI_CONF_TEXTURE_HEAPS_ALL 0
 #define DRI_CONF_TEXTURE_HEAPS_CARD 1
 #define DRI_CONF_TEXTURE_HEAPS_GART 2
@@ -331,4 +336,5 @@ DRI_CONF_OPT_BEGIN(nv_vertex_program,bool,def) \
         DRI_CONF_DESC(fr,"Activer GL_NV_vertex_program") \
 DRI_CONF_OPT_END
 
+
 #endif
index 25f9d2cb96b4ef65af6fa73722922fba603ffa04..bc50f3de68e99ceb8c737efffe557b477bb5ca54 100644 (file)
@@ -365,6 +365,13 @@ GLboolean r200CreateContext( const __GLcontextModes *glVisual,
                                 12,
                                 GL_FALSE );
 
+   /* adjust max texture size a bit. Hack, but I really want to use larger textures
+      which will work just fine in 99.999999% of all cases, especially with texture compression... */
+   if (driQueryOptionb( &rmesa->optionCache, "texture_level_hack" ))
+   {
+     if (ctx->Const.MaxTextureLevels < 12) ctx->Const.MaxTextureLevels += 1;
+   }
+
    ctx->Const.MaxTextureMaxAnisotropy = 16.0;
 
    /* No wide points.
@@ -415,9 +422,9 @@ GLboolean r200CreateContext( const __GLcontextModes *glVisual,
    _math_matrix_set_identity( &rmesa->tmpmat );
 
    driInitExtensions( ctx, card_extensions, GL_TRUE );
-   if (rmesa->r200Screen->chipset & R200_CHIPSET_REAL_R200) {
-   /* yuv textures only work with r200 chips for unknown reasons, the
-      others get the bit ordering right but don't actually do YUV-RGB conversion */
+   if (!(rmesa->r200Screen->chipset & R200_CHIPSET_YCBCR_BROKEN)) {
+     /* yuv textures don't work with some chips - R200 / rv280 okay so far
+       others get the bit ordering right but don't actually do YUV-RGB conversion */
       _mesa_enable_extension( ctx, "GL_MESA_ycbcr_texture" );
    }
    if (rmesa->glCtx->Mesa_DXTn) {
index 76d70160924aa258733468188d7dc4ceaf7c71ea..103d5d338439dc18800b627ddfc25bf4184991bf 100644 (file)
@@ -73,6 +73,7 @@ DRI_CONF_BEGIN
         DRI_CONF_COLOR_REDUCTION(DRI_CONF_COLOR_REDUCTION_DITHER)
         DRI_CONF_ROUND_MODE(DRI_CONF_ROUND_TRUNC)
         DRI_CONF_DITHER_MODE(DRI_CONF_DITHER_XERRORDIFF)
+        DRI_CONF_TEXTURE_LEVEL_HACK(false)
     DRI_CONF_SECTION_END
     DRI_CONF_SECTION_DEBUG
         DRI_CONF_NO_RAST(false)
@@ -82,7 +83,7 @@ DRI_CONF_BEGIN
         DRI_CONF_NV_VERTEX_PROGRAM(false)
     DRI_CONF_SECTION_END
 DRI_CONF_END;
-static const GLuint __driNConfigOptions = 15;
+static const GLuint __driNConfigOptions = 16;
 
 #if 1
 /* Including xf86PciInfo.h introduces a bunch of errors...
index fa8c33faadf9e3ab697a61dbb26fdd36239db159..fdfb21a6c545706e7f9ffe9886419fc665043329 100644 (file)
@@ -52,6 +52,7 @@ typedef struct {
 /* chipset features */
 #define R200_CHIPSET_TCL       (1 << 0)
 #define R200_CHIPSET_REAL_R200  (1 << 1)
+#define R200_CHIPSET_YCBCR_BROKEN (1 << 2) 
 
 
 #define R200_NR_TEX_HEAPS 2
index 6a2f3dc353e2bc21e151b9f4c62d89eb81519154..fb00e59b46b5b84a38871a21abcd4f12a95abb29 100644 (file)
@@ -342,6 +342,13 @@ radeonCreateContext( const __GLcontextModes *glVisual,
                                 12,
                                 GL_FALSE );
 
+   /* adjust max texture size a bit. Hack, but I really want to use larger textures
+      which will work just fine in 99.999999% of all cases, especially with texture compression... */
+   if (driQueryOptionb( &rmesa->optionCache, "texture_level_hack" ))
+   {
+     if (ctx->Const.MaxTextureLevels < 12) ctx->Const.MaxTextureLevels += 1;
+   }
+
    ctx->Const.MaxTextureMaxAnisotropy = 16.0;
 
    /* No wide points.
index eba24d82d6acb3ae24174e0c510406731982e112..08c85fdf5c0888c24554b433a981017daaef8448 100644 (file)
@@ -70,12 +70,13 @@ DRI_CONF_BEGIN
         DRI_CONF_COLOR_REDUCTION(DRI_CONF_COLOR_REDUCTION_DITHER)
         DRI_CONF_ROUND_MODE(DRI_CONF_ROUND_TRUNC)
         DRI_CONF_DITHER_MODE(DRI_CONF_DITHER_XERRORDIFF)
+        DRI_CONF_TEXTURE_LEVEL_HACK(false)
     DRI_CONF_SECTION_END
     DRI_CONF_SECTION_DEBUG
         DRI_CONF_NO_RAST(false)
     DRI_CONF_SECTION_END
 DRI_CONF_END;
-static const GLuint __driNConfigOptions = 12;
+static const GLuint __driNConfigOptions = 13;
 
 #if 1
 /* Including xf86PciInfo.h introduces a bunch of errors...