git.blender.org/blender.git
author     Antony Riakiotakis <kalast@gmail.com>    2015-03-30 15:14:32 +0300
committer  Antony Riakiotakis <kalast@gmail.com>    2015-03-30 15:14:52 +0300
commit     590efaacb862aa4352b11e77d37805df5445a90f (patch)
tree       5549fbbe7877cde99213b9c5b181dee4b0d49e45 /source/blender/gpu/intern/gpu_extensions.c
parent     4aeb34dc82e904660a1df06617406028b5331ff1 (diff)
Potential fix for T43987, ambient occlusion different between offscreen and on-screen rendering.

Aaaaah, the beauty of driver implementations of OpenGL! It turns out the problem here is that drivers calculate df/dy differently in some cases (probably because OpenGL counts y in reverse to how the window system does, so drivers can get confused). Fixed this for the ATI case based on the info we have so far; there is also the Intel case, which will be handled separately (we are missing info on Intel's renderer string, etc.). Unfortunately we can't really fix this for the general case, so we'll have to handle cases as they come in on our tracker, by adding silly string comparisons in our GPU initialization module <sigh>.
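For illustration only (not part of this commit): the stored factor is meant to let a fragment shader cancel the driver's sign flip by scaling its raw dFdy() result. A minimal sketch of that shader-side compensation, with the GLSL kept as a C string and a hypothetical uniform name:

/* Hypothetical sketch: shows how a df/dy factor could be consumed in GLSL.
 * The uniform name "dfdyfactor" is illustrative and does not appear in this commit. */
static const char *dfdy_compensation_glsl =
	"uniform float dfdyfactor;\n"   /* 1.0 on screen; driver-dependent value off screen */
	"\n"
	"float compensated_dfdy(float value)\n"
	"{\n"
	"	/* cancel the sign flip some drivers apply to dFdy() when rendering to an FBO */\n"
	"	return dfdyfactor * dFdy(value);\n"
	"}\n";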
Diffstat (limited to 'source/blender/gpu/intern/gpu_extensions.c')
-rw-r--r--    source/blender/gpu/intern/gpu_extensions.c    29
1 file changed, 28 insertions(+), 1 deletion(-)
diff --git a/source/blender/gpu/intern/gpu_extensions.c b/source/blender/gpu/intern/gpu_extensions.c
index bc6e24f5072..c602fdad18e 100644
--- a/source/blender/gpu/intern/gpu_extensions.c
+++ b/source/blender/gpu/intern/gpu_extensions.c
@@ -40,6 +40,7 @@
#include "BLI_blenlib.h"
#include "BLI_utildefines.h"
#include "BLI_math_base.h"
+#include "BLI_math_vector.h"
#include "BKE_global.h"
@@ -113,6 +114,9 @@ static struct GPUGlobal {
GPUTexture *invalid_tex_1D; /* texture used in place of invalid textures (not loaded correctly, missing) */
GPUTexture *invalid_tex_2D;
GPUTexture *invalid_tex_3D;
+ float dfdyfactors[2]; /* workaround for different calculation of dfdy factors on GPUs. Some GPUs/drivers
+                        * calculate dfdy in shader differently when drawing to an offscreen buffer. First
+                        * number is factor on screen and second is off-screen */
} GG = {1, 0};
/* Number of maximum output slots. We support 4 outputs for now (usually we wouldn't need more to preserve fill rate) */
@@ -144,10 +148,15 @@ int GPU_max_texture_size(void)
return GG.maxtexsize;
}
+void GPU_get_dfdy_factors(float fac[2])
+{
+ copy_v2_v2(fac, GG.dfdyfactors);
+}
+
void gpu_extensions_init(void)
{
GLint r, g, b;
- const char *vendor, *renderer;
+ const char *vendor, *renderer, *version;
/* glewIsSupported("GL_VERSION_2_0") */
@@ -168,6 +177,7 @@ void gpu_extensions_init(void)
vendor = (const char *)glGetString(GL_VENDOR);
renderer = (const char *)glGetString(GL_RENDERER);
+ version = (const char *)glGetString(GL_VERSION);
if (strstr(vendor, "ATI")) {
GG.device = GPU_DEVICE_ATI;
@@ -244,6 +254,23 @@ void gpu_extensions_init(void)
#endif
+ /* df/dy calculation factors, those are dependent on driver */
+ if ((strstr(vendor, "ATI") && strstr(version, "3.3.10750"))) {
+ GG.dfdyfactors[0] = 1.0;
+ GG.dfdyfactors[1] = -1.0;
+ }
+ /*
+ if ((strstr(vendor, "Intel"))) {
+ GG.dfdyfactors[0] = -1.0;
+ GG.dfdyfactors[1] = 1.0;
+ }
+ */
+ else {
+ GG.dfdyfactors[0] = 1.0;
+ GG.dfdyfactors[1] = 1.0;
+ }
+
+
GPU_invalid_tex_init();
GPU_simple_shaders_init();
}
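
A hedged usage sketch of the new accessor, assuming index 0 is the on-screen factor and index 1 the off-screen one (per the struct comment). The uniform location, the is_offscreen flag, and repeating the prototype here are illustrative only:

#include <stdbool.h>
#include <GL/glew.h>

/* prototype of the accessor added by this commit, repeated so the sketch is self-contained */
void GPU_get_dfdy_factors(float fac[2]);

/* Hypothetical helper: pick the factor matching the current render target and
 * upload it to a bound shader. Not part of this commit. */
static void set_dfdy_uniform(GLint uniform_loc, bool is_offscreen)
{
	float factors[2];

	GPU_get_dfdy_factors(factors);

	/* factors[0] applies to on-screen drawing, factors[1] to off-screen (FBO) drawing */
	glUniform1f(uniform_loc, is_offscreen ? factors[1] : factors[0]);
}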