git.blender.org/blender.git
author    Clément Foucault <foucault.clem@gmail.com>  2018-09-18 15:22:42 +0300
committer Clément Foucault <foucault.clem@gmail.com>  2018-09-18 15:22:42 +0300
commit    eafec6d4f72b05c389576358b9e1d0ebd87cf174 (patch)
tree      77367435ef72425f389509288ced9ef79758d8ae /source/blender/gpu/intern/gpu_extensions.c
parent    bf2a54b0584c1e568af7ecf67ae2a623bc5263fe (diff)
GPUShader: Manually validate sampler count
This happens on NVidia GPUs: using more textures than the maximum allowed by the GL will NOT trigger a linking error (maybe because of the bindless texture implementation?). So in this case we manually count the number of samplers per shader stage and compare it against the GL limit. We discard the shader if the sampler count is too high, which shows the user that something is wrong with the shader.
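As an illustration of the count-and-compare idea described above, here is a minimal sketch of a per-stage check. The function name stage_sampler_count_ok() and the naive token scan are assumptions for illustration only; this is not the code the commit adds to the shader-creation path (this page shows only the gpu_extensions.c part of the change).

#include <stdbool.h>
#include <string.h>

/* Sketch only: count occurrences of "sampler" in one stage's GLSL
 * source and compare against that stage's GL limit. A real check
 * would skip comments and account for array sizes; this just shows
 * the per-stage count-and-compare idea. */
static bool stage_sampler_count_ok(const char *stage_source, int max_samplers)
{
	int count = 0;
	for (const char *p = strstr(stage_source, "sampler"); p; p = strstr(p + 1, "sampler")) {
		count++;
	}
	return count <= max_samplers;
}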
Diffstat (limited to 'source/blender/gpu/intern/gpu_extensions.c')
-rw-r--r--	source/blender/gpu/intern/gpu_extensions.c	27
1 file changed, 23 insertions(+), 4 deletions(-)
diff --git a/source/blender/gpu/intern/gpu_extensions.c b/source/blender/gpu/intern/gpu_extensions.c
index 112618de92d..198f986d06e 100644
--- a/source/blender/gpu/intern/gpu_extensions.c
+++ b/source/blender/gpu/intern/gpu_extensions.c
@@ -68,6 +68,9 @@ static struct GPUGlobal {
 	GLint maxtexlayers;
 	GLint maxcubemapsize;
 	GLint maxtextures;
+	GLint maxtexturesfrag;
+	GLint maxtexturesgeom;
+	GLint maxtexturesvert;
 	GLint maxubosize;
 	GLint maxubobinds;
 	int colordepth;
@@ -106,6 +109,21 @@ int GPU_max_textures(void)
 	return GG.maxtextures;
 }
 
+int GPU_max_textures_frag(void)
+{
+	return GG.maxtexturesfrag;
+}
+
+int GPU_max_textures_geom(void)
+{
+	return GG.maxtexturesgeom;
+}
+
+int GPU_max_textures_vert(void)
+{
+	return GG.maxtexturesvert;
+}
+
 float GPU_max_texture_anisotropy(void)
 {
 	return GG.max_anisotropy;
@@ -144,7 +162,10 @@ void gpu_extensions_init(void)
 	 */
 	BLI_assert(GLEW_VERSION_3_3);
 
-	glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &GG.maxtextures);
+	glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &GG.maxtexturesfrag);
+	glGetIntegerv(GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, &GG.maxtexturesvert);
+	glGetIntegerv(GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS, &GG.maxtexturesgeom);
+	glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, &GG.maxtextures);
 	glGetIntegerv(GL_MAX_TEXTURE_SIZE, &GG.maxtexsize);
 	glGetIntegerv(GL_MAX_ARRAY_TEXTURE_LAYERS, &GG.maxtexlayers);
@@ -172,9 +193,7 @@ void gpu_extensions_init(void)
 	glGetFramebufferAttachmentParameteriv(GL_FRAMEBUFFER, GL_FRONT_LEFT, GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE, &b);
 	GG.colordepth = r + g + b; /* Assumes same depth for RGB. */
 
-	if (GLEW_VERSION_3_2 || GLEW_ARB_texture_multisample) {
-		glGetIntegerv(GL_MAX_COLOR_TEXTURE_SAMPLES, &GG.samples_color_texture_max);
-	}
+	glGetIntegerv(GL_MAX_COLOR_TEXTURE_SAMPLES, &GG.samples_color_texture_max);
 
 	const char *vendor = (const char *)glGetString(GL_VENDOR);
 	const char *renderer = (const char *)glGetString(GL_RENDERER);
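Taken together with the per-stage getters added above, a caller in the shader-creation path could reject an over-budget shader along these lines. This is a hypothetical usage sketch: sampler_count_in() and the vert_src/geom_src/frag_src variables stand in for whatever per-stage counting the actual validation performs; only the GPU_max_textures_*() getters come from this diff.

/* Hypothetical usage sketch: compare each stage against its own GL
 * limit using the getters this diff introduces, and refuse to link
 * if any stage exceeds it. */
if (sampler_count_in(vert_src) > GPU_max_textures_vert() ||
    sampler_count_in(geom_src) > GPU_max_textures_geom() ||
    sampler_count_in(frag_src) > GPU_max_textures_frag())
{
	fprintf(stderr, "GPUShader: too many samplers for this GPU\n");
	return NULL; /* discard the shader so the user sees the failure */
}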