From 49562da98d81a0a3d2a4c94b1138be9272587c91 Mon Sep 17 00:00:00 2001
From: Brecht Van Lommel
Date: Wed, 16 Jan 2019 15:24:43 +0100
Subject: Preferences: remove unnecessary 16 bit textures preference.

This is a leftover from a time when these were not supported on all GPUs.
---
 source/blender/gpu/intern/gpu_draw.c | 13 +------------
 1 file changed, 1 insertion(+), 12 deletions(-)

(limited to 'source/blender/gpu')

diff --git a/source/blender/gpu/intern/gpu_draw.c b/source/blender/gpu/intern/gpu_draw.c
index cf9cbc8ca89..7c06555f255 100644
--- a/source/blender/gpu/intern/gpu_draw.c
+++ b/source/blender/gpu/intern/gpu_draw.c
@@ -309,18 +309,7 @@ GPUTexture *GPU_texture_from_blender(
 	bool use_high_bit_depth = false, do_color_management = false;
 
 	if (ibuf->rect_float) {
-		if (U.use_16bit_textures) {
-			/* use high precision textures. This is relatively harmless because OpenGL gives us
-			 * a high precision format only if it is available */
-			use_high_bit_depth = true;
-		}
-		else if (ibuf->rect == NULL) {
-			IMB_rect_from_float(ibuf);
-		}
-		/* we may skip this in high precision, but if not, we need to have a valid buffer here */
-		else if (ibuf->userflags & IB_RECT_INVALID) {
-			IMB_rect_from_float(ibuf);
-		}
+		use_high_bit_depth = true;
 
 		/* TODO unneeded when float images are correctly treated as linear always */
 		if (!is_data) {
-- 
cgit v1.2.3