diff options
author | Campbell Barton <ideasman42@gmail.com> | 2016-06-10 11:48:27 +0300 |
---|---|---|
committer | Campbell Barton <ideasman42@gmail.com> | 2016-06-10 11:48:54 +0300 |
commit | 1b6067b5678a37fc1aec7016300f8453b1984995 (patch) | |
tree | 6dbd14578dfc82595038823f20206dab8a04a317 | |
parent | 26b11140ada2994fd8846fb903e1f6bc90fbe8f6 (diff) |
Correct hasattr for code using item access
Also minor cleanup
-rw-r--r-- | doc/python_api/examples/bge.texture.2.py | 72 |
1 file changed, 35 insertions, 37 deletions
diff --git a/doc/python_api/examples/bge.texture.2.py b/doc/python_api/examples/bge.texture.2.py index f7e01705792..96619007fba 100644 --- a/doc/python_api/examples/bge.texture.2.py +++ b/doc/python_api/examples/bge.texture.2.py @@ -6,18 +6,18 @@ usable by OpenGL, they must be processed by a shader. The three shaders presente cover all common video capture cases. This file reflects the current video transfer method implemented in the Decklink module: -whenever possible the video images are transferred as float texture because this is more +whenever possible the video images are transferred as float texture because this is more compatible with GPUs. Of course, only the pixel formats that have a correspondant GL format can be transferred as float. Look for fg_shaders in this file for an exhaustive list. Other pixel formats will be transferred as 32 bits integer red-channel texture but this -won't work with certain GPU (Intel GMA); the corresponding shaders are not shown here. +won't work with certain GPU (Intel GMA); the corresponding shaders are not shown here. However, it should not be necessary to use any of them as the list below covers all practical cases of video capture with all types of Decklink product. In other words, only use one of the pixel format below and you will be fine. Note that depending on the video stream, only certain pixel formats will be allowed (others will throw an exception). -For example, to capture a PAL video stream, you must use one of the YUV formats. +For example, to capture a PAL video stream, you must use one of the YUV formats. 
To find which pixel format is suitable for a particular video stream, use the 'Media Express' utility that comes with the Decklink software : if you see the video in the 'Log and Capture' @@ -40,7 +40,7 @@ VertexShader = """ gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex; gl_TexCoord[0] = gl_MultiTexCoord0; } - + """ # For use with RGB video stream: the pixel is directly usable @@ -61,7 +61,7 @@ FragmentShader_R10l = """ ty = eye+gl_TexCoord[0].y*stereo; color = texture(tex, vec2(tx,ty)); color.a = 0.7; - gl_FragColor = color; + gl_FragColor = color; } """ @@ -69,18 +69,18 @@ FragmentShader_R10l = """ # FragmentShader_2vuy = """ #version 130 - uniform sampler2D tex; + uniform sampler2D tex; // stereo = 1.0 if 2D image, =0.5 if 3D (left eye below, right eye above) uniform float stereo; // eye = 0.0 for the left eye, 0.5 for the right eye uniform float eye; - void main(void) + void main(void) { vec4 color; - float tx, ty, width, Y, Cb, Cr; + float tx, ty, width, Y, Cb, Cr; int px; - tx = gl_TexCoord[0].x; + tx = gl_TexCoord[0].x; ty = eye+gl_TexCoord[0].y*stereo; width = float(textureSize(tex, 0).x); color = texture(tex, vec2(tx, ty)); @@ -93,14 +93,14 @@ FragmentShader_2vuy = """ Y = color.a; break; } - Y = (Y - 0.0625) * 1.168949772; - Cb = (color.b - 0.0625) * 1.142857143 - 0.5; - Cr = (color.r - 0.0625) * 1.142857143 - 0.5; - color.r = Y + 1.5748 * Cr; + Y = (Y - 0.0625) * 1.168949772; + Cb = (color.b - 0.0625) * 1.142857143 - 0.5; + Cr = (color.r - 0.0625) * 1.142857143 - 0.5; + color.r = Y + 1.5748 * Cr; color.g = Y - 0.1873 * Cb - 0.4681 * Cr; color.b = Y + 1.8556 * Cb; color.a = 0.7; - gl_FragColor = color; + gl_FragColor = color; } """ @@ -108,19 +108,19 @@ FragmentShader_2vuy = """ # FragmentShader_v210 = """ #version 130 - uniform sampler2D tex; + uniform sampler2D tex; // stereo = 1.0 if 2D image, =0.5 if 3D (left eye below, right eye above) uniform float stereo; // eye = 0.0 for the left eye, 0.5 for the right eye uniform float eye; - void 
main(void) + void main(void) { vec4 color, color1, color2, color3; int px; - float tx, ty, width, sx, dx, bx, Y, Cb, Cr; - tx = gl_TexCoord[0].x; - ty = eye+gl_TexCoord[0].y*stereo; + float tx, ty, width, sx, dx, bx, Y, Cb, Cr; + tx = gl_TexCoord[0].x; + ty = eye+gl_TexCoord[0].y*stereo; width = float(textureSize(tex, 0).x); // to sample macro pixels (6 pixels in 4 words) sx = tx*width*0.25+0.01; @@ -170,14 +170,14 @@ FragmentShader_v210 = """ Y = color3.r; break; } - Y = (Y - 0.0625) * 1.168949772; - Cb = (Cb - 0.0625) * 1.142857143 - 0.5; - Cr = (Cr - 0.0625) * 1.142857143 - 0.5; - color.r = Y + 1.5748 * Cr; + Y = (Y - 0.0625) * 1.168949772; + Cb = (Cb - 0.0625) * 1.142857143 - 0.5; + Cr = (Cr - 0.0625) * 1.142857143 - 0.5; + color.r = Y + 1.5748 * Cr; color.g = Y - 0.1873 * Cb - 0.4681 * Cr; color.b = Y + 1.8556 * Cb; color.a = 0.7; - gl_FragColor = color; + gl_FragColor = color; } """ @@ -197,17 +197,15 @@ fg_shaders = { } - - # # Helper function to attach a pixel shader to the material that receives the video frame. 
# def config_video(obj, format, pixel, is3D=False, mat=0, card=0): - if not pixel in fg_shaders: + if pixel not in fg_shaders: raise('Unsuported shader') shader = obj.meshes[0].materials[mat].getShader() - if shader != None and not shader.isValid(): + if shader is not None and not shader.isValid(): shader.setSource(VertexShader, fg_shaders[pixel], True) shader.setSampler('tex', 0) shader.setUniformEyef("eye") @@ -221,19 +219,19 @@ def config_video(obj, format, pixel, is3D=False, mat=0, card=0): # # Attach this function to an object that has a material with texture # and call it once to initialize the object -# +# def init(cont): - #config_video(cont.owner, 'HD720p5994', '8BitBGRA') - #config_video(cont.owner, 'HD720p5994', '8BitYUV') - #config_video(cont.owner, 'pal ', '10BitYUV') - config_video(cont.owner, 'pal ', '8BitYUV') - + # config_video(cont.owner, 'HD720p5994', '8BitBGRA') + # config_video(cont.owner, 'HD720p5994', '8BitYUV') + # config_video(cont.owner, 'pal ', '10BitYUV') + config_video(cont.owner, 'pal ', '8BitYUV') + # # To be called on every frame # def play(cont): obj = cont.owner - if hasattr(obj, "video"): - obj["video"].refresh(True) - + video = obj.get("video") + if video is not None: + video.refresh(True) |