git.blender.org/blender.git
author    Dalai Felinto <dfelinto@gmail.com>    2016-03-10 15:28:29 +0300
committer Dalai Felinto <dfelinto@gmail.com>    2016-03-10 15:28:29 +0300
commit    de7a8af79380ba9c77bedb068b72f3d7e05bc98e (patch)
tree      0a3b7a62ba1f95c1b5e78ee4d04c9460ebf7b5c0 /intern/cycles
parent    1bae9c6a5b15160b5bdbda479ac144841fd78b63 (diff)
Multi-View: Cycles - Spherical Stereo support (VR Panoramas)
This is a new option for panorama cameras to render stereo that can be used in virtual reality devices. The option is available under the camera panel when Multi-View is enabled (Views option in the Render Layers panel).

Known limitations:
------------------

* Parallel convergence is not supported (you need to set a convergence distance really high to simulate this effect).

* Pivot was not supposed to affect the render but it does; this has to be looked at, but for now set it to CENTER.

* Derivatives in the perspective camera need to be pre-computed, or we should get rid of kcam->dx/dy (Sergey's words, I don't fully grasp the implications here).

* This works in perspective mode and in panorama mode. However, to fully benefit from this effect in perspective mode you need to render a cube map. (There is an addon for this, developed separately; perhaps we could include it in master.)

* We have no support for "neck distance" at the moment. This is supposed to help with objects at short distances.

* We have no support to rotate the "Up Axis" of the stereo plane. Meaning, we hardcode 0,0,1 as UP and create the stereo pair relative to that. (Although we could take the camera local UP when rendering panoramas, this wouldn't work for perspective cameras.)

* We have no support for interocular distance attenuation based on the proximity of the poles (which helps to reduce the pole rotation effect/artifact).

THIS NEEDS DOCS - both in the 2.78 release log and the Blender manual. Meanwhile you can read about it here: http://code.blender.org/2015/03/1451

This patch specifically dates from March 2015, as you can see in the code.blender.org post. Many thanks to all the reviewers, testers and minor sponsors who helped me maintain spherical-stereo for 1 year.

All that said, have fun with this. This feature was what got me started with Multi-View development (at the time what I was looking for was Fulldome stereo support, but the implementation is the same). In order to make this into Blender I had to aim it at a less-specific use case; thus Multi-View started. (This was December 2012, during Siggraph Asia and a chat I had with Paul Bourke during the conference.) I don't have the original patch anymore, but you can find a rebased version of it from March 2013, right before I started with the Multi-View project: https://developer.blender.org/P332

Reviewers: sergey, dingto

Subscribers: #cycles

Differential Revision: https://developer.blender.org/D1223
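Editor's note (not part of the commit): the sketch below is a minimal standalone C++ illustration of the per-eye ray construction this patch adds to kernel_projection.h: the eye is offset horizontally along cross(dir, up), with the hardcoded up axis (0,0,1) mentioned in the limitations above, and the ray is re-aimed at a convergence point placed convergence_distance along the original direction. All names and helpers here are illustrative, not the kernel's own.

/* Editorial sketch: standalone illustration of the spherical-stereo eye
 * offset and re-aim, mirroring the math added to kernel_projection.h. */
#include <cmath>

struct float3 { float x, y, z; };

static float3 operator+(float3 a, float3 b) { return {a.x + b.x, a.y + b.y, a.z + b.z}; }
static float3 operator-(float3 a, float3 b) { return {a.x - b.x, a.y - b.y, a.z - b.z}; }
static float3 operator*(float3 a, float s)  { return {a.x * s, a.y * s, a.z * s}; }

static float3 cross(float3 a, float3 b)
{
	return {a.y * b.z - a.z * b.y,
	        a.z * b.x - a.x * b.z,
	        a.x * b.y - a.y * b.x};
}

static float3 normalize(float3 a)
{
	float len = std::sqrt(a.x * a.x + a.y * a.y + a.z * a.z);
	return {a.x / len, a.y / len, a.z / len};
}

/* interocular_offset is -interocular_distance/2 for the left eye and
 * +interocular_distance/2 for the right eye (see the switch added in
 * render/camera.cpp). With an offset of 0 the ray is unchanged. */
static void spherical_stereo_ray(float3 pos, float3 dir,
                                 float interocular_offset,
                                 float convergence_distance,
                                 float3 *eye_pos, float3 *eye_dir)
{
	const float3 up = {0.0f, 0.0f, 1.0f};     /* up axis is hardcoded */
	float3 side = normalize(cross(dir, up));  /* eyes shift horizontally only */

	*eye_pos = pos + side * interocular_offset;

	/* Both eyes aim at the point convergence_distance along the original ray;
	 * a very large convergence distance approximates parallel convergence. */
	float3 screen = pos + normalize(dir) * convergence_distance;
	*eye_dir = normalize(screen - *eye_pos);
}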
Diffstat (limited to 'intern/cycles')
-rw-r--r--  intern/cycles/app/cycles_xml.cpp           |  4
-rw-r--r--  intern/cycles/blender/addon/__init__.py    |  1
-rw-r--r--  intern/cycles/blender/blender_camera.cpp   | 45
-rw-r--r--  intern/cycles/blender/blender_session.cpp  |  9
-rw-r--r--  intern/cycles/blender/blender_sync.h       |  3
-rw-r--r--  intern/cycles/kernel/kernel_camera.h       | 56
-rw-r--r--  intern/cycles/kernel/kernel_projection.h   | 26
-rw-r--r--  intern/cycles/kernel/kernel_types.h        |  5
-rw-r--r--  intern/cycles/render/camera.cpp            | 48
-rw-r--r--  intern/cycles/render/camera.h              | 13
10 files changed, 172 insertions, 38 deletions
diff --git a/intern/cycles/app/cycles_xml.cpp b/intern/cycles/app/cycles_xml.cpp
index dd1dae121b1..a8f90c7e6d4 100644
--- a/intern/cycles/app/cycles_xml.cpp
+++ b/intern/cycles/app/cycles_xml.cpp
@@ -359,6 +359,10 @@ static void xml_read_camera(const XMLReadState& state, pugi::xml_node node)
xml_read_float(&cam->fisheye_fov, node, "fisheye_fov");
xml_read_float(&cam->fisheye_lens, node, "fisheye_lens");
+ xml_read_float(&cam->use_spherical_stereo, node, "use_spherical_stereo");
+ xml_read_float(&cam->interocular_distance, node, "interocular_distance");
+ xml_read_float(&cam->convergence_distance, node, "convergence_distance");
+
xml_read_float(&cam->sensorwidth, node, "sensorwidth");
xml_read_float(&cam->sensorheight, node, "sensorheight");
diff --git a/intern/cycles/blender/addon/__init__.py b/intern/cycles/blender/addon/__init__.py
index 150b4e75581..29388317873 100644
--- a/intern/cycles/blender/addon/__init__.py
+++ b/intern/cycles/blender/addon/__init__.py
@@ -43,6 +43,7 @@ class CyclesRender(bpy.types.RenderEngine):
bl_use_preview = True
bl_use_exclude_layers = True
bl_use_save_buffers = True
+ bl_use_spherical_stereo = True
def __init__(self):
self.session = None
diff --git a/intern/cycles/blender/blender_camera.cpp b/intern/cycles/blender/blender_camera.cpp
index cdbee52bfa9..2a7c1d3d943 100644
--- a/intern/cycles/blender/blender_camera.cpp
+++ b/intern/cycles/blender/blender_camera.cpp
@@ -62,6 +62,9 @@ struct BlenderCamera {
float latitude_max;
float longitude_min;
float longitude_max;
+ bool use_spherical_stereo;
+ float interocular_distance;
+ float convergence_distance;
enum { AUTO, HORIZONTAL, VERTICAL } sensor_fit;
float sensor_width;
@@ -110,7 +113,8 @@ static void blender_camera_init(BlenderCamera *bcam,
static float blender_camera_focal_distance(BL::RenderEngine& b_engine,
BL::Object& b_ob,
- BL::Camera& b_camera)
+ BL::Camera& b_camera,
+ BlenderCamera *bcam)
{
BL::Object b_dof_object = b_camera.dof_object();
@@ -119,8 +123,8 @@ static float blender_camera_focal_distance(BL::RenderEngine& b_engine,
/* for dof object, return distance along camera Z direction */
BL::Array<float, 16> b_ob_matrix;
- b_engine.camera_model_matrix(b_ob, b_ob_matrix);
- Transform obmat = get_transform(b_ob_matrix);
+ b_engine.camera_model_matrix(b_ob, bcam->use_spherical_stereo, b_ob_matrix);
+ Transform obmat = transform_clear_scale(get_transform(b_ob_matrix));
Transform dofmat = get_transform(b_dof_object.matrix_world());
float3 view_dir = normalize(transform_get_column(&obmat, 2));
float3 dof_dir = transform_get_column(&obmat, 3) - transform_get_column(&dofmat, 3);
@@ -170,6 +174,10 @@ static void blender_camera_from_object(BlenderCamera *bcam,
bcam->longitude_min = RNA_float_get(&ccamera, "longitude_min");
bcam->longitude_max = RNA_float_get(&ccamera, "longitude_max");
+ bcam->interocular_distance = b_camera.stereo().interocular_distance();
+ bcam->convergence_distance = b_camera.stereo().convergence_distance();
+ bcam->use_spherical_stereo = b_engine.use_spherical_stereo(b_ob);
+
bcam->ortho_scale = b_camera.ortho_scale();
bcam->lens = b_camera.lens();
@@ -192,10 +200,10 @@ static void blender_camera_from_object(BlenderCamera *bcam,
bcam->apertureblades = RNA_int_get(&ccamera, "aperture_blades");
bcam->aperturerotation = RNA_float_get(&ccamera, "aperture_rotation");
- bcam->focaldistance = blender_camera_focal_distance(b_engine, b_ob, b_camera);
+ bcam->focaldistance = blender_camera_focal_distance(b_engine, b_ob, b_camera, bcam);
bcam->aperture_ratio = RNA_float_get(&ccamera, "aperture_ratio");
- bcam->shift.x = b_engine.camera_shift_x(b_ob);
+ bcam->shift.x = b_engine.camera_shift_x(b_ob, bcam->use_spherical_stereo);
bcam->shift.y = b_camera.shift_y();
bcam->sensor_width = b_camera.sensor_width();
@@ -337,7 +345,7 @@ static void blender_camera_viewplane(BlenderCamera *bcam,
}
}
-static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int height)
+static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int height, const char *viewname)
{
/* copy camera to compare later */
Camera prevcam = *cam;
@@ -394,6 +402,20 @@ static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int
cam->longitude_min = bcam->longitude_min;
cam->longitude_max = bcam->longitude_max;
+ /* panorama stereo */
+ cam->interocular_distance = bcam->interocular_distance;
+ cam->convergence_distance = bcam->convergence_distance;
+ cam->use_spherical_stereo = bcam->use_spherical_stereo;
+
+ if(cam->use_spherical_stereo) {
+ if(strcmp(viewname, "left") == 0)
+ cam->stereo_eye = Camera::STEREO_LEFT;
+ else if(strcmp(viewname, "right") == 0)
+ cam->stereo_eye = Camera::STEREO_RIGHT;
+ else
+ cam->stereo_eye = Camera::STEREO_NONE;
+ }
+
/* anamorphic lens bokeh */
cam->aperture_ratio = bcam->aperture_ratio;
@@ -435,7 +457,8 @@ static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int
void BlenderSync::sync_camera(BL::RenderSettings& b_render,
BL::Object& b_override,
- int width, int height)
+ int width, int height,
+ const char *viewname)
{
BlenderCamera bcam;
blender_camera_init(&bcam, b_render);
@@ -478,13 +501,13 @@ void BlenderSync::sync_camera(BL::RenderSettings& b_render,
if(b_ob) {
BL::Array<float, 16> b_ob_matrix;
blender_camera_from_object(&bcam, b_engine, b_ob);
- b_engine.camera_model_matrix(b_ob, b_ob_matrix);
+ b_engine.camera_model_matrix(b_ob, bcam.use_spherical_stereo, b_ob_matrix);
bcam.matrix = get_transform(b_ob_matrix);
}
/* sync */
Camera *cam = scene->camera;
- blender_camera_sync(cam, &bcam, width, height);
+ blender_camera_sync(cam, &bcam, width, height, viewname);
}
void BlenderSync::sync_camera_motion(BL::RenderSettings& b_render,
@@ -497,7 +520,7 @@ void BlenderSync::sync_camera_motion(BL::RenderSettings& b_render,
Camera *cam = scene->camera;
BL::Array<float, 16> b_ob_matrix;
- b_engine.camera_model_matrix(b_ob, b_ob_matrix);
+ b_engine.camera_model_matrix(b_ob, cam->use_spherical_stereo, b_ob_matrix);
Transform tfm = get_transform(b_ob_matrix);
tfm = blender_camera_matrix(tfm, cam->type, cam->panorama_type);
@@ -766,7 +789,7 @@ void BlenderSync::sync_view(BL::SpaceView3D& b_v3d,
b_v3d,
b_rv3d,
width, height);
- blender_camera_sync(scene->camera, &bcam, width, height);
+ blender_camera_sync(scene->camera, &bcam, width, height, "");
}
BufferParams BlenderSync::get_buffer_params(BL::RenderSettings& b_render,
diff --git a/intern/cycles/blender/blender_session.cpp b/intern/cycles/blender/blender_session.cpp
index f1b524f7b44..1f85feb0b7d 100644
--- a/intern/cycles/blender/blender_session.cpp
+++ b/intern/cycles/blender/blender_session.cpp
@@ -153,7 +153,6 @@ void BlenderSession::create_session()
* do some basic syncing here, no objects or materials for speed */
sync->sync_render_layers(b_v3d, NULL);
sync->sync_integrator();
- sync->sync_camera(b_render, b_camera_override, width, height);
}
/* set buffer parameters */
@@ -206,10 +205,8 @@ void BlenderSession::reset_session(BL::BlendData& b_data_, BL::Scene& b_scene_)
/* for final render we will do full data sync per render layer, only
* do some basic syncing here, no objects or materials for speed */
- BL::Object b_camera_override(b_engine.camera_override());
sync->sync_render_layers(b_v3d, NULL);
sync->sync_integrator();
- sync->sync_camera(b_render, b_camera_override, width, height);
BL::SpaceView3D b_null_space_view3d(PointerRNA_NULL);
BL::RegionView3D b_null_region_view3d(PointerRNA_NULL);
@@ -502,7 +499,7 @@ void BlenderSession::render()
/* update scene */
BL::Object b_camera_override(b_engine.camera_override());
- sync->sync_camera(b_render, b_camera_override, width, height);
+ sync->sync_camera(b_render, b_camera_override, width, height, b_rview_name.c_str());
sync->sync_data(b_render,
b_v3d,
b_camera_override,
@@ -642,7 +639,7 @@ void BlenderSession::bake(BL::Object& b_object,
/* update scene */
BL::Object b_camera_override(b_engine.camera_override());
- sync->sync_camera(b_render, b_camera_override, width, height);
+ sync->sync_camera(b_render, b_camera_override, width, height, "");
sync->sync_data(b_render,
b_v3d,
b_camera_override,
@@ -808,7 +805,7 @@ void BlenderSession::synchronize()
if(b_rv3d)
sync->sync_view(b_v3d, b_rv3d, width, height);
else
- sync->sync_camera(b_render, b_camera_override, width, height);
+ sync->sync_camera(b_render, b_camera_override, width, height, "");
/* unlock */
session->scene->mutex.unlock();
diff --git a/intern/cycles/blender/blender_sync.h b/intern/cycles/blender/blender_sync.h
index d3edfcb657f..92250ffeefc 100644
--- a/intern/cycles/blender/blender_sync.h
+++ b/intern/cycles/blender/blender_sync.h
@@ -69,7 +69,8 @@ public:
void sync_integrator();
void sync_camera(BL::RenderSettings& b_render,
BL::Object& b_override,
- int width, int height);
+ int width, int height,
+ const char *viewname);
void sync_view(BL::SpaceView3D& b_v3d,
BL::RegionView3D& b_rv3d,
int width, int height);
diff --git a/intern/cycles/kernel/kernel_camera.h b/intern/cycles/kernel/kernel_camera.h
index fd1d854a0ff..0947946e63d 100644
--- a/intern/cycles/kernel/kernel_camera.h
+++ b/intern/cycles/kernel/kernel_camera.h
@@ -105,18 +105,32 @@ ccl_device void camera_sample_perspective(KernelGlobals *kg, float raster_x, flo
}
#endif
- ray->P = transform_point(&cameratoworld, ray->P);
- ray->D = transform_direction(&cameratoworld, ray->D);
+ float3 tP = transform_point(&cameratoworld, ray->P);
+ float3 tD = transform_direction(&cameratoworld, ray->D);
+ ray->P = spherical_stereo_position(kg, tD, tP);
+ ray->D = spherical_stereo_direction(kg, tD, tP, ray->P);
ray->D = normalize(ray->D);
#ifdef __RAY_DIFFERENTIALS__
/* ray differential */
- float3 Ddiff = transform_direction(&cameratoworld, Pcamera);
-
ray->dP = differential3_zero();
- ray->dD.dx = normalize(Ddiff + float4_to_float3(kernel_data.cam.dx)) - normalize(Ddiff);
- ray->dD.dy = normalize(Ddiff + float4_to_float3(kernel_data.cam.dy)) - normalize(Ddiff);
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));
+ tD = transform_direction(&cameratoworld, tP);
+ float3 Pdiff = spherical_stereo_position(kg, tD, tP);
+ float3 Ddiff = normalize(spherical_stereo_direction(kg, tD, tP, Pdiff));
+
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x + 1.0f, raster_y, 0.0f));
+ tD = transform_direction(&cameratoworld, tP);
+ Pcamera = spherical_stereo_position(kg, tD, tP);
+ ray->dD.dx = normalize(spherical_stereo_direction(kg, tD, tP, Pcamera)) - Ddiff;
+ ray->dP.dx = Pcamera - Pdiff;
+
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y + 1.0f, 0.0f));
+ tD = transform_direction(&cameratoworld, tP);
+ Pcamera = spherical_stereo_position(kg, tD, tP);
+ ray->dD.dy = normalize(spherical_stereo_direction(kg, tD, tP, Pcamera)) - Ddiff;
+ /* dP.dy is zero, since the omnidirectional panorama only shift the eyes horizontally */
#endif
#ifdef __CAMERA_CLIPPING__
@@ -259,22 +273,32 @@ ccl_device void camera_sample_panorama(KernelGlobals *kg, float raster_x, float
}
#endif
- ray->P = transform_point(&cameratoworld, ray->P);
- ray->D = transform_direction(&cameratoworld, ray->D);
+ float3 tP = transform_point(&cameratoworld, ray->P);
+ float3 tD = transform_direction(&cameratoworld, ray->D);
+ ray->P = spherical_stereo_position(kg, tD, tP);
+ ray->D = spherical_stereo_direction(kg, tD, tP, ray->P);
ray->D = normalize(ray->D);
#ifdef __RAY_DIFFERENTIALS__
/* ray differential */
ray->dP = differential3_zero();
- Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));
- float3 Ddiff = normalize(transform_direction(&cameratoworld, panorama_to_direction(kg, Pcamera.x, Pcamera.y)));
-
- Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x + 1.0f, raster_y, 0.0f));
- ray->dD.dx = normalize(transform_direction(&cameratoworld, panorama_to_direction(kg, Pcamera.x, Pcamera.y))) - Ddiff;
-
- Pcamera = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y + 1.0f, 0.0f));
- ray->dD.dy = normalize(transform_direction(&cameratoworld, panorama_to_direction(kg, Pcamera.x, Pcamera.y))) - Ddiff;
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y, 0.0f));
+ tD = transform_direction(&cameratoworld, panorama_to_direction(kg, tP.x, tP.y));
+ float3 Pdiff = spherical_stereo_position(kg, tD, tP);
+ float3 Ddiff = normalize(spherical_stereo_direction(kg, tD, tP, Pdiff));
+
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x + 1.0f, raster_y, 0.0f));
+ tD = transform_direction(&cameratoworld, panorama_to_direction(kg, tP.x, tP.y));
+ Pcamera = spherical_stereo_position(kg, tD, tP);
+ ray->dD.dx = normalize(spherical_stereo_direction(kg, tD, tP, Pcamera)) - Ddiff;
+ ray->dP.dx = Pcamera - Pdiff;
+
+ tP = transform_perspective(&rastertocamera, make_float3(raster_x, raster_y + 1.0f, 0.0f));
+ tD = transform_direction(&cameratoworld, panorama_to_direction(kg, tP.x, tP.y));
+ Pcamera = spherical_stereo_position(kg, tD, tP);
+ ray->dD.dy = normalize(spherical_stereo_direction(kg, tD, tP, Pcamera)) - Ddiff;
+ /* dP.dy is zero, since the omnidirectional panorama only shift the eyes horizontally */
#endif
}
diff --git a/intern/cycles/kernel/kernel_projection.h b/intern/cycles/kernel/kernel_projection.h
index 62922df3286..fa06b937e61 100644
--- a/intern/cycles/kernel/kernel_projection.h
+++ b/intern/cycles/kernel/kernel_projection.h
@@ -222,6 +222,32 @@ ccl_device float2 direction_to_panorama(KernelGlobals *kg, float3 dir)
}
}
+ccl_device float3 spherical_stereo_position(KernelGlobals *kg, float3 dir, float3 pos)
+{
+ float3 up, side;
+
+ /* Interocular_offset of zero means either non stereo, or stereo without spherical stereo. */
+
+ up = make_float3(0.0f, 0.0f, 1.0f);
+ side = normalize(cross(dir, up));
+
+ return pos + (side * kernel_data.cam.interocular_offset);
+}
+
+ccl_device float3 spherical_stereo_direction(KernelGlobals *kg, float3 dir, float3 pos, float3 newpos)
+{
+ float3 screenpos, dirnew;
+
+ /* Interocular_distance of zero means either no stereo, or stereo without spherical stereo. */
+ if(kernel_data.cam.interocular_offset == 0.0f)
+ return dir;
+
+ screenpos = pos + (normalize(dir) * kernel_data.cam.convergence_distance);
+ dirnew = screenpos - newpos;
+
+ return dirnew;
+}
+
CCL_NAMESPACE_END
#endif /* __KERNEL_PROJECTION_CL__ */
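Editor's note (not part of the commit): a quick check of the convention added above, reusing the illustrative spherical_stereo_ray() helper from the sketch after the commit message, with the default interocular_distance = 0.065 and convergence_distance = 30 * 0.065 set in render/camera.cpp.

/* Camera at the origin looking down +Y. */
float3 pos = {0.0f, 0.0f, 0.0f};
float3 dir = {0.0f, 1.0f, 0.0f};
float3 lP, lD, rP, rD;
spherical_stereo_ray(pos, dir, -0.5f * 0.065f, 30.0f * 0.065f, &lP, &lD);
spherical_stereo_ray(pos, dir, +0.5f * 0.065f, 30.0f * 0.065f, &rP, &rD);
/* lP = (-0.0325, 0, 0) and rP = (+0.0325, 0, 0): the eyes shift along +/-X,
 * since cross(+Y, +Z) = +X. Both lD and rD point at (0, 1.95, 0), so the
 * rays converge there; raising convergence_distance makes them nearly
 * parallel, which is the workaround noted in the commit message. */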
diff --git a/intern/cycles/kernel/kernel_types.h b/intern/cycles/kernel/kernel_types.h
index bdd17c66c0f..c5edc16f196 100644
--- a/intern/cycles/kernel/kernel_types.h
+++ b/intern/cycles/kernel/kernel_types.h
@@ -901,6 +901,11 @@ typedef struct KernelCamera {
float fisheye_lens;
float4 equirectangular_range;
+ /* stereo */
+ int pad1, pad2;
+ float interocular_offset;
+ float convergence_distance;
+
/* matrices */
Transform cameratoworld;
Transform rastertocamera;
diff --git a/intern/cycles/render/camera.cpp b/intern/cycles/render/camera.cpp
index 9c17a11e0f1..0e343822223 100644
--- a/intern/cycles/render/camera.cpp
+++ b/intern/cycles/render/camera.cpp
@@ -73,6 +73,9 @@ Camera::Camera()
longitude_max = M_PI_F;
fov = M_PI_4_F;
fov_pre = fov_post = fov;
+ stereo_eye = STEREO_NONE;
+ interocular_distance = 0.065f;
+ convergence_distance = 30.0f * 0.065f;
sensorwidth = 0.036f;
sensorheight = 0.024f;
@@ -341,6 +344,21 @@ void Camera::device_update(Device *device, DeviceScene *dscene, Scene *scene)
kcam->equirectangular_range = make_float4(longitude_min - longitude_max, -longitude_min,
latitude_min - latitude_max, -latitude_min + M_PI_2_F);
+ switch(stereo_eye) {
+ case STEREO_LEFT:
+ kcam->interocular_offset = -interocular_distance * 0.5f;
+ break;
+ case STEREO_RIGHT:
+ kcam->interocular_offset = interocular_distance * 0.5f;
+ break;
+ case STEREO_NONE:
+ default:
+ kcam->interocular_offset = 0.0f;
+ break;
+ }
+
+ kcam->convergence_distance = convergence_distance;
+
/* sensor size */
kcam->sensorwidth = sensorwidth;
kcam->sensorheight = sensorheight;
@@ -427,7 +445,8 @@ bool Camera::modified(const Camera& cam)
(latitude_min == cam.latitude_min) &&
(latitude_max == cam.latitude_max) &&
(longitude_min == cam.longitude_min) &&
- (longitude_max == cam.longitude_max));
+ (longitude_max == cam.longitude_max) &&
+ (stereo_eye == cam.stereo_eye));
}
bool Camera::motion_modified(const Camera& cam)
@@ -480,9 +499,30 @@ BoundBox Camera::viewplane_bounds_get()
BoundBox bounds = BoundBox::empty;
if(type == CAMERA_PANORAMA) {
- bounds.grow(make_float3(cameratoworld.x.w,
- cameratoworld.y.w,
- cameratoworld.z.w));
+ if(use_spherical_stereo == false) {
+ bounds.grow(make_float3(cameratoworld.x.w,
+ cameratoworld.y.w,
+ cameratoworld.z.w));
+ }
+ else {
+ float half_eye_distance = interocular_distance * 0.5f;
+
+ bounds.grow(make_float3(cameratoworld.x.w + half_eye_distance,
+ cameratoworld.y.w,
+ cameratoworld.z.w));
+
+ bounds.grow(make_float3(cameratoworld.z.w,
+ cameratoworld.y.w + half_eye_distance,
+ cameratoworld.z.w));
+
+ bounds.grow(make_float3(cameratoworld.x.w - half_eye_distance,
+ cameratoworld.y.w,
+ cameratoworld.z.w));
+
+ bounds.grow(make_float3(cameratoworld.x.w,
+ cameratoworld.y.w - half_eye_distance,
+ cameratoworld.z.w));
+ }
}
else {
bounds.grow(transform_raster_to_world(0.0f, 0.0f));
diff --git a/intern/cycles/render/camera.h b/intern/cycles/render/camera.h
index 6771ecc8f23..6fbb1dc3bc8 100644
--- a/intern/cycles/render/camera.h
+++ b/intern/cycles/render/camera.h
@@ -59,6 +59,13 @@ public:
ROLLING_SHUTTER_NUM_TYPES,
};
+ /* Stereo Type */
+ enum StereoEye {
+ STEREO_NONE,
+ STEREO_LEFT,
+ STEREO_RIGHT,
+ };
+
/* motion blur */
float shuttertime;
MotionPosition motion_position;
@@ -92,6 +99,12 @@ public:
float longitude_min;
float longitude_max;
+ /* panorama stereo */
+ StereoEye stereo_eye;
+ bool use_spherical_stereo;
+ float interocular_distance;
+ float convergence_distance;
+
/* anamorphic lens bokeh */
float aperture_ratio;