Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
path: root/extern
diff options
context:
space:
mode:
Diffstat (limited to 'extern')
-rw-r--r--  extern/libmv/libmv/simple_pipeline/bundle.cc             | 21
-rw-r--r--  extern/libmv/libmv/simple_pipeline/camera_intrinsics.cc  | 24
-rw-r--r--  extern/libmv/libmv/simple_pipeline/camera_intrinsics.h   | 31
3 files changed, 51 insertions(+), 25 deletions(-)
diff --git a/extern/libmv/libmv/simple_pipeline/bundle.cc b/extern/libmv/libmv/simple_pipeline/bundle.cc
index c29eb3d5df6..34117cbbbd7 100644
--- a/extern/libmv/libmv/simple_pipeline/bundle.cc
+++ b/extern/libmv/libmv/simple_pipeline/bundle.cc
@@ -94,17 +94,16 @@ struct OpenCVReprojectionError {
// Apply distortion to the normalized points to get (xd, yd).
// TODO(keir): Do early bailouts for zero distortion; these are expensive
// jet operations.
- T r2 = xn*xn + yn*yn;
- T r4 = r2 * r2;
- T r6 = r4 * r2;
- T r_coeff = T(1) + k1*r2 + k2*r4 + k3*r6;
- T xd = xn * r_coeff + T(2)*p1*xn*yn + p2*(r2 + T(2)*xn*xn);
- T yd = yn * r_coeff + T(2)*p2*xn*yn + p1*(r2 + T(2)*yn*yn);
-
- // Apply focal length and principal point to get the final
- // image coordinates.
- predicted_x = focal_length * xd + principal_point_x;
- predicted_y = focal_length * yd + principal_point_y;
+
+ ApplyRadialDistortionCameraIntrinsics(focal_length,
+ focal_length,
+ principal_point_x,
+ principal_point_y,
+ k1, k2, k3,
+ p1, p2,
+ xn, yn,
+ &predicted_x,
+ &predicted_y);
} else {
predicted_x = xn;
predicted_y = yn;
diff --git a/extern/libmv/libmv/simple_pipeline/camera_intrinsics.cc b/extern/libmv/libmv/simple_pipeline/camera_intrinsics.cc
index 6319846a079..658f65c1367 100644
--- a/extern/libmv/libmv/simple_pipeline/camera_intrinsics.cc
+++ b/extern/libmv/libmv/simple_pipeline/camera_intrinsics.cc
@@ -124,20 +124,16 @@ void CameraIntrinsics::ApplyIntrinsics(double normalized_x,
double normalized_y,
double *image_x,
double *image_y) const {
- double x = normalized_x;
- double y = normalized_y;
-
- // Apply distortion to the normalized points to get (xd, yd).
- double r2 = x*x + y*y;
- double r4 = r2 * r2;
- double r6 = r4 * r2;
- double r_coeff = (1 + k1_*r2 + k2_*r4 + k3_*r6);
- double xd = x * r_coeff + 2*p1_*x*y + p2_*(r2 + 2*x*x);
- double yd = y * r_coeff + 2*p2_*x*y + p1_*(r2 + 2*y*y);
-
- // Apply focal length and principal point to get the final image coordinates.
- *image_x = focal_length_x() * xd + principal_point_x();
- *image_y = focal_length_y() * yd + principal_point_y();
+ ApplyRadialDistortionCameraIntrinsics(focal_length_x(),
+ focal_length_y(),
+ principal_point_x(),
+ principal_point_y(),
+ k1(), k2(), k3(),
+ p1(), p2(),
+ normalized_x,
+ normalized_y,
+ image_x,
+ image_y);
}
struct InvertIntrinsicsCostFunction {
diff --git a/extern/libmv/libmv/simple_pipeline/camera_intrinsics.h b/extern/libmv/libmv/simple_pipeline/camera_intrinsics.h
index e0556674ad5..b51b28a4bfb 100644
--- a/extern/libmv/libmv/simple_pipeline/camera_intrinsics.h
+++ b/extern/libmv/libmv/simple_pipeline/camera_intrinsics.h
@@ -159,6 +159,37 @@ class CameraIntrinsics {
std::ostream& operator <<(std::ostream &os,
const CameraIntrinsics &intrinsics);
+// Apply camera intrinsics to the normalized point to get image coordinates.
+// This applies the radial lens distortion to a point which is in normalized
+// camera coordinates (i.e. the principal point is at (0, 0)) to get image
+// coordinates in pixels. Templated for use with autodifferentiation.
+template <typename T>
+inline void ApplyRadialDistortionCameraIntrinsics(T focal_length_x,
+ T focal_length_y,
+ T principal_point_x,
+ T principal_point_y,
+ T k1, T k2, T k3,
+ T p1, T p2,
+ T normalized_x,
+ T normalized_y,
+ T *image_x,
+ T *image_y) {
+ T x = normalized_x;
+ T y = normalized_y;
+
+ // Apply distortion to the normalized points to get (xd, yd).
+ T r2 = x*x + y*y;
+ T r4 = r2 * r2;
+ T r6 = r4 * r2;
+ T r_coeff = (T(1) + k1*r2 + k2*r4 + k3*r6);
+ T xd = x * r_coeff + T(2)*p1*x*y + p2*(r2 + T(2)*x*x);
+ T yd = y * r_coeff + T(2)*p2*x*y + p1*(r2 + T(2)*y*y);
+
+ // Apply focal length and principal point to get the final image coordinates.
+ *image_x = focal_length_x * xd + principal_point_x;
+ *image_y = focal_length_y * yd + principal_point_y;
+}
+
} // namespace libmv
#endif // LIBMV_SIMPLE_PIPELINE_CAMERA_INTRINSICS_H_