Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender-addons.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/mocap
diff options
context:
space:
mode:
authorBrecht Van Lommel <brechtvanlommel@gmail.com>2018-10-19 18:59:58 +0300
committerBrecht Van Lommel <brechtvanlommel@gmail.com>2018-10-19 19:12:29 +0300
commitd7d3233715503ecc15b8dd1973f7e73257e2cbda (patch)
treef9f9ab24b89fe574b3f972e45c8337bcd791a9b1 /mocap
parent84b817117328b3193533324846ec389b1f5fe5c4 (diff)
Spelling fixes in comments and descriptions, patch by luzpaz.
Differential Revision: https://developer.blender.org/D3746
Diffstat (limited to 'mocap')
-rw-r--r--mocap/__init__.py2
-rw-r--r--mocap/mocap_constraints.py4
-rw-r--r--mocap/mocap_tools.py16
-rw-r--r--mocap/retarget.py10
4 files changed, 16 insertions, 16 deletions
diff --git a/mocap/__init__.py b/mocap/__init__.py
index 27b166ab..d2a1c57b 100644
--- a/mocap/__init__.py
+++ b/mocap/__init__.py
@@ -787,7 +787,7 @@ class OBJECT_OT_UnbakeMocapConstraints(bpy.types.Operator):
class OBJECT_OT_UpdateMocapConstraints(bpy.types.Operator):
#Operator to update all post-retarget fixes, similar to update dependencies on drivers
#Needed because python properties lack certain callbacks and some fixes take a while to recalculate.
- """Update all post-retarget fixes (neccesary to take under """ \
+ """Update all post-retarget fixes (necessary to take under """ \
"""consideration changes to armature object or pose)"""
bl_idname = "mocap.updateconstraints"
bl_label = "Update Mocap Fixes"
diff --git a/mocap/mocap_constraints.py b/mocap/mocap_constraints.py
index d263dfad..3d3e4a60 100644
--- a/mocap/mocap_constraints.py
+++ b/mocap/mocap_constraints.py
@@ -289,7 +289,7 @@ def setConstraint(m_constraint, context):
if m_constraint.type == "floor" and m_constraint.targetMesh:
real_constraint.mute = True
real_constraint.owner_space = "WORLD"
- #calculate the positions thoughout the range
+ #calculate the positions throughout the range
s, e = m_constraint.s_frame, m_constraint.e_frame
s_in, s_out = m_constraint.smooth_in, m_constraint.smooth_out
s -= s_in
@@ -399,7 +399,7 @@ def bakeAllConstraints(obj, s_frame, e_frame, bones):
locBake(s_frame, e_frame, simpleBake)
-#Calls the baking function and decativates releveant constraints
+#Calls the baking function and deactivates relevant constraints
def bakeConstraints(context):
obj = context.active_object
bones = obj.pose.bones
diff --git a/mocap/mocap_tools.py b/mocap/mocap_tools.py
index 3f910411..5f9c2a2d 100644
--- a/mocap/mocap_tools.py
+++ b/mocap/mocap_tools.py
@@ -232,12 +232,12 @@ def autoloop_anim():
context.scene.frame_end = flm
-#simplifyCurves: performes the bulk of the samples to bezier conversion.
+#simplifyCurves: performs the bulk of the samples to bezier conversion.
#IN: curveGroup - which can be a collection of singleFcurves, or grouped (via nested lists) .
# error - threshold of permittable error (max distance) of the new beziers to the original data
# reparaError - threshold of error where we should try to fix the parameterization rather than split the existing curve. > error, usually by a small constant factor for best performance.
-# maxIterations - maximum number of iterations of reparameterizations we should attempt. (Newton-Rahpson is not guarenteed to converge, so this is needed).
-# group_mode - boolean, indicating wether we should place bezier keyframes on the same x (frame), or optimize each individual curve.
+# maxIterations - maximum number of iterations of reparameterizations we should attempt. (Newton-Raphson is not guaranteed to converge, so this is needed).
+# group_mode - boolean, indicating whether we should place bezier keyframes on the same x (frame), or optimize each individual curve.
#OUT: None. Deletes the existing curves and creates the new beziers.
def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):
#Calculates the unit tangent of point v
@@ -398,7 +398,7 @@ def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):
sumVec += (bez[i + 1] - bez[i]) * bernsteinPoly(n - 1, i, t)
return sumVec
- #use Newton-Raphson to find a better paramterization of datapoints,
+ #use Newton-Raphson to find a better parameterization of datapoints,
#one that minimizes the distance (or error)
# between bezier and original data.
def newtonRaphson(data_pts, s, e, bez):
@@ -531,7 +531,7 @@ def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):
# sel_opt- either "sel" (selected) or "all" for which curves to effect
# error- maximum error allowed, in fraction (20% = 0.0020, which is the default),
# i.e. divide by 10000 from percentage wanted.
-# group_mode- boolean, to analyze each curve seperately or in groups,
+# group_mode- boolean, to analyze each curve separately or in groups,
# where a group is all curves that effect the same property/RNA path
def fcurves_simplify(context, obj, sel_opt="all", error=0.002, group_mode=True):
# main vars
@@ -637,7 +637,7 @@ def denoise(obj, fcurves):
fcurve.update()
-# Recieves armature, and rotations all bones by 90 degrees along the X axis
+# Receives armature, and rotates all bones by 90 degrees along the X axis
# This fixes the common axis issue BVH files have when importing.
# IN: Armature (bpy.types.Armature)
def rotate_fix_armature(arm_data):
@@ -683,7 +683,7 @@ def scale_fix_armature(performer_obj, enduser_obj):
#Guess Mapping
-#Given a performer and enduser armature, attempts to guess the hiearchy mapping
+#Given a performer and enduser armature, attempts to guess the hierarchy mapping
def guessMapping(performer_obj, enduser_obj):
perf_bones = performer_obj.data.bones
end_bones = enduser_obj.data.bones
@@ -702,7 +702,7 @@ def guessMapping(performer_obj, enduser_obj):
return "", bone
def nameMatch(bone_a, bone_b):
- # nameMatch - recieves two strings, returns 2 if they are relatively the same, 1 if they are the same but R and L and 0 if no match at all
+ # nameMatch - receives two strings, returns 2 if they are relatively the same, 1 if they are the same but R and L and 0 if no match at all
side_a, noside_a = findBoneSide(bone_a)
side_b, noside_b = findBoneSide(bone_b)
if side_a == side_b:
diff --git a/mocap/retarget.py b/mocap/retarget.py
index d0b203cd..c979c00b 100644
--- a/mocap/retarget.py
+++ b/mocap/retarget.py
@@ -61,8 +61,8 @@ def loadMapping(perf_arm, end_arm):
perf_arm.bones[perf_bone.name].map = end_bone.name
#creation of intermediate armature
-# the intermediate armature has the hiearchy of the end user,
-# does not have rotation inheritence
+# the intermediate armature has the hierarchy of the end user,
+# does not have rotation inheritance
# and bone roll is identical to the performer
# its purpose is to copy over the rotations
# easily while concentrating on the hierarchy changes
@@ -92,7 +92,7 @@ def createIntermediate(performer_obj, enduser_obj, root, s_frame, e_frame, scene
lerp_matrix = first_mat.lerp(next_mat, 0.5)
return lerp_matrix
- #determines the type of hierachy change needed and calls the
+ #determines the type of hierarchy change needed and calls the
#right function
def retargetPerfToInter(inter_bone):
if inter_bone.bone.reverseMap:
@@ -176,7 +176,7 @@ def retargetEnduser(inter_obj, enduser_obj, root, s_frame, e_frame, scene, step)
inter_bones = inter_obj.pose.bones
end_bones = enduser_obj.pose.bones
- #Basic "visual baking" function, for transfering rotations from intermediate to end user
+ #Basic "visual baking" function, for transferring rotations from intermediate to end user
def bakeTransform(end_bone):
src_bone = inter_bones[end_bone.name]
trg_bone = end_bone
@@ -219,7 +219,7 @@ def retargetEnduser(inter_obj, enduser_obj, root, s_frame, e_frame, scene, step)
end_bone.keyframe_insert("location")
bakeTransform(end_bone)
-#recieves the performer feet bones as a variable
+#receives the performer feet bones as a variable
# by "feet" I mean those bones that have plants
# (they don't move, despite root moving) somewhere in the animation.