git.blender.org/blender.git
commit 643b0be4cb3f73bd876493d2a7bd6f76ef27cf06
tree   33fa8c08a902176f4204b6cc6a18702997bd90ba /extern/bullet2/src/LinearMath
parent 46d32c89f6df911120579d00dd6e1246536cb6d8
author    Sergej Reich <sergej.reich@googlemail.com>  2013-03-07 21:53:16 +0400
committer Sergej Reich <sergej.reich@googlemail.com>  2013-03-07 21:53:16 +0400

    bullet: Update to current svn, r2636

    Apply the patches in the patches directory and remove the patches that were
    applied upstream. If you made changes without adding a patch, please check.

    Fixes [#32233] exporting bullet format results in corrupt files.
Diffstat (limited to 'extern/bullet2/src/LinearMath')
-rw-r--r--  extern/bullet2/src/LinearMath/btAabbUtil2.h              |    8
-rw-r--r--  extern/bullet2/src/LinearMath/btAlignedAllocator.cpp     |    2
-rw-r--r--  extern/bullet2/src/LinearMath/btAlignedObjectArray.h     |   19
-rw-r--r--  extern/bullet2/src/LinearMath/btConvexHull.cpp           |   11
-rw-r--r--  extern/bullet2/src/LinearMath/btConvexHullComputer.cpp   |   14
-rw-r--r--  extern/bullet2/src/LinearMath/btDefaultMotionState.h     |    4
-rw-r--r--  extern/bullet2/src/LinearMath/btGrahamScan2dConvexHull.h |   33
-rw-r--r--  extern/bullet2/src/LinearMath/btIDebugDraw.h             |    1
-rw-r--r--  extern/bullet2/src/LinearMath/btMatrix3x3.h              |  659
-rw-r--r--  extern/bullet2/src/LinearMath/btPolarDecomposition.cpp   |   99
-rw-r--r--  extern/bullet2/src/LinearMath/btPolarDecomposition.h     |   73
-rw-r--r--  extern/bullet2/src/LinearMath/btQuadWord.h               |  108
-rw-r--r--  extern/bullet2/src/LinearMath/btQuaternion.h             |  497
-rw-r--r--  extern/bullet2/src/LinearMath/btScalar.h                 |  136
-rw-r--r--  extern/bullet2/src/LinearMath/btSerializer.cpp           | 1743
-rw-r--r--  extern/bullet2/src/LinearMath/btSerializer.h             |    9
-rw-r--r--  extern/bullet2/src/LinearMath/btTransform.h              |    6
-rw-r--r--  extern/bullet2/src/LinearMath/btVector3.cpp              | 1639
-rw-r--r--  extern2/bullet2/src/LinearMath/btVector3.h               |  808
19 files changed, 4782 insertions(+), 1087 deletions(-)
diff --git a/extern/bullet2/src/LinearMath/btAabbUtil2.h b/extern/bullet2/src/LinearMath/btAabbUtil2.h
index 42b721dea22..d2997b4e65c 100644
--- a/extern/bullet2/src/LinearMath/btAabbUtil2.h
+++ b/extern/bullet2/src/LinearMath/btAabbUtil2.h
@@ -184,9 +184,7 @@ SIMD_FORCE_INLINE void btTransformAabb(const btVector3& halfExtents, btScalar ma
btVector3 halfExtentsWithMargin = halfExtents+btVector3(margin,margin,margin);
btMatrix3x3 abs_b = t.getBasis().absolute();
btVector3 center = t.getOrigin();
- btVector3 extent = btVector3(abs_b[0].dot(halfExtentsWithMargin),
- abs_b[1].dot(halfExtentsWithMargin),
- abs_b[2].dot(halfExtentsWithMargin));
+ btVector3 extent = halfExtentsWithMargin.dot3( abs_b[0], abs_b[1], abs_b[2] );
aabbMinOut = center - extent;
aabbMaxOut = center + extent;
}
@@ -203,9 +201,7 @@ SIMD_FORCE_INLINE void btTransformAabb(const btVector3& localAabbMin,const btVec
btVector3 localCenter = btScalar(0.5)*(localAabbMax+localAabbMin);
btMatrix3x3 abs_b = trans.getBasis().absolute();
btVector3 center = trans(localCenter);
- btVector3 extent = btVector3(abs_b[0].dot(localHalfExtents),
- abs_b[1].dot(localHalfExtents),
- abs_b[2].dot(localHalfExtents));
+ btVector3 extent = localHalfExtents.dot3( abs_b[0], abs_b[1], abs_b[2] );
aabbMinOut = center-extent;
aabbMaxOut = center+extent;
}
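Both hunks above fold three row-wise dot products into a single dot3() call on the updated btVector3 API. For reference (not part of the diff), a minimal sketch of the equivalence, assuming dot3(a, b, c) returns (v.dot(a), v.dot(b), v.dot(c)) as the replaced code implies, and that Bullet's LinearMath headers are on the include path:

    #include "LinearMath/btMatrix3x3.h"
    #include "LinearMath/btVector3.h"

    // Scalar form, as removed above: one dot product per row of the absolute basis.
    static btVector3 extentScalar(const btMatrix3x3& abs_b, const btVector3& halfExtents)
    {
        return btVector3(abs_b[0].dot(halfExtents),
                         abs_b[1].dot(halfExtents),
                         abs_b[2].dot(halfExtents));
    }

    // dot3 form, as added above: the same vector, expressed as one SIMD-friendly call.
    static btVector3 extentDot3(const btMatrix3x3& abs_b, const btVector3& halfExtents)
    {
        return halfExtents.dot3(abs_b[0], abs_b[1], abs_b[2]);
    }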
diff --git a/extern/bullet2/src/LinearMath/btAlignedAllocator.cpp b/extern/bullet2/src/LinearMath/btAlignedAllocator.cpp
index c4c0ceb2ed2..a65296c6abe 100644
--- a/extern/bullet2/src/LinearMath/btAlignedAllocator.cpp
+++ b/extern/bullet2/src/LinearMath/btAlignedAllocator.cpp
@@ -119,7 +119,7 @@ void* btAlignedAllocInternal (size_t size, int alignment,int line,char* filen
real = (char *)sAllocFunc(size + 2*sizeof(void *) + (alignment-1));
if (real) {
- ret = (void*) btAlignPointer((real + 2*sizeof(void *), alignment);
+ ret = (void*) btAlignPointer(real + 2*sizeof(void *), alignment);
*((void **)(ret)-1) = (void *)(real);
*((int*)(ret)-2) = size;
diff --git a/extern/bullet2/src/LinearMath/btAlignedObjectArray.h b/extern/bullet2/src/LinearMath/btAlignedObjectArray.h
index 36090e13c89..24e59ab65d7 100644
--- a/extern/bullet2/src/LinearMath/btAlignedObjectArray.h
+++ b/extern/bullet2/src/LinearMath/btAlignedObjectArray.h
@@ -197,8 +197,26 @@ protected:
m_data[m_size].~T();
}
+
///resize changes the number of elements in the array. If the new size is larger, the new elements will be constructed using the optional second argument.
///when the new number of elements is smaller, the destructor will be called, but memory will not be freed, to reduce performance overhead of run-time memory (de)allocations.
+ SIMD_FORCE_INLINE void resizeNoInitialize(int newsize)
+ {
+ int curSize = size();
+
+ if (newsize < curSize)
+ {
+ } else
+ {
+ if (newsize > size())
+ {
+ reserve(newsize);
+ }
+ //leave this uninitialized
+ }
+ m_size = newsize;
+ }
+
SIMD_FORCE_INLINE void resize(int newsize, const T& fillData=T())
{
int curSize = size();
@@ -226,7 +244,6 @@ protected:
m_size = newsize;
}
-
SIMD_FORCE_INLINE T& expandNonInitializing( )
{
int sz = size();
diff --git a/extern/bullet2/src/LinearMath/btConvexHull.cpp b/extern/bullet2/src/LinearMath/btConvexHull.cpp
index 532d76d881f..2ae855dbc1f 100644
--- a/extern/bullet2/src/LinearMath/btConvexHull.cpp
+++ b/extern/bullet2/src/LinearMath/btConvexHull.cpp
@@ -22,13 +22,6 @@ subject to the following restrictions:
-template <class T>
-void Swap(T &a,T &b)
-{
- T tmp = a;
- a=b;
- b=tmp;
-}
//----------------------------------
@@ -518,7 +511,7 @@ int4 HullLibrary::FindSimplex(btVector3 *verts,int verts_count,btAlignedObjectAr
if(p3==p0||p3==p1||p3==p2)
return int4(-1,-1,-1,-1);
btAssert(!(p0==p1||p0==p2||p0==p3||p1==p2||p1==p3||p2==p3));
- if(btDot(verts[p3]-verts[p0],btCross(verts[p1]-verts[p0],verts[p2]-verts[p0])) <0) {Swap(p2,p3);}
+ if(btDot(verts[p3]-verts[p0],btCross(verts[p1]-verts[p0],verts[p2]-verts[p0])) <0) {btSwap(p2,p3);}
return int4(p0,p1,p2,p3);
}
@@ -570,7 +563,7 @@ int HullLibrary::calchullgen(btVector3 *verts,int verts_count, int vlimit)
vlimit-=4;
while(vlimit >0 && ((te=extrudable(epsilon)) != 0))
{
- int3 ti=*te;
+ //int3 ti=*te;
int v=te->vmax;
btAssert(v != -1);
btAssert(!isextreme[v]); // wtf we've already done this vertex
diff --git a/extern/bullet2/src/LinearMath/btConvexHullComputer.cpp b/extern/bullet2/src/LinearMath/btConvexHullComputer.cpp
index 4fd81dac107..3fd77df8da5 100644
--- a/extern/bullet2/src/LinearMath/btConvexHullComputer.cpp
+++ b/extern/bullet2/src/LinearMath/btConvexHullComputer.cpp
@@ -1931,11 +1931,15 @@ void btConvexHullInternal::merge(IntermediateHull& h0, IntermediateHull& h1)
}
}
-
-static bool pointCmp(const btConvexHullInternal::Point32& p, const btConvexHullInternal::Point32& q)
+class pointCmp
{
- return (p.y < q.y) || ((p.y == q.y) && ((p.x < q.x) || ((p.x == q.x) && (p.z < q.z))));
-}
+ public:
+
+ bool operator() ( const btConvexHullInternal::Point32& p, const btConvexHullInternal::Point32& q ) const
+ {
+ return (p.y < q.y) || ((p.y == q.y) && ((p.x < q.x) || ((p.x == q.x) && (p.z < q.z))));
+ }
+};
void btConvexHullInternal::compute(const void* coords, bool doubleCoords, int stride, int count)
{
@@ -2026,7 +2030,7 @@ void btConvexHullInternal::compute(const void* coords, bool doubleCoords, int st
points[i].index = i;
}
}
- points.quickSort(pointCmp);
+ points.quickSort(pointCmp());
vertexPool.reset();
vertexPool.setArraySize(count);
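The pointCmp comparator above changes from a free function to a function object so it can be passed by value to btAlignedObjectArray::quickSort and inlined at the call site. A standalone sketch of the same y/x/z lexicographic ordering, using a simplified point struct and std::sort in place of Bullet's container (both are stand-ins for illustration only):

    #include <algorithm>
    #include <vector>

    struct Point32 { int x, y, z; };   // simplified stand-in for btConvexHullInternal::Point32

    struct PointCmp
    {
        bool operator()(const Point32& p, const Point32& q) const
        {
            // Order by y, then x, then z - the same ordering as the functor above.
            return (p.y < q.y) || ((p.y == q.y) && ((p.x < q.x) || ((p.x == q.x) && (p.z < q.z))));
        }
    };

    int main()
    {
        std::vector<Point32> points;
        Point32 a = {3, 1, 2}, b = {0, 1, 1}, c = {5, 0, 9};
        points.push_back(a); points.push_back(b); points.push_back(c);

        // Comparator passed by value, analogous to points.quickSort(pointCmp()).
        std::sort(points.begin(), points.end(), PointCmp());
        return 0;
    }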
diff --git a/extern/bullet2/src/LinearMath/btDefaultMotionState.h b/extern/bullet2/src/LinearMath/btDefaultMotionState.h
index a6b7ef15ac8..c90b749230c 100644
--- a/extern/bullet2/src/LinearMath/btDefaultMotionState.h
+++ b/extern/bullet2/src/LinearMath/btDefaultMotionState.h
@@ -4,13 +4,15 @@
#include "btMotionState.h"
///The btDefaultMotionState provides a common implementation to synchronize world transforms with offsets.
-struct btDefaultMotionState : public btMotionState
+ATTRIBUTE_ALIGNED16(struct) btDefaultMotionState : public btMotionState
{
btTransform m_graphicsWorldTrans;
btTransform m_centerOfMassOffset;
btTransform m_startWorldTrans;
void* m_userPointer;
+ BT_DECLARE_ALIGNED_ALLOCATOR();
+
btDefaultMotionState(const btTransform& startTrans = btTransform::getIdentity(),const btTransform& centerOfMassOffset = btTransform::getIdentity())
: m_graphicsWorldTrans(startTrans),
m_centerOfMassOffset(centerOfMassOffset),
diff --git a/extern/bullet2/src/LinearMath/btGrahamScan2dConvexHull.h b/extern/bullet2/src/LinearMath/btGrahamScan2dConvexHull.h
index d7bd3eb8911..e658c5cf062 100644
--- a/extern/bullet2/src/LinearMath/btGrahamScan2dConvexHull.h
+++ b/extern/bullet2/src/LinearMath/btGrahamScan2dConvexHull.h
@@ -21,9 +21,9 @@ subject to the following restrictions:
#include "btVector3.h"
#include "btAlignedObjectArray.h"
-struct GrahamVector2 : public btVector3
+struct GrahamVector3 : public btVector3
{
- GrahamVector2(const btVector3& org, int orgIndex)
+ GrahamVector3(const btVector3& org, int orgIndex)
:btVector3(org),
m_orgIndex(orgIndex)
{
@@ -39,7 +39,7 @@ struct btAngleCompareFunc {
: m_anchor(anchor)
{
}
- bool operator()(const GrahamVector2& a, const GrahamVector2& b) const {
+ bool operator()(const GrahamVector3& a, const GrahamVector3& b) const {
if (a.m_angle != b.m_angle)
return a.m_angle < b.m_angle;
else
@@ -56,31 +56,38 @@ struct btAngleCompareFunc {
}
};
-inline void GrahamScanConvexHull2D(btAlignedObjectArray<GrahamVector2>& originalPoints, btAlignedObjectArray<GrahamVector2>& hull)
+inline void GrahamScanConvexHull2D(btAlignedObjectArray<GrahamVector3>& originalPoints, btAlignedObjectArray<GrahamVector3>& hull, const btVector3& normalAxis)
{
+ btVector3 axis0,axis1;
+ btPlaneSpace1(normalAxis,axis0,axis1);
+
+
if (originalPoints.size()<=1)
{
for (int i=0;i<originalPoints.size();i++)
hull.push_back(originalPoints[0]);
return;
}
- //step1 : find anchor point with smallest x/y and move it to first location
- //also precompute angles
+ //step1 : find anchor point with smallest projection on axis0 and move it to first location
for (int i=0;i<originalPoints.size();i++)
{
- const btVector3& left = originalPoints[i];
- const btVector3& right = originalPoints[0];
- if (left.x() < right.x() || !(right.x() < left.x()) && left.y() < right.y())
+// const btVector3& left = originalPoints[i];
+// const btVector3& right = originalPoints[0];
+ btScalar projL = originalPoints[i].dot(axis0);
+ btScalar projR = originalPoints[0].dot(axis0);
+ if (projL < projR)
{
originalPoints.swap(0,i);
}
}
- for (int i=0;i<originalPoints.size();i++)
+ //also precompute angles
+ originalPoints[0].m_angle = -1e30f;
+ for (int i=1;i<originalPoints.size();i++)
{
- btVector3 xvec(1,0,0);
+ btVector3 xvec = axis0;
btVector3 ar = originalPoints[i]-originalPoints[0];
- originalPoints[i].m_angle = btCross(xvec, ar).dot(btVector3(0,0,1)) / ar.length();
+ originalPoints[i].m_angle = btCross(xvec, ar).dot(normalAxis) / ar.length();
}
//step 2: sort all points, based on 'angle' with this anchor
@@ -98,7 +105,7 @@ inline void GrahamScanConvexHull2D(btAlignedObjectArray<GrahamVector2>& original
while (!isConvex&& hull.size()>1) {
btVector3& a = hull[hull.size()-2];
btVector3& b = hull[hull.size()-1];
- isConvex = btCross(a-b,a-originalPoints[i]).dot(btVector3(0,0,1))> 0;
+ isConvex = btCross(a-b,a-originalPoints[i]).dot(normalAxis)> 0;
if (!isConvex)
hull.pop_back();
else
diff --git a/extern/bullet2/src/LinearMath/btIDebugDraw.h b/extern/bullet2/src/LinearMath/btIDebugDraw.h
index 935502f844f..a00d7763a75 100644
--- a/extern/bullet2/src/LinearMath/btIDebugDraw.h
+++ b/extern/bullet2/src/LinearMath/btIDebugDraw.h
@@ -280,6 +280,7 @@ class btIDebugDraw
}
}
+
virtual void drawBox(const btVector3& bbMin, const btVector3& bbMax, const btVector3& color)
{
drawLine(btVector3(bbMin[0], bbMin[1], bbMin[2]), btVector3(bbMax[0], bbMin[1], bbMin[2]), color);
diff --git a/extern/bullet2/src/LinearMath/btMatrix3x3.h b/extern/bullet2/src/LinearMath/btMatrix3x3.h
index d0234a04369..d4f5c95aa64 100644
--- a/extern/bullet2/src/LinearMath/btMatrix3x3.h
+++ b/extern/bullet2/src/LinearMath/btMatrix3x3.h
@@ -18,6 +18,18 @@ subject to the following restrictions:
#include "btVector3.h"
#include "btQuaternion.h"
+#include <stdio.h>
+
+#ifdef BT_USE_SSE
+//const __m128 ATTRIBUTE_ALIGNED16(v2220) = {2.0f, 2.0f, 2.0f, 0.0f};
+const __m128 ATTRIBUTE_ALIGNED16(vMPPP) = {-0.0f, +0.0f, +0.0f, +0.0f};
+#endif
+
+#if defined(BT_USE_SSE) || defined(BT_USE_NEON)
+const btSimdFloat4 ATTRIBUTE_ALIGNED16(v1000) = {1.0f, 0.0f, 0.0f, 0.0f};
+const btSimdFloat4 ATTRIBUTE_ALIGNED16(v0100) = {0.0f, 1.0f, 0.0f, 0.0f};
+const btSimdFloat4 ATTRIBUTE_ALIGNED16(v0010) = {0.0f, 0.0f, 1.0f, 0.0f};
+#endif
#ifdef BT_USE_DOUBLE_PRECISION
#define btMatrix3x3Data btMatrix3x3DoubleData
@@ -28,7 +40,7 @@ subject to the following restrictions:
/**@brief The btMatrix3x3 class implements a 3x3 rotation matrix, to perform linear algebra in combination with btQuaternion, btTransform and btVector3.
* Make sure to only include a pure orthogonal matrix without scaling. */
-class btMatrix3x3 {
+ATTRIBUTE_ALIGNED16(class) btMatrix3x3 {
///Data storage for the matrix, each vector is a row of the matrix
btVector3 m_el[3];
@@ -57,6 +69,42 @@ public:
yx, yy, yz,
zx, zy, zz);
}
+
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ SIMD_FORCE_INLINE btMatrix3x3 (const btSimdFloat4 v0, const btSimdFloat4 v1, const btSimdFloat4 v2 )
+ {
+ m_el[0].mVec128 = v0;
+ m_el[1].mVec128 = v1;
+ m_el[2].mVec128 = v2;
+ }
+
+ SIMD_FORCE_INLINE btMatrix3x3 (const btVector3& v0, const btVector3& v1, const btVector3& v2 )
+ {
+ m_el[0] = v0;
+ m_el[1] = v1;
+ m_el[2] = v2;
+ }
+
+ // Copy constructor
+ SIMD_FORCE_INLINE btMatrix3x3(const btMatrix3x3& rhs)
+ {
+ m_el[0].mVec128 = rhs.m_el[0].mVec128;
+ m_el[1].mVec128 = rhs.m_el[1].mVec128;
+ m_el[2].mVec128 = rhs.m_el[2].mVec128;
+ }
+
+ // Assignment Operator
+ SIMD_FORCE_INLINE btMatrix3x3& operator=(const btMatrix3x3& m)
+ {
+ m_el[0].mVec128 = m.m_el[0].mVec128;
+ m_el[1].mVec128 = m.m_el[1].mVec128;
+ m_el[2].mVec128 = m.m_el[2].mVec128;
+
+ return *this;
+ }
+
+#else
+
/** @brief Copy constructor */
SIMD_FORCE_INLINE btMatrix3x3 (const btMatrix3x3& other)
{
@@ -64,6 +112,7 @@ public:
m_el[1] = other.m_el[1];
m_el[2] = other.m_el[2];
}
+
/** @brief Assignment Operator */
SIMD_FORCE_INLINE btMatrix3x3& operator=(const btMatrix3x3& other)
{
@@ -73,6 +122,8 @@ public:
return *this;
}
+#endif
+
/** @brief Get a column of the matrix as a vector
* @param i Column number 0 indexed */
SIMD_FORCE_INLINE btVector3 getColumn(int i) const
@@ -155,14 +206,69 @@ public:
btScalar d = q.length2();
btFullAssert(d != btScalar(0.0));
btScalar s = btScalar(2.0) / d;
+
+ #if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vs, Q = q.get128();
+ __m128i Qi = btCastfTo128i(Q);
+ __m128 Y, Z;
+ __m128 V1, V2, V3;
+ __m128 V11, V21, V31;
+ __m128 NQ = _mm_xor_ps(Q, btvMzeroMask);
+ __m128i NQi = btCastfTo128i(NQ);
+
+ V1 = btCastiTo128f(_mm_shuffle_epi32 (Qi, BT_SHUFFLE(1,0,2,3))); // Y X Z W
+ V2 = _mm_shuffle_ps(NQ, Q, BT_SHUFFLE(0,0,1,3)); // -X -X Y W
+ V3 = btCastiTo128f(_mm_shuffle_epi32 (Qi, BT_SHUFFLE(2,1,0,3))); // Z Y X W
+ V1 = _mm_xor_ps(V1, vMPPP); // change the sign of the first element
+
+ V11 = btCastiTo128f(_mm_shuffle_epi32 (Qi, BT_SHUFFLE(1,1,0,3))); // Y Y X W
+ V21 = _mm_unpackhi_ps(Q, Q); // Z Z W W
+ V31 = _mm_shuffle_ps(Q, NQ, BT_SHUFFLE(0,2,0,3)); // X Z -X -W
+
+ V2 = V2 * V1; //
+ V1 = V1 * V11; //
+ V3 = V3 * V31; //
+
+ V11 = _mm_shuffle_ps(NQ, Q, BT_SHUFFLE(2,3,1,3)); // -Z -W Y W
+ V11 = V11 * V21; //
+ V21 = _mm_xor_ps(V21, vMPPP); // change the sign of the first element
+ V31 = _mm_shuffle_ps(Q, NQ, BT_SHUFFLE(3,3,1,3)); // W W -Y -W
+ V31 = _mm_xor_ps(V31, vMPPP); // change the sign of the first element
+ Y = btCastiTo128f(_mm_shuffle_epi32 (NQi, BT_SHUFFLE(3,2,0,3))); // -W -Z -X -W
+ Z = btCastiTo128f(_mm_shuffle_epi32 (Qi, BT_SHUFFLE(1,0,1,3))); // Y X Y W
+
+ vs = _mm_load_ss(&s);
+ V21 = V21 * Y;
+ V31 = V31 * Z;
+
+ V1 = V1 + V11;
+ V2 = V2 + V21;
+ V3 = V3 + V31;
+
+ vs = bt_splat3_ps(vs, 0);
+ // s ready
+ V1 = V1 * vs;
+ V2 = V2 * vs;
+ V3 = V3 * vs;
+
+ V1 = V1 + v1000;
+ V2 = V2 + v0100;
+ V3 = V3 + v0010;
+
+ m_el[0] = V1;
+ m_el[1] = V2;
+ m_el[2] = V3;
+ #else
btScalar xs = q.x() * s, ys = q.y() * s, zs = q.z() * s;
btScalar wx = q.w() * xs, wy = q.w() * ys, wz = q.w() * zs;
btScalar xx = q.x() * xs, xy = q.x() * ys, xz = q.x() * zs;
btScalar yy = q.y() * ys, yz = q.y() * zs, zz = q.z() * zs;
- setValue(btScalar(1.0) - (yy + zz), xy - wz, xz + wy,
+ setValue(
+ btScalar(1.0) - (yy + zz), xy - wz, xz + wy,
xy + wz, btScalar(1.0) - (xx + zz), yz - wx,
xz - wy, yz + wx, btScalar(1.0) - (xx + yy));
- }
+ #endif
+ }
/** @brief Set the matrix from euler angles using YPR around YXZ respectively
@@ -205,16 +311,29 @@ public:
/**@brief Set the matrix to the identity */
void setIdentity()
{
+#if (defined(BT_USE_SSE_IN_API)&& defined (BT_USE_SSE)) || defined(BT_USE_NEON)
+ m_el[0] = v1000;
+ m_el[1] = v0100;
+ m_el[2] = v0010;
+#else
setValue(btScalar(1.0), btScalar(0.0), btScalar(0.0),
btScalar(0.0), btScalar(1.0), btScalar(0.0),
btScalar(0.0), btScalar(0.0), btScalar(1.0));
+#endif
}
static const btMatrix3x3& getIdentity()
{
- static const btMatrix3x3 identityMatrix(btScalar(1.0), btScalar(0.0), btScalar(0.0),
+#if (defined(BT_USE_SSE_IN_API)&& defined (BT_USE_SSE)) || defined(BT_USE_NEON)
+ static const btMatrix3x3
+ identityMatrix(v1000, v0100, v0010);
+#else
+ static const btMatrix3x3
+ identityMatrix(
+ btScalar(1.0), btScalar(0.0), btScalar(0.0),
btScalar(0.0), btScalar(1.0), btScalar(0.0),
btScalar(0.0), btScalar(0.0), btScalar(1.0));
+#endif
return identityMatrix;
}
@@ -222,6 +341,40 @@ public:
* @param m The array to be filled */
void getOpenGLSubMatrix(btScalar *m) const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 v0 = m_el[0].mVec128;
+ __m128 v1 = m_el[1].mVec128;
+ __m128 v2 = m_el[2].mVec128; // x2 y2 z2 w2
+ __m128 *vm = (__m128 *)m;
+ __m128 vT;
+
+ v2 = _mm_and_ps(v2, btvFFF0fMask); // x2 y2 z2 0
+
+ vT = _mm_unpackhi_ps(v0, v1); // z0 z1 * *
+ v0 = _mm_unpacklo_ps(v0, v1); // x0 x1 y0 y1
+
+ v1 = _mm_shuffle_ps(v0, v2, BT_SHUFFLE(2, 3, 1, 3) ); // y0 y1 y2 0
+ v0 = _mm_shuffle_ps(v0, v2, BT_SHUFFLE(0, 1, 0, 3) ); // x0 x1 x2 0
+ v2 = btCastdTo128f(_mm_move_sd(btCastfTo128d(v2), btCastfTo128d(vT))); // z0 z1 z2 0
+
+ vm[0] = v0;
+ vm[1] = v1;
+ vm[2] = v2;
+#elif defined(BT_USE_NEON)
+ // note: zeros the w channel. We can preserve it at the cost of two more vtrn instructions.
+ static const uint32x2_t zMask = (const uint32x2_t) {-1, 0 };
+ float32x4_t *vm = (float32x4_t *)m;
+ float32x4x2_t top = vtrnq_f32( m_el[0].mVec128, m_el[1].mVec128 ); // {x0 x1 z0 z1}, {y0 y1 w0 w1}
+ float32x2x2_t bl = vtrn_f32( vget_low_f32(m_el[2].mVec128), vdup_n_f32(0.0f) ); // {x2 0 }, {y2 0}
+ float32x4_t v0 = vcombine_f32( vget_low_f32(top.val[0]), bl.val[0] );
+ float32x4_t v1 = vcombine_f32( vget_low_f32(top.val[1]), bl.val[1] );
+ float32x2_t q = (float32x2_t) vand_u32( (uint32x2_t) vget_high_f32( m_el[2].mVec128), zMask );
+ float32x4_t v2 = vcombine_f32( vget_high_f32(top.val[0]), q ); // z0 z1 z2 0
+
+ vm[0] = v0;
+ vm[1] = v1;
+ vm[2] = v2;
+#else
m[0] = btScalar(m_el[0].x());
m[1] = btScalar(m_el[1].x());
m[2] = btScalar(m_el[2].x());
@@ -234,13 +387,67 @@ public:
m[9] = btScalar(m_el[1].z());
m[10] = btScalar(m_el[2].z());
m[11] = btScalar(0.0);
+#endif
}
/**@brief Get the matrix represented as a quaternion
* @param q The quaternion which will be set */
void getRotation(btQuaternion& q) const
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ btScalar trace = m_el[0].x() + m_el[1].y() + m_el[2].z();
+ btScalar s, x;
+
+ union {
+ btSimdFloat4 vec;
+ btScalar f[4];
+ } temp;
+
+ if (trace > btScalar(0.0))
+ {
+ x = trace + btScalar(1.0);
+
+ temp.f[0]=m_el[2].y() - m_el[1].z();
+ temp.f[1]=m_el[0].z() - m_el[2].x();
+ temp.f[2]=m_el[1].x() - m_el[0].y();
+ temp.f[3]=x;
+ //temp.f[3]= s * btScalar(0.5);
+ }
+ else
+ {
+ int i, j, k;
+ if(m_el[0].x() < m_el[1].y())
+ {
+ if( m_el[1].y() < m_el[2].z() )
+ { i = 2; j = 0; k = 1; }
+ else
+ { i = 1; j = 2; k = 0; }
+ }
+ else
+ {
+ if( m_el[0].x() < m_el[2].z())
+ { i = 2; j = 0; k = 1; }
+ else
+ { i = 0; j = 1; k = 2; }
+ }
+
+ x = m_el[i][i] - m_el[j][j] - m_el[k][k] + btScalar(1.0);
+
+ temp.f[3] = (m_el[k][j] - m_el[j][k]);
+ temp.f[j] = (m_el[j][i] + m_el[i][j]);
+ temp.f[k] = (m_el[k][i] + m_el[i][k]);
+ temp.f[i] = x;
+ //temp.f[i] = s * btScalar(0.5);
+ }
+
+ s = btSqrt(x);
+ q.set128(temp.vec);
+ s = btScalar(0.5) / s;
+
+ q *= s;
+#else
btScalar trace = m_el[0].x() + m_el[1].y() + m_el[2].z();
+
btScalar temp[4];
if (trace > btScalar(0.0))
@@ -270,6 +477,7 @@ public:
temp[k] = (m_el[k][i] + m_el[i][k]) * s;
}
q.setValue(temp[0],temp[1],temp[2],temp[3]);
+#endif
}
/**@brief Get the matrix represented as euler angles around YXZ, roundtrip with setEulerYPR
@@ -376,9 +584,14 @@ public:
btMatrix3x3 scaled(const btVector3& s) const
{
- return btMatrix3x3(m_el[0].x() * s.x(), m_el[0].y() * s.y(), m_el[0].z() * s.z(),
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ return btMatrix3x3(m_el[0] * s, m_el[1] * s, m_el[2] * s);
+#else
+ return btMatrix3x3(
+ m_el[0].x() * s.x(), m_el[0].y() * s.y(), m_el[0].z() * s.z(),
m_el[1].x() * s.x(), m_el[1].y() * s.y(), m_el[1].z() * s.z(),
m_el[2].x() * s.x(), m_el[2].y() * s.y(), m_el[2].z() * s.z());
+#endif
}
/**@brief Return the determinant of the matrix */
@@ -527,15 +740,101 @@ public:
SIMD_FORCE_INLINE btMatrix3x3&
btMatrix3x3::operator*=(const btMatrix3x3& m)
{
- setValue(m.tdotx(m_el[0]), m.tdoty(m_el[0]), m.tdotz(m_el[0]),
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 rv00, rv01, rv02;
+ __m128 rv10, rv11, rv12;
+ __m128 rv20, rv21, rv22;
+ __m128 mv0, mv1, mv2;
+
+ rv02 = m_el[0].mVec128;
+ rv12 = m_el[1].mVec128;
+ rv22 = m_el[2].mVec128;
+
+ mv0 = _mm_and_ps(m[0].mVec128, btvFFF0fMask);
+ mv1 = _mm_and_ps(m[1].mVec128, btvFFF0fMask);
+ mv2 = _mm_and_ps(m[2].mVec128, btvFFF0fMask);
+
+ // rv0
+ rv00 = bt_splat_ps(rv02, 0);
+ rv01 = bt_splat_ps(rv02, 1);
+ rv02 = bt_splat_ps(rv02, 2);
+
+ rv00 = _mm_mul_ps(rv00, mv0);
+ rv01 = _mm_mul_ps(rv01, mv1);
+ rv02 = _mm_mul_ps(rv02, mv2);
+
+ // rv1
+ rv10 = bt_splat_ps(rv12, 0);
+ rv11 = bt_splat_ps(rv12, 1);
+ rv12 = bt_splat_ps(rv12, 2);
+
+ rv10 = _mm_mul_ps(rv10, mv0);
+ rv11 = _mm_mul_ps(rv11, mv1);
+ rv12 = _mm_mul_ps(rv12, mv2);
+
+ // rv2
+ rv20 = bt_splat_ps(rv22, 0);
+ rv21 = bt_splat_ps(rv22, 1);
+ rv22 = bt_splat_ps(rv22, 2);
+
+ rv20 = _mm_mul_ps(rv20, mv0);
+ rv21 = _mm_mul_ps(rv21, mv1);
+ rv22 = _mm_mul_ps(rv22, mv2);
+
+ rv00 = _mm_add_ps(rv00, rv01);
+ rv10 = _mm_add_ps(rv10, rv11);
+ rv20 = _mm_add_ps(rv20, rv21);
+
+ m_el[0].mVec128 = _mm_add_ps(rv00, rv02);
+ m_el[1].mVec128 = _mm_add_ps(rv10, rv12);
+ m_el[2].mVec128 = _mm_add_ps(rv20, rv22);
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t rv0, rv1, rv2;
+ float32x4_t v0, v1, v2;
+ float32x4_t mv0, mv1, mv2;
+
+ v0 = m_el[0].mVec128;
+ v1 = m_el[1].mVec128;
+ v2 = m_el[2].mVec128;
+
+ mv0 = (float32x4_t) vandq_s32((int32x4_t)m[0].mVec128, btvFFF0Mask);
+ mv1 = (float32x4_t) vandq_s32((int32x4_t)m[1].mVec128, btvFFF0Mask);
+ mv2 = (float32x4_t) vandq_s32((int32x4_t)m[2].mVec128, btvFFF0Mask);
+
+ rv0 = vmulq_lane_f32(mv0, vget_low_f32(v0), 0);
+ rv1 = vmulq_lane_f32(mv0, vget_low_f32(v1), 0);
+ rv2 = vmulq_lane_f32(mv0, vget_low_f32(v2), 0);
+
+ rv0 = vmlaq_lane_f32(rv0, mv1, vget_low_f32(v0), 1);
+ rv1 = vmlaq_lane_f32(rv1, mv1, vget_low_f32(v1), 1);
+ rv2 = vmlaq_lane_f32(rv2, mv1, vget_low_f32(v2), 1);
+
+ rv0 = vmlaq_lane_f32(rv0, mv2, vget_high_f32(v0), 0);
+ rv1 = vmlaq_lane_f32(rv1, mv2, vget_high_f32(v1), 0);
+ rv2 = vmlaq_lane_f32(rv2, mv2, vget_high_f32(v2), 0);
+
+ m_el[0].mVec128 = rv0;
+ m_el[1].mVec128 = rv1;
+ m_el[2].mVec128 = rv2;
+#else
+ setValue(
+ m.tdotx(m_el[0]), m.tdoty(m_el[0]), m.tdotz(m_el[0]),
m.tdotx(m_el[1]), m.tdoty(m_el[1]), m.tdotz(m_el[1]),
m.tdotx(m_el[2]), m.tdoty(m_el[2]), m.tdotz(m_el[2]));
+#endif
return *this;
}
SIMD_FORCE_INLINE btMatrix3x3&
btMatrix3x3::operator+=(const btMatrix3x3& m)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ m_el[0].mVec128 = m_el[0].mVec128 + m.m_el[0].mVec128;
+ m_el[1].mVec128 = m_el[1].mVec128 + m.m_el[1].mVec128;
+ m_el[2].mVec128 = m_el[2].mVec128 + m.m_el[2].mVec128;
+#else
setValue(
m_el[0][0]+m.m_el[0][0],
m_el[0][1]+m.m_el[0][1],
@@ -546,52 +845,89 @@ btMatrix3x3::operator+=(const btMatrix3x3& m)
m_el[2][0]+m.m_el[2][0],
m_el[2][1]+m.m_el[2][1],
m_el[2][2]+m.m_el[2][2]);
+#endif
return *this;
}
SIMD_FORCE_INLINE btMatrix3x3
operator*(const btMatrix3x3& m, const btScalar & k)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ __m128 vk = bt_splat_ps(_mm_load_ss((float *)&k), 0x80);
+ return btMatrix3x3(
+ _mm_mul_ps(m[0].mVec128, vk),
+ _mm_mul_ps(m[1].mVec128, vk),
+ _mm_mul_ps(m[2].mVec128, vk));
+#elif defined(BT_USE_NEON)
+ return btMatrix3x3(
+ vmulq_n_f32(m[0].mVec128, k),
+ vmulq_n_f32(m[1].mVec128, k),
+ vmulq_n_f32(m[2].mVec128, k));
+#else
return btMatrix3x3(
m[0].x()*k,m[0].y()*k,m[0].z()*k,
m[1].x()*k,m[1].y()*k,m[1].z()*k,
m[2].x()*k,m[2].y()*k,m[2].z()*k);
+#endif
}
- SIMD_FORCE_INLINE btMatrix3x3
+SIMD_FORCE_INLINE btMatrix3x3
operator+(const btMatrix3x3& m1, const btMatrix3x3& m2)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
return btMatrix3x3(
- m1[0][0]+m2[0][0],
- m1[0][1]+m2[0][1],
- m1[0][2]+m2[0][2],
- m1[1][0]+m2[1][0],
- m1[1][1]+m2[1][1],
- m1[1][2]+m2[1][2],
- m1[2][0]+m2[2][0],
- m1[2][1]+m2[2][1],
- m1[2][2]+m2[2][2]);
+ m1[0].mVec128 + m2[0].mVec128,
+ m1[1].mVec128 + m2[1].mVec128,
+ m1[2].mVec128 + m2[2].mVec128);
+#else
+ return btMatrix3x3(
+ m1[0][0]+m2[0][0],
+ m1[0][1]+m2[0][1],
+ m1[0][2]+m2[0][2],
+
+ m1[1][0]+m2[1][0],
+ m1[1][1]+m2[1][1],
+ m1[1][2]+m2[1][2],
+
+ m1[2][0]+m2[2][0],
+ m1[2][1]+m2[2][1],
+ m1[2][2]+m2[2][2]);
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3
operator-(const btMatrix3x3& m1, const btMatrix3x3& m2)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
return btMatrix3x3(
- m1[0][0]-m2[0][0],
- m1[0][1]-m2[0][1],
- m1[0][2]-m2[0][2],
- m1[1][0]-m2[1][0],
- m1[1][1]-m2[1][1],
- m1[1][2]-m2[1][2],
- m1[2][0]-m2[2][0],
- m1[2][1]-m2[2][1],
- m1[2][2]-m2[2][2]);
+ m1[0].mVec128 - m2[0].mVec128,
+ m1[1].mVec128 - m2[1].mVec128,
+ m1[2].mVec128 - m2[2].mVec128);
+#else
+ return btMatrix3x3(
+ m1[0][0]-m2[0][0],
+ m1[0][1]-m2[0][1],
+ m1[0][2]-m2[0][2],
+
+ m1[1][0]-m2[1][0],
+ m1[1][1]-m2[1][1],
+ m1[1][2]-m2[1][2],
+
+ m1[2][0]-m2[2][0],
+ m1[2][1]-m2[2][1],
+ m1[2][2]-m2[2][2]);
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3&
btMatrix3x3::operator-=(const btMatrix3x3& m)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ m_el[0].mVec128 = m_el[0].mVec128 - m.m_el[0].mVec128;
+ m_el[1].mVec128 = m_el[1].mVec128 - m.m_el[1].mVec128;
+ m_el[2].mVec128 = m_el[2].mVec128 - m.m_el[2].mVec128;
+#else
setValue(
m_el[0][0]-m.m_el[0][0],
m_el[0][1]-m.m_el[0][1],
@@ -602,6 +938,7 @@ btMatrix3x3::operator-=(const btMatrix3x3& m)
m_el[2][0]-m.m_el[2][0],
m_el[2][1]-m.m_el[2][1],
m_el[2][2]-m.m_el[2][2]);
+#endif
return *this;
}
@@ -616,18 +953,59 @@ btMatrix3x3::determinant() const
SIMD_FORCE_INLINE btMatrix3x3
btMatrix3x3::absolute() const
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ return btMatrix3x3(
+ _mm_and_ps(m_el[0].mVec128, btvAbsfMask),
+ _mm_and_ps(m_el[1].mVec128, btvAbsfMask),
+ _mm_and_ps(m_el[2].mVec128, btvAbsfMask));
+#elif defined(BT_USE_NEON)
+ return btMatrix3x3(
+ (float32x4_t)vandq_s32((int32x4_t)m_el[0].mVec128, btv3AbsMask),
+ (float32x4_t)vandq_s32((int32x4_t)m_el[1].mVec128, btv3AbsMask),
+ (float32x4_t)vandq_s32((int32x4_t)m_el[2].mVec128, btv3AbsMask));
+#else
return btMatrix3x3(
- btFabs(m_el[0].x()), btFabs(m_el[0].y()), btFabs(m_el[0].z()),
- btFabs(m_el[1].x()), btFabs(m_el[1].y()), btFabs(m_el[1].z()),
- btFabs(m_el[2].x()), btFabs(m_el[2].y()), btFabs(m_el[2].z()));
+ btFabs(m_el[0].x()), btFabs(m_el[0].y()), btFabs(m_el[0].z()),
+ btFabs(m_el[1].x()), btFabs(m_el[1].y()), btFabs(m_el[1].z()),
+ btFabs(m_el[2].x()), btFabs(m_el[2].y()), btFabs(m_el[2].z()));
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3
btMatrix3x3::transpose() const
{
- return btMatrix3x3(m_el[0].x(), m_el[1].x(), m_el[2].x(),
- m_el[0].y(), m_el[1].y(), m_el[2].y(),
- m_el[0].z(), m_el[1].z(), m_el[2].z());
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ __m128 v0 = m_el[0].mVec128;
+ __m128 v1 = m_el[1].mVec128;
+ __m128 v2 = m_el[2].mVec128; // x2 y2 z2 w2
+ __m128 vT;
+
+ v2 = _mm_and_ps(v2, btvFFF0fMask); // x2 y2 z2 0
+
+ vT = _mm_unpackhi_ps(v0, v1); // z0 z1 * *
+ v0 = _mm_unpacklo_ps(v0, v1); // x0 x1 y0 y1
+
+ v1 = _mm_shuffle_ps(v0, v2, BT_SHUFFLE(2, 3, 1, 3) ); // y0 y1 y2 0
+ v0 = _mm_shuffle_ps(v0, v2, BT_SHUFFLE(0, 1, 0, 3) ); // x0 x1 x2 0
+ v2 = btCastdTo128f(_mm_move_sd(btCastfTo128d(v2), btCastfTo128d(vT))); // z0 z1 z2 0
+
+
+ return btMatrix3x3( v0, v1, v2 );
+#elif defined(BT_USE_NEON)
+ // note: zeros the w channel. We can preserve it at the cost of two more vtrn instructions.
+ static const uint32x2_t zMask = (const uint32x2_t) {-1, 0 };
+ float32x4x2_t top = vtrnq_f32( m_el[0].mVec128, m_el[1].mVec128 ); // {x0 x1 z0 z1}, {y0 y1 w0 w1}
+ float32x2x2_t bl = vtrn_f32( vget_low_f32(m_el[2].mVec128), vdup_n_f32(0.0f) ); // {x2 0 }, {y2 0}
+ float32x4_t v0 = vcombine_f32( vget_low_f32(top.val[0]), bl.val[0] );
+ float32x4_t v1 = vcombine_f32( vget_low_f32(top.val[1]), bl.val[1] );
+ float32x2_t q = (float32x2_t) vand_u32( (uint32x2_t) vget_high_f32( m_el[2].mVec128), zMask );
+ float32x4_t v2 = vcombine_f32( vget_high_f32(top.val[0]), q ); // z0 z1 z2 0
+ return btMatrix3x3( v0, v1, v2 );
+#else
+ return btMatrix3x3( m_el[0].x(), m_el[1].x(), m_el[2].x(),
+ m_el[0].y(), m_el[1].y(), m_el[2].y(),
+ m_el[0].z(), m_el[1].z(), m_el[2].z());
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3
@@ -653,7 +1031,47 @@ btMatrix3x3::inverse() const
SIMD_FORCE_INLINE btMatrix3x3
btMatrix3x3::transposeTimes(const btMatrix3x3& m) const
{
- return btMatrix3x3(
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ // zeros w
+// static const __m128i xyzMask = (const __m128i){ -1ULL, 0xffffffffULL };
+ __m128 row = m_el[0].mVec128;
+ __m128 m0 = _mm_and_ps( m.getRow(0).mVec128, btvFFF0fMask );
+ __m128 m1 = _mm_and_ps( m.getRow(1).mVec128, btvFFF0fMask);
+ __m128 m2 = _mm_and_ps( m.getRow(2).mVec128, btvFFF0fMask );
+ __m128 r0 = _mm_mul_ps(m0, _mm_shuffle_ps(row, row, 0));
+ __m128 r1 = _mm_mul_ps(m0, _mm_shuffle_ps(row, row, 0x55));
+ __m128 r2 = _mm_mul_ps(m0, _mm_shuffle_ps(row, row, 0xaa));
+ row = m_el[1].mVec128;
+ r0 = _mm_add_ps( r0, _mm_mul_ps(m1, _mm_shuffle_ps(row, row, 0)));
+ r1 = _mm_add_ps( r1, _mm_mul_ps(m1, _mm_shuffle_ps(row, row, 0x55)));
+ r2 = _mm_add_ps( r2, _mm_mul_ps(m1, _mm_shuffle_ps(row, row, 0xaa)));
+ row = m_el[2].mVec128;
+ r0 = _mm_add_ps( r0, _mm_mul_ps(m2, _mm_shuffle_ps(row, row, 0)));
+ r1 = _mm_add_ps( r1, _mm_mul_ps(m2, _mm_shuffle_ps(row, row, 0x55)));
+ r2 = _mm_add_ps( r2, _mm_mul_ps(m2, _mm_shuffle_ps(row, row, 0xaa)));
+ return btMatrix3x3( r0, r1, r2 );
+
+#elif defined BT_USE_NEON
+ // zeros w
+ static const uint32x4_t xyzMask = (const uint32x4_t){ -1, -1, -1, 0 };
+ float32x4_t m0 = (float32x4_t) vandq_u32( (uint32x4_t) m.getRow(0).mVec128, xyzMask );
+ float32x4_t m1 = (float32x4_t) vandq_u32( (uint32x4_t) m.getRow(1).mVec128, xyzMask );
+ float32x4_t m2 = (float32x4_t) vandq_u32( (uint32x4_t) m.getRow(2).mVec128, xyzMask );
+ float32x4_t row = m_el[0].mVec128;
+ float32x4_t r0 = vmulq_lane_f32( m0, vget_low_f32(row), 0);
+ float32x4_t r1 = vmulq_lane_f32( m0, vget_low_f32(row), 1);
+ float32x4_t r2 = vmulq_lane_f32( m0, vget_high_f32(row), 0);
+ row = m_el[1].mVec128;
+ r0 = vmlaq_lane_f32( r0, m1, vget_low_f32(row), 0);
+ r1 = vmlaq_lane_f32( r1, m1, vget_low_f32(row), 1);
+ r2 = vmlaq_lane_f32( r2, m1, vget_high_f32(row), 0);
+ row = m_el[2].mVec128;
+ r0 = vmlaq_lane_f32( r0, m2, vget_low_f32(row), 0);
+ r1 = vmlaq_lane_f32( r1, m2, vget_low_f32(row), 1);
+ r2 = vmlaq_lane_f32( r2, m2, vget_high_f32(row), 0);
+ return btMatrix3x3( r0, r1, r2 );
+#else
+ return btMatrix3x3(
m_el[0].x() * m[0].x() + m_el[1].x() * m[1].x() + m_el[2].x() * m[2].x(),
m_el[0].x() * m[0].y() + m_el[1].x() * m[1].y() + m_el[2].x() * m[2].y(),
m_el[0].x() * m[0].z() + m_el[1].x() * m[1].z() + m_el[2].x() * m[2].z(),
@@ -663,38 +1081,196 @@ btMatrix3x3::transposeTimes(const btMatrix3x3& m) const
m_el[0].z() * m[0].x() + m_el[1].z() * m[1].x() + m_el[2].z() * m[2].x(),
m_el[0].z() * m[0].y() + m_el[1].z() * m[1].y() + m_el[2].z() * m[2].y(),
m_el[0].z() * m[0].z() + m_el[1].z() * m[1].z() + m_el[2].z() * m[2].z());
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3
btMatrix3x3::timesTranspose(const btMatrix3x3& m) const
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ __m128 a0 = m_el[0].mVec128;
+ __m128 a1 = m_el[1].mVec128;
+ __m128 a2 = m_el[2].mVec128;
+
+ btMatrix3x3 mT = m.transpose(); // we rely on transpose() zeroing w channel so that we don't have to do it here
+ __m128 mx = mT[0].mVec128;
+ __m128 my = mT[1].mVec128;
+ __m128 mz = mT[2].mVec128;
+
+ __m128 r0 = _mm_mul_ps(mx, _mm_shuffle_ps(a0, a0, 0x00));
+ __m128 r1 = _mm_mul_ps(mx, _mm_shuffle_ps(a1, a1, 0x00));
+ __m128 r2 = _mm_mul_ps(mx, _mm_shuffle_ps(a2, a2, 0x00));
+ r0 = _mm_add_ps(r0, _mm_mul_ps(my, _mm_shuffle_ps(a0, a0, 0x55)));
+ r1 = _mm_add_ps(r1, _mm_mul_ps(my, _mm_shuffle_ps(a1, a1, 0x55)));
+ r2 = _mm_add_ps(r2, _mm_mul_ps(my, _mm_shuffle_ps(a2, a2, 0x55)));
+ r0 = _mm_add_ps(r0, _mm_mul_ps(mz, _mm_shuffle_ps(a0, a0, 0xaa)));
+ r1 = _mm_add_ps(r1, _mm_mul_ps(mz, _mm_shuffle_ps(a1, a1, 0xaa)));
+ r2 = _mm_add_ps(r2, _mm_mul_ps(mz, _mm_shuffle_ps(a2, a2, 0xaa)));
+ return btMatrix3x3( r0, r1, r2);
+
+#elif defined BT_USE_NEON
+ float32x4_t a0 = m_el[0].mVec128;
+ float32x4_t a1 = m_el[1].mVec128;
+ float32x4_t a2 = m_el[2].mVec128;
+
+ btMatrix3x3 mT = m.transpose(); // we rely on transpose() zeroing w channel so that we don't have to do it here
+ float32x4_t mx = mT[0].mVec128;
+ float32x4_t my = mT[1].mVec128;
+ float32x4_t mz = mT[2].mVec128;
+
+ float32x4_t r0 = vmulq_lane_f32( mx, vget_low_f32(a0), 0);
+ float32x4_t r1 = vmulq_lane_f32( mx, vget_low_f32(a1), 0);
+ float32x4_t r2 = vmulq_lane_f32( mx, vget_low_f32(a2), 0);
+ r0 = vmlaq_lane_f32( r0, my, vget_low_f32(a0), 1);
+ r1 = vmlaq_lane_f32( r1, my, vget_low_f32(a1), 1);
+ r2 = vmlaq_lane_f32( r2, my, vget_low_f32(a2), 1);
+ r0 = vmlaq_lane_f32( r0, mz, vget_high_f32(a0), 0);
+ r1 = vmlaq_lane_f32( r1, mz, vget_high_f32(a1), 0);
+ r2 = vmlaq_lane_f32( r2, mz, vget_high_f32(a2), 0);
+ return btMatrix3x3( r0, r1, r2 );
+
+#else
return btMatrix3x3(
m_el[0].dot(m[0]), m_el[0].dot(m[1]), m_el[0].dot(m[2]),
m_el[1].dot(m[0]), m_el[1].dot(m[1]), m_el[1].dot(m[2]),
m_el[2].dot(m[0]), m_el[2].dot(m[1]), m_el[2].dot(m[2]));
-
+#endif
}
SIMD_FORCE_INLINE btVector3
operator*(const btMatrix3x3& m, const btVector3& v)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))|| defined (BT_USE_NEON)
+ return v.dot3(m[0], m[1], m[2]);
+#else
return btVector3(m[0].dot(v), m[1].dot(v), m[2].dot(v));
+#endif
}
SIMD_FORCE_INLINE btVector3
operator*(const btVector3& v, const btMatrix3x3& m)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+
+ const __m128 vv = v.mVec128;
+
+ __m128 c0 = bt_splat_ps( vv, 0);
+ __m128 c1 = bt_splat_ps( vv, 1);
+ __m128 c2 = bt_splat_ps( vv, 2);
+
+ c0 = _mm_mul_ps(c0, _mm_and_ps(m[0].mVec128, btvFFF0fMask) );
+ c1 = _mm_mul_ps(c1, _mm_and_ps(m[1].mVec128, btvFFF0fMask) );
+ c0 = _mm_add_ps(c0, c1);
+ c2 = _mm_mul_ps(c2, _mm_and_ps(m[2].mVec128, btvFFF0fMask) );
+
+ return btVector3(_mm_add_ps(c0, c2));
+#elif defined(BT_USE_NEON)
+ const float32x4_t vv = v.mVec128;
+ const float32x2_t vlo = vget_low_f32(vv);
+ const float32x2_t vhi = vget_high_f32(vv);
+
+ float32x4_t c0, c1, c2;
+
+ c0 = (float32x4_t) vandq_s32((int32x4_t)m[0].mVec128, btvFFF0Mask);
+ c1 = (float32x4_t) vandq_s32((int32x4_t)m[1].mVec128, btvFFF0Mask);
+ c2 = (float32x4_t) vandq_s32((int32x4_t)m[2].mVec128, btvFFF0Mask);
+
+ c0 = vmulq_lane_f32(c0, vlo, 0);
+ c1 = vmulq_lane_f32(c1, vlo, 1);
+ c2 = vmulq_lane_f32(c2, vhi, 0);
+ c0 = vaddq_f32(c0, c1);
+ c0 = vaddq_f32(c0, c2);
+
+ return btVector3(c0);
+#else
return btVector3(m.tdotx(v), m.tdoty(v), m.tdotz(v));
+#endif
}
SIMD_FORCE_INLINE btMatrix3x3
operator*(const btMatrix3x3& m1, const btMatrix3x3& m2)
{
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+
+ __m128 m10 = m1[0].mVec128;
+ __m128 m11 = m1[1].mVec128;
+ __m128 m12 = m1[2].mVec128;
+
+ __m128 m2v = _mm_and_ps(m2[0].mVec128, btvFFF0fMask);
+
+ __m128 c0 = bt_splat_ps( m10, 0);
+ __m128 c1 = bt_splat_ps( m11, 0);
+ __m128 c2 = bt_splat_ps( m12, 0);
+
+ c0 = _mm_mul_ps(c0, m2v);
+ c1 = _mm_mul_ps(c1, m2v);
+ c2 = _mm_mul_ps(c2, m2v);
+
+ m2v = _mm_and_ps(m2[1].mVec128, btvFFF0fMask);
+
+ __m128 c0_1 = bt_splat_ps( m10, 1);
+ __m128 c1_1 = bt_splat_ps( m11, 1);
+ __m128 c2_1 = bt_splat_ps( m12, 1);
+
+ c0_1 = _mm_mul_ps(c0_1, m2v);
+ c1_1 = _mm_mul_ps(c1_1, m2v);
+ c2_1 = _mm_mul_ps(c2_1, m2v);
+
+ m2v = _mm_and_ps(m2[2].mVec128, btvFFF0fMask);
+
+ c0 = _mm_add_ps(c0, c0_1);
+ c1 = _mm_add_ps(c1, c1_1);
+ c2 = _mm_add_ps(c2, c2_1);
+
+ m10 = bt_splat_ps( m10, 2);
+ m11 = bt_splat_ps( m11, 2);
+ m12 = bt_splat_ps( m12, 2);
+
+ m10 = _mm_mul_ps(m10, m2v);
+ m11 = _mm_mul_ps(m11, m2v);
+ m12 = _mm_mul_ps(m12, m2v);
+
+ c0 = _mm_add_ps(c0, m10);
+ c1 = _mm_add_ps(c1, m11);
+ c2 = _mm_add_ps(c2, m12);
+
+ return btMatrix3x3(c0, c1, c2);
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t rv0, rv1, rv2;
+ float32x4_t v0, v1, v2;
+ float32x4_t mv0, mv1, mv2;
+
+ v0 = m1[0].mVec128;
+ v1 = m1[1].mVec128;
+ v2 = m1[2].mVec128;
+
+ mv0 = (float32x4_t) vandq_s32((int32x4_t)m2[0].mVec128, btvFFF0Mask);
+ mv1 = (float32x4_t) vandq_s32((int32x4_t)m2[1].mVec128, btvFFF0Mask);
+ mv2 = (float32x4_t) vandq_s32((int32x4_t)m2[2].mVec128, btvFFF0Mask);
+
+ rv0 = vmulq_lane_f32(mv0, vget_low_f32(v0), 0);
+ rv1 = vmulq_lane_f32(mv0, vget_low_f32(v1), 0);
+ rv2 = vmulq_lane_f32(mv0, vget_low_f32(v2), 0);
+
+ rv0 = vmlaq_lane_f32(rv0, mv1, vget_low_f32(v0), 1);
+ rv1 = vmlaq_lane_f32(rv1, mv1, vget_low_f32(v1), 1);
+ rv2 = vmlaq_lane_f32(rv2, mv1, vget_low_f32(v2), 1);
+
+ rv0 = vmlaq_lane_f32(rv0, mv2, vget_high_f32(v0), 0);
+ rv1 = vmlaq_lane_f32(rv1, mv2, vget_high_f32(v1), 0);
+ rv2 = vmlaq_lane_f32(rv2, mv2, vget_high_f32(v2), 0);
+
+ return btMatrix3x3(rv0, rv1, rv2);
+
+#else
return btMatrix3x3(
m2.tdotx( m1[0]), m2.tdoty( m1[0]), m2.tdotz( m1[0]),
m2.tdotx( m1[1]), m2.tdoty( m1[1]), m2.tdotz( m1[1]),
m2.tdotx( m1[2]), m2.tdoty( m1[2]), m2.tdotz( m1[2]));
+#endif
}
/*
@@ -716,9 +1292,24 @@ m1[0][2] * m2[0][2] + m1[1][2] * m2[1][2] + m1[2][2] * m2[2][2]);
* It will test all elements are equal. */
SIMD_FORCE_INLINE bool operator==(const btMatrix3x3& m1, const btMatrix3x3& m2)
{
- return ( m1[0][0] == m2[0][0] && m1[1][0] == m2[1][0] && m1[2][0] == m2[2][0] &&
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+
+ __m128 c0, c1, c2;
+
+ c0 = _mm_cmpeq_ps(m1[0].mVec128, m2[0].mVec128);
+ c1 = _mm_cmpeq_ps(m1[1].mVec128, m2[1].mVec128);
+ c2 = _mm_cmpeq_ps(m1[2].mVec128, m2[2].mVec128);
+
+ c0 = _mm_and_ps(c0, c1);
+ c0 = _mm_and_ps(c0, c2);
+
+ return (0x7 == _mm_movemask_ps((__m128)c0));
+#else
+ return
+ ( m1[0][0] == m2[0][0] && m1[1][0] == m2[1][0] && m1[2][0] == m2[2][0] &&
m1[0][1] == m2[0][1] && m1[1][1] == m2[1][1] && m1[2][1] == m2[2][1] &&
m1[0][2] == m2[0][2] && m1[1][2] == m2[1][2] && m1[2][2] == m2[2][2] );
+#endif
}
///for serialization
diff --git a/extern/bullet2/src/LinearMath/btPolarDecomposition.cpp b/extern/bullet2/src/LinearMath/btPolarDecomposition.cpp
new file mode 100644
index 00000000000..a4dca7fdd40
--- /dev/null
+++ b/extern/bullet2/src/LinearMath/btPolarDecomposition.cpp
@@ -0,0 +1,99 @@
+#include "btPolarDecomposition.h"
+#include "btMinMax.h"
+
+namespace
+{
+ btScalar abs_column_sum(const btMatrix3x3& a, int i)
+ {
+ return btFabs(a[0][i]) + btFabs(a[1][i]) + btFabs(a[2][i]);
+ }
+
+ btScalar abs_row_sum(const btMatrix3x3& a, int i)
+ {
+ return btFabs(a[i][0]) + btFabs(a[i][1]) + btFabs(a[i][2]);
+ }
+
+ btScalar p1_norm(const btMatrix3x3& a)
+ {
+ const btScalar sum0 = abs_column_sum(a,0);
+ const btScalar sum1 = abs_column_sum(a,1);
+ const btScalar sum2 = abs_column_sum(a,2);
+ return btMax(btMax(sum0, sum1), sum2);
+ }
+
+ btScalar pinf_norm(const btMatrix3x3& a)
+ {
+ const btScalar sum0 = abs_row_sum(a,0);
+ const btScalar sum1 = abs_row_sum(a,1);
+ const btScalar sum2 = abs_row_sum(a,2);
+ return btMax(btMax(sum0, sum1), sum2);
+ }
+}
+
+const btScalar btPolarDecomposition::DEFAULT_TOLERANCE = btScalar(0.0001);
+const unsigned int btPolarDecomposition::DEFAULT_MAX_ITERATIONS = 16;
+
+btPolarDecomposition::btPolarDecomposition(btScalar tolerance, unsigned int maxIterations)
+: m_tolerance(tolerance)
+, m_maxIterations(maxIterations)
+{
+}
+
+unsigned int btPolarDecomposition::decompose(const btMatrix3x3& a, btMatrix3x3& u, btMatrix3x3& h) const
+{
+ // Use the 'u' and 'h' matrices for intermediate calculations
+ u = a;
+ h = a.inverse();
+
+ for (unsigned int i = 0; i < m_maxIterations; ++i)
+ {
+ const btScalar h_1 = p1_norm(h);
+ const btScalar h_inf = pinf_norm(h);
+ const btScalar u_1 = p1_norm(u);
+ const btScalar u_inf = pinf_norm(u);
+
+ const btScalar h_norm = h_1 * h_inf;
+ const btScalar u_norm = u_1 * u_inf;
+
+ // The matrix is effectively singular so we cannot invert it
+ if (btFuzzyZero(h_norm) || btFuzzyZero(u_norm))
+ break;
+
+ const btScalar gamma = btPow(h_norm / u_norm, 0.25f);
+ const btScalar inv_gamma = btScalar(1.0) / gamma;
+
+ // Determine the delta to 'u'
+ const btMatrix3x3 delta = (u * (gamma - btScalar(2.0)) + h.transpose() * inv_gamma) * btScalar(0.5);
+
+ // Update the matrices
+ u += delta;
+ h = u.inverse();
+
+ // Check for convergence
+ if (p1_norm(delta) <= m_tolerance * u_1)
+ {
+ h = u.transpose() * a;
+ h = (h + h.transpose()) * 0.5;
+ return i;
+ }
+ }
+
+ // The algorithm has failed to converge to the specified tolerance, but we
+ // want to make sure that the matrices returned are in the right form.
+ h = u.transpose() * a;
+ h = (h + h.transpose()) * 0.5;
+
+ return m_maxIterations;
+}
+
+unsigned int btPolarDecomposition::maxIterations() const
+{
+ return m_maxIterations;
+}
+
+unsigned int polarDecompose(const btMatrix3x3& a, btMatrix3x3& u, btMatrix3x3& h)
+{
+ static btPolarDecomposition polar;
+ return polar.decompose(a, u, h);
+}
+
diff --git a/extern/bullet2/src/LinearMath/btPolarDecomposition.h b/extern/bullet2/src/LinearMath/btPolarDecomposition.h
new file mode 100644
index 00000000000..56156676415
--- /dev/null
+++ b/extern/bullet2/src/LinearMath/btPolarDecomposition.h
@@ -0,0 +1,73 @@
+#ifndef POLARDECOMPOSITION_H
+#define POLARDECOMPOSITION_H
+
+#include "btMatrix3x3.h"
+
+/**
+ * This class is used to compute the polar decomposition of a matrix. In
+ * general, the polar decomposition factorizes a matrix, A, into two parts: a
+ * unitary matrix (U) and a positive, semi-definite Hermitian matrix (H).
+ * However, in this particular implementation the original matrix, A, is
+ * required to be a square 3x3 matrix with real elements. This means that U will
+ * be an orthogonal matrix and H with be a positive-definite, symmetric matrix.
+ */
+class btPolarDecomposition
+{
+ public:
+ static const btScalar DEFAULT_TOLERANCE;
+ static const unsigned int DEFAULT_MAX_ITERATIONS;
+
+ /**
+ * Creates an instance with optional parameters.
+ *
+ * @param tolerance - the tolerance used to determine convergence of the
+ * algorithm
+ * @param maxIterations - the maximum number of iterations used to achieve
+ * convergence
+ */
+ btPolarDecomposition(btScalar tolerance = DEFAULT_TOLERANCE,
+ unsigned int maxIterations = DEFAULT_MAX_ITERATIONS);
+
+ /**
+ * Decomposes a matrix into orthogonal and symmetric, positive-definite
+ * parts. If the number of iterations returned by this function is equal to
+ * the maximum number of iterations, the algorithm has failed to converge.
+ *
+ * @param a - the original matrix
+ * @param u - the resulting orthogonal matrix
+ * @param h - the resulting symmetric matrix
+ *
+ * @return the number of iterations performed by the algorithm.
+ */
+ unsigned int decompose(const btMatrix3x3& a, btMatrix3x3& u, btMatrix3x3& h) const;
+
+ /**
+ * Returns the maximum number of iterations that this algorithm will perform
+ * to achieve convergence.
+ *
+ * @return maximum number of iterations
+ */
+ unsigned int maxIterations() const;
+
+ private:
+ btScalar m_tolerance;
+ unsigned int m_maxIterations;
+};
+
+/**
+ * This functions decomposes the matrix 'a' into two parts: an orthogonal matrix
+ * 'u' and a symmetric, positive-definite matrix 'h'. If the number of
+ * iterations returned by this function is equal to
+ * btPolarDecomposition::DEFAULT_MAX_ITERATIONS, the algorithm has failed to
+ * converge.
+ *
+ * @param a - the original matrix
+ * @param u - the resulting orthogonal matrix
+ * @param h - the resulting symmetric matrix
+ *
+ * @return the number of iterations performed by the algorithm.
+ */
+unsigned int polarDecompose(const btMatrix3x3& a, btMatrix3x3& u, btMatrix3x3& h);
+
+#endif // POLARDECOMPOSITION_H
+
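For reference (not part of the commit), a minimal usage sketch of the polarDecompose() helper declared above, assuming the LinearMath headers are on the include path and the library is linked:

    #include "LinearMath/btMatrix3x3.h"
    #include "LinearMath/btPolarDecomposition.h"

    int main()
    {
        // Factor a sheared matrix A into an orthogonal part U and a
        // symmetric, positive-definite part H, so that A = U * H.
        btMatrix3x3 a(btScalar(1.0), btScalar(0.2), btScalar(0.0),
                      btScalar(0.0), btScalar(1.0), btScalar(0.0),
                      btScalar(0.0), btScalar(0.0), btScalar(1.0));
        btMatrix3x3 u, h;

        unsigned int iterations = polarDecompose(a, u, h);

        // Per the header above, a return value equal to DEFAULT_MAX_ITERATIONS
        // means the iteration did not converge to the requested tolerance.
        bool converged = (iterations < btPolarDecomposition::DEFAULT_MAX_ITERATIONS);
        return converged ? 0 : 1;
    }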
diff --git a/extern/bullet2/src/LinearMath/btQuadWord.h b/extern/bullet2/src/LinearMath/btQuadWord.h
index d5e9daa45a2..11067ef47d9 100644
--- a/extern/bullet2/src/LinearMath/btQuadWord.h
+++ b/extern/bullet2/src/LinearMath/btQuadWord.h
@@ -20,6 +20,9 @@ subject to the following restrictions:
#include "btMinMax.h"
+
+
+
#if defined (__CELLOS_LV2) && defined (__SPU__)
#include <altivec.h>
#endif
@@ -47,11 +50,53 @@ public:
}
protected:
#else //__CELLOS_LV2__ __SPU__
+
+#if defined(BT_USE_SSE) || defined(BT_USE_NEON)
+ union {
+ btSimdFloat4 mVec128;
+ btScalar m_floats[4];
+ };
+public:
+ SIMD_FORCE_INLINE btSimdFloat4 get128() const
+ {
+ return mVec128;
+ }
+ SIMD_FORCE_INLINE void set128(btSimdFloat4 v128)
+ {
+ mVec128 = v128;
+ }
+#else
btScalar m_floats[4];
+#endif // BT_USE_SSE
+
#endif //__CELLOS_LV2__ __SPU__
public:
+#if defined(BT_USE_SSE) || defined(BT_USE_NEON)
+
+ // Set Vector
+ SIMD_FORCE_INLINE btQuadWord(const btSimdFloat4 vec)
+ {
+ mVec128 = vec;
+ }
+
+ // Copy constructor
+ SIMD_FORCE_INLINE btQuadWord(const btQuadWord& rhs)
+ {
+ mVec128 = rhs.mVec128;
+ }
+
+ // Assignment Operator
+ SIMD_FORCE_INLINE btQuadWord&
+ operator=(const btQuadWord& v)
+ {
+ mVec128 = v.mVec128;
+
+ return *this;
+ }
+
+#endif
/**@brief Return the x value */
SIMD_FORCE_INLINE const btScalar& getX() const { return m_floats[0]; }
@@ -60,13 +105,13 @@ protected:
/**@brief Return the z value */
SIMD_FORCE_INLINE const btScalar& getZ() const { return m_floats[2]; }
/**@brief Set the x value */
- SIMD_FORCE_INLINE void setX(btScalar x) { m_floats[0] = x;};
+ SIMD_FORCE_INLINE void setX(btScalar _x) { m_floats[0] = _x;};
/**@brief Set the y value */
- SIMD_FORCE_INLINE void setY(btScalar y) { m_floats[1] = y;};
+ SIMD_FORCE_INLINE void setY(btScalar _y) { m_floats[1] = _y;};
/**@brief Set the z value */
- SIMD_FORCE_INLINE void setZ(btScalar z) { m_floats[2] = z;};
+ SIMD_FORCE_INLINE void setZ(btScalar _z) { m_floats[2] = _z;};
/**@brief Set the w value */
- SIMD_FORCE_INLINE void setW(btScalar w) { m_floats[3] = w;};
+ SIMD_FORCE_INLINE void setW(btScalar _w) { m_floats[3] = _w;};
/**@brief Return the x value */
SIMD_FORCE_INLINE const btScalar& x() const { return m_floats[0]; }
/**@brief Return the y value */
@@ -84,7 +129,14 @@ protected:
SIMD_FORCE_INLINE bool operator==(const btQuadWord& other) const
{
- return ((m_floats[3]==other.m_floats[3]) && (m_floats[2]==other.m_floats[2]) && (m_floats[1]==other.m_floats[1]) && (m_floats[0]==other.m_floats[0]));
+#ifdef BT_USE_SSE
+ return (0xf == _mm_movemask_ps((__m128)_mm_cmpeq_ps(mVec128, other.mVec128)));
+#else
+ return ((m_floats[3]==other.m_floats[3]) &&
+ (m_floats[2]==other.m_floats[2]) &&
+ (m_floats[1]==other.m_floats[1]) &&
+ (m_floats[0]==other.m_floats[0]));
+#endif
}
SIMD_FORCE_INLINE bool operator!=(const btQuadWord& other) const
@@ -97,11 +149,11 @@ protected:
* @param y Value of y
* @param z Value of z
*/
- SIMD_FORCE_INLINE void setValue(const btScalar& x, const btScalar& y, const btScalar& z)
+ SIMD_FORCE_INLINE void setValue(const btScalar& _x, const btScalar& _y, const btScalar& _z)
{
- m_floats[0]=x;
- m_floats[1]=y;
- m_floats[2]=z;
+ m_floats[0]=_x;
+ m_floats[1]=_y;
+ m_floats[2]=_z;
m_floats[3] = 0.f;
}
@@ -118,12 +170,12 @@ protected:
* @param z Value of z
* @param w Value of w
*/
- SIMD_FORCE_INLINE void setValue(const btScalar& x, const btScalar& y, const btScalar& z,const btScalar& w)
+ SIMD_FORCE_INLINE void setValue(const btScalar& _x, const btScalar& _y, const btScalar& _z,const btScalar& _w)
{
- m_floats[0]=x;
- m_floats[1]=y;
- m_floats[2]=z;
- m_floats[3]=w;
+ m_floats[0]=_x;
+ m_floats[1]=_y;
+ m_floats[2]=_z;
+ m_floats[3]=_w;
}
/**@brief No initialization constructor */
SIMD_FORCE_INLINE btQuadWord()
@@ -136,9 +188,9 @@ protected:
* @param y Value of y
* @param z Value of z
*/
- SIMD_FORCE_INLINE btQuadWord(const btScalar& x, const btScalar& y, const btScalar& z)
+ SIMD_FORCE_INLINE btQuadWord(const btScalar& _x, const btScalar& _y, const btScalar& _z)
{
- m_floats[0] = x, m_floats[1] = y, m_floats[2] = z, m_floats[3] = 0.0f;
+ m_floats[0] = _x, m_floats[1] = _y, m_floats[2] = _z, m_floats[3] = 0.0f;
}
/**@brief Initializing constructor
@@ -147,9 +199,9 @@ protected:
* @param z Value of z
* @param w Value of w
*/
- SIMD_FORCE_INLINE btQuadWord(const btScalar& x, const btScalar& y, const btScalar& z,const btScalar& w)
+ SIMD_FORCE_INLINE btQuadWord(const btScalar& _x, const btScalar& _y, const btScalar& _z,const btScalar& _w)
{
- m_floats[0] = x, m_floats[1] = y, m_floats[2] = z, m_floats[3] = w;
+ m_floats[0] = _x, m_floats[1] = _y, m_floats[2] = _z, m_floats[3] = _w;
}
/**@brief Set each element to the max of the current values and the values of another btQuadWord
@@ -157,21 +209,33 @@ protected:
*/
SIMD_FORCE_INLINE void setMax(const btQuadWord& other)
{
- btSetMax(m_floats[0], other.m_floats[0]);
+ #ifdef BT_USE_SSE
+ mVec128 = _mm_max_ps(mVec128, other.mVec128);
+ #elif defined(BT_USE_NEON)
+ mVec128 = vmaxq_f32(mVec128, other.mVec128);
+ #else
+ btSetMax(m_floats[0], other.m_floats[0]);
btSetMax(m_floats[1], other.m_floats[1]);
btSetMax(m_floats[2], other.m_floats[2]);
btSetMax(m_floats[3], other.m_floats[3]);
- }
+ #endif
+ }
/**@brief Set each element to the min of the current values and the values of another btQuadWord
* @param other The other btQuadWord to compare with
*/
SIMD_FORCE_INLINE void setMin(const btQuadWord& other)
{
- btSetMin(m_floats[0], other.m_floats[0]);
+ #ifdef BT_USE_SSE
+ mVec128 = _mm_min_ps(mVec128, other.mVec128);
+ #elif defined(BT_USE_NEON)
+ mVec128 = vminq_f32(mVec128, other.mVec128);
+ #else
+ btSetMin(m_floats[0], other.m_floats[0]);
btSetMin(m_floats[1], other.m_floats[1]);
btSetMin(m_floats[2], other.m_floats[2]);
btSetMin(m_floats[3], other.m_floats[3]);
- }
+ #endif
+ }
diff --git a/extern/bullet2/src/LinearMath/btQuaternion.h b/extern/bullet2/src/LinearMath/btQuaternion.h
index ee79f6eaeee..7d7f25fb4d3 100644
--- a/extern/bullet2/src/LinearMath/btQuaternion.h
+++ b/extern/bullet2/src/LinearMath/btQuaternion.h
@@ -21,24 +21,65 @@ subject to the following restrictions:
#include "btVector3.h"
#include "btQuadWord.h"
+
+
+
+
+#ifdef BT_USE_SSE
+
+const __m128 ATTRIBUTE_ALIGNED16(vOnes) = {1.0f, 1.0f, 1.0f, 1.0f};
+
+#endif
+
+#if defined(BT_USE_SSE) || defined(BT_USE_NEON)
+
+const btSimdFloat4 ATTRIBUTE_ALIGNED16(vQInv) = {-0.0f, -0.0f, -0.0f, +0.0f};
+const btSimdFloat4 ATTRIBUTE_ALIGNED16(vPPPM) = {+0.0f, +0.0f, +0.0f, -0.0f};
+
+#endif
+
/**@brief The btQuaternion implements quaternion to perform linear algebra rotations in combination with btMatrix3x3, btVector3 and btTransform. */
class btQuaternion : public btQuadWord {
public:
/**@brief No initialization constructor */
btQuaternion() {}
+#if (defined(BT_USE_SSE_IN_API) && defined(BT_USE_SSE))|| defined(BT_USE_NEON)
+ // Set Vector
+ SIMD_FORCE_INLINE btQuaternion(const btSimdFloat4 vec)
+ {
+ mVec128 = vec;
+ }
+
+ // Copy constructor
+ SIMD_FORCE_INLINE btQuaternion(const btQuaternion& rhs)
+ {
+ mVec128 = rhs.mVec128;
+ }
+
+ // Assignment Operator
+ SIMD_FORCE_INLINE btQuaternion&
+ operator=(const btQuaternion& v)
+ {
+ mVec128 = v.mVec128;
+
+ return *this;
+ }
+
+#endif
+
// template <typename btScalar>
// explicit Quaternion(const btScalar *v) : Tuple4<btScalar>(v) {}
/**@brief Constructor from scalars */
- btQuaternion(const btScalar& x, const btScalar& y, const btScalar& z, const btScalar& w)
- : btQuadWord(x, y, z, w)
+ btQuaternion(const btScalar& _x, const btScalar& _y, const btScalar& _z, const btScalar& _w)
+ : btQuadWord(_x, _y, _z, _w)
{}
/**@brief Axis angle Constructor
* @param axis The axis which the rotation is around
* @param angle The magnitude of the rotation around the angle (Radians) */
- btQuaternion(const btVector3& axis, const btScalar& angle)
+ btQuaternion(const btVector3& _axis, const btScalar& _angle)
{
- setRotation(axis, angle);
+ setRotation(_axis, _angle);
}
/**@brief Constructor from Euler angles
* @param yaw Angle around Y unless BT_EULER_DEFAULT_ZYX defined then Z
@@ -55,13 +96,13 @@ public:
/**@brief Set the rotation using axis angle notation
* @param axis The axis around which to rotate
* @param angle The magnitude of the rotation in Radians */
- void setRotation(const btVector3& axis, const btScalar& angle)
+ void setRotation(const btVector3& axis, const btScalar& _angle)
{
btScalar d = axis.length();
btAssert(d != btScalar(0.0));
- btScalar s = btSin(angle * btScalar(0.5)) / d;
+ btScalar s = btSin(_angle * btScalar(0.5)) / d;
setValue(axis.x() * s, axis.y() * s, axis.z() * s,
- btCos(angle * btScalar(0.5)));
+ btCos(_angle * btScalar(0.5)));
}
/**@brief Set the quaternion using Euler angles
* @param yaw Angle around Y
@@ -107,7 +148,16 @@ public:
* @param q The quaternion to add to this one */
SIMD_FORCE_INLINE btQuaternion& operator+=(const btQuaternion& q)
{
- m_floats[0] += q.x(); m_floats[1] += q.y(); m_floats[2] += q.z(); m_floats[3] += q.m_floats[3];
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_add_ps(mVec128, q.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vaddq_f32(mVec128, q.mVec128);
+#else
+ m_floats[0] += q.x();
+ m_floats[1] += q.y();
+ m_floats[2] += q.z();
+ m_floats[3] += q.m_floats[3];
+#endif
return *this;
}
@@ -115,15 +165,35 @@ public:
* @param q The quaternion to subtract from this one */
btQuaternion& operator-=(const btQuaternion& q)
{
- m_floats[0] -= q.x(); m_floats[1] -= q.y(); m_floats[2] -= q.z(); m_floats[3] -= q.m_floats[3];
- return *this;
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_sub_ps(mVec128, q.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vsubq_f32(mVec128, q.mVec128);
+#else
+ m_floats[0] -= q.x();
+ m_floats[1] -= q.y();
+ m_floats[2] -= q.z();
+ m_floats[3] -= q.m_floats[3];
+#endif
+ return *this;
}
/**@brief Scale this quaternion
* @param s The scalar to scale by */
btQuaternion& operator*=(const btScalar& s)
{
- m_floats[0] *= s; m_floats[1] *= s; m_floats[2] *= s; m_floats[3] *= s;
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vs = _mm_load_ss(&s); // (S 0 0 0)
+ vs = bt_pshufd_ps(vs, 0); // (S S S S)
+ mVec128 = _mm_mul_ps(mVec128, vs);
+#elif defined(BT_USE_NEON)
+ mVec128 = vmulq_n_f32(mVec128, s);
+#else
+ m_floats[0] *= s;
+ m_floats[1] *= s;
+ m_floats[2] *= s;
+ m_floats[3] *= s;
+#endif
return *this;
}
@@ -132,17 +202,111 @@ public:
* Equivilant to this = this * q */
btQuaternion& operator*=(const btQuaternion& q)
{
- setValue(m_floats[3] * q.x() + m_floats[0] * q.m_floats[3] + m_floats[1] * q.z() - m_floats[2] * q.y(),
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vQ2 = q.get128();
+
+ __m128 A1 = bt_pshufd_ps(mVec128, BT_SHUFFLE(0,1,2,0));
+ __m128 B1 = bt_pshufd_ps(vQ2, BT_SHUFFLE(3,3,3,0));
+
+ A1 = A1 * B1;
+
+ __m128 A2 = bt_pshufd_ps(mVec128, BT_SHUFFLE(1,2,0,1));
+ __m128 B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(2,0,1,1));
+
+ A2 = A2 * B2;
+
+ B1 = bt_pshufd_ps(mVec128, BT_SHUFFLE(2,0,1,2));
+ B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(1,2,0,2));
+
+ B1 = B1 * B2; // A3 *= B3
+
+ mVec128 = bt_splat_ps(mVec128, 3); // A0
+ mVec128 = mVec128 * vQ2; // A0 * B0
+
+ A1 = A1 + A2; // AB12
+ mVec128 = mVec128 - B1; // AB03 = AB0 - AB3
+ A1 = _mm_xor_ps(A1, vPPPM); // change sign of the last element
+ mVec128 = mVec128+ A1; // AB03 + AB12
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t vQ1 = mVec128;
+ float32x4_t vQ2 = q.get128();
+ float32x4_t A0, A1, B1, A2, B2, A3, B3;
+ float32x2_t vQ1zx, vQ2wx, vQ1yz, vQ2zx, vQ2yz, vQ2xz;
+
+ {
+ float32x2x2_t tmp;
+ tmp = vtrn_f32( vget_high_f32(vQ1), vget_low_f32(vQ1) ); // {z x}, {w y}
+ vQ1zx = tmp.val[0];
+
+ tmp = vtrn_f32( vget_high_f32(vQ2), vget_low_f32(vQ2) ); // {z x}, {w y}
+ vQ2zx = tmp.val[0];
+ }
+ vQ2wx = vext_f32(vget_high_f32(vQ2), vget_low_f32(vQ2), 1);
+
+ vQ1yz = vext_f32(vget_low_f32(vQ1), vget_high_f32(vQ1), 1);
+
+ vQ2yz = vext_f32(vget_low_f32(vQ2), vget_high_f32(vQ2), 1);
+ vQ2xz = vext_f32(vQ2zx, vQ2zx, 1);
+
+ A1 = vcombine_f32(vget_low_f32(vQ1), vQ1zx); // X Y z x
+ B1 = vcombine_f32(vdup_lane_f32(vget_high_f32(vQ2), 1), vQ2wx); // W W W X
+
+ A2 = vcombine_f32(vQ1yz, vget_low_f32(vQ1));
+ B2 = vcombine_f32(vQ2zx, vdup_lane_f32(vget_low_f32(vQ2), 1));
+
+ A3 = vcombine_f32(vQ1zx, vQ1yz); // Z X Y Z
+ B3 = vcombine_f32(vQ2yz, vQ2xz); // Y Z x z
+
+ A1 = vmulq_f32(A1, B1);
+ A2 = vmulq_f32(A2, B2);
+ A3 = vmulq_f32(A3, B3); // A3 *= B3
+ A0 = vmulq_lane_f32(vQ2, vget_high_f32(vQ1), 1); // A0 * B0
+
+ A1 = vaddq_f32(A1, A2); // AB12 = AB1 + AB2
+ A0 = vsubq_f32(A0, A3); // AB03 = AB0 - AB3
+
+ // change the sign of the last element
+ A1 = (btSimdFloat4)veorq_s32((int32x4_t)A1, (int32x4_t)vPPPM);
+ A0 = vaddq_f32(A0, A1); // AB03 + AB12
+
+ mVec128 = A0;
+#else
+ setValue(
+ m_floats[3] * q.x() + m_floats[0] * q.m_floats[3] + m_floats[1] * q.z() - m_floats[2] * q.y(),
m_floats[3] * q.y() + m_floats[1] * q.m_floats[3] + m_floats[2] * q.x() - m_floats[0] * q.z(),
m_floats[3] * q.z() + m_floats[2] * q.m_floats[3] + m_floats[0] * q.y() - m_floats[1] * q.x(),
m_floats[3] * q.m_floats[3] - m_floats[0] * q.x() - m_floats[1] * q.y() - m_floats[2] * q.z());
+#endif
return *this;
}
/**@brief Return the dot product between this quaternion and another
* @param q The other quaternion */
btScalar dot(const btQuaternion& q) const
{
- return m_floats[0] * q.x() + m_floats[1] * q.y() + m_floats[2] * q.z() + m_floats[3] * q.m_floats[3];
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vd;
+
+ vd = _mm_mul_ps(mVec128, q.mVec128);
+
+ __m128 t = _mm_movehl_ps(vd, vd);
+ vd = _mm_add_ps(vd, t);
+ t = _mm_shuffle_ps(vd, vd, 0x55);
+ vd = _mm_add_ss(vd, t);
+
+ return _mm_cvtss_f32(vd);
+#elif defined(BT_USE_NEON)
+ float32x4_t vd = vmulq_f32(mVec128, q.mVec128);
+ float32x2_t x = vpadd_f32(vget_low_f32(vd), vget_high_f32(vd));
+ x = vpadd_f32(x, x);
+ return vget_lane_f32(x, 0);
+#else
+ return m_floats[0] * q.x() +
+ m_floats[1] * q.y() +
+ m_floats[2] * q.z() +
+ m_floats[3] * q.m_floats[3];
+#endif
}
/**@brief Return the length squared of the quaternion */
@@ -161,7 +325,25 @@ public:
* Such that x^2 + y^2 + z^2 + w^2 = 1 */
btQuaternion& normalize()
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vd;
+
+ vd = _mm_mul_ps(mVec128, mVec128);
+
+ __m128 t = _mm_movehl_ps(vd, vd);
+ vd = _mm_add_ps(vd, t);
+ t = _mm_shuffle_ps(vd, vd, 0x55);
+ vd = _mm_add_ss(vd, t);
+
+ vd = _mm_sqrt_ss(vd);
+ vd = _mm_div_ss(vOnes, vd);
+ vd = bt_pshufd_ps(vd, 0); // splat
+ mVec128 = _mm_mul_ps(mVec128, vd);
+
+ return *this;
+#else
return *this /= length();
+#endif
}
/**@brief Return a scaled version of this quaternion
@@ -169,10 +351,18 @@ public:
SIMD_FORCE_INLINE btQuaternion
operator*(const btScalar& s) const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vs = _mm_load_ss(&s); // (S 0 0 0)
+ vs = bt_pshufd_ps(vs, 0x00); // (S S S S)
+
+ return btQuaternion(_mm_mul_ps(mVec128, vs));
+#elif defined(BT_USE_NEON)
+ return btQuaternion(vmulq_n_f32(mVec128, s));
+#else
return btQuaternion(x() * s, y() * s, z() * s, m_floats[3] * s);
+#endif
}
-
/**@brief Return an inversely scaled version of this quaternion
* @param s The inverse scale factor */
btQuaternion operator/(const btScalar& s) const
@@ -223,7 +413,13 @@ public:
/**@brief Return the inverse of this quaternion */
btQuaternion inverse() const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btQuaternion(_mm_xor_ps(mVec128, vQInv));
+#elif defined(BT_USE_NEON)
+ return btQuaternion((btSimdFloat4)veorq_s32((int32x4_t)mVec128, (int32x4_t)vQInv));
+#else
return btQuaternion(-m_floats[0], -m_floats[1], -m_floats[2], m_floats[3]);
+#endif
}
/**@brief Return the sum of this quaternion and the other
@@ -231,8 +427,14 @@ public:
SIMD_FORCE_INLINE btQuaternion
operator+(const btQuaternion& q2) const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btQuaternion(_mm_add_ps(mVec128, q2.mVec128));
+#elif defined(BT_USE_NEON)
+ return btQuaternion(vaddq_f32(mVec128, q2.mVec128));
+#else
const btQuaternion& q1 = *this;
return btQuaternion(q1.x() + q2.x(), q1.y() + q2.y(), q1.z() + q2.z(), q1.m_floats[3] + q2.m_floats[3]);
+#endif
}
/**@brief Return the difference between this quaternion and the other
@@ -240,16 +442,28 @@ public:
SIMD_FORCE_INLINE btQuaternion
operator-(const btQuaternion& q2) const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btQuaternion(_mm_sub_ps(mVec128, q2.mVec128));
+#elif defined(BT_USE_NEON)
+ return btQuaternion(vsubq_f32(mVec128, q2.mVec128));
+#else
const btQuaternion& q1 = *this;
return btQuaternion(q1.x() - q2.x(), q1.y() - q2.y(), q1.z() - q2.z(), q1.m_floats[3] - q2.m_floats[3]);
+#endif
}
/**@brief Return the negative of this quaternion
* This simply negates each element */
SIMD_FORCE_INLINE btQuaternion operator-() const
{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btQuaternion(_mm_xor_ps(mVec128, btvMzeroMask));
+#elif defined(BT_USE_NEON)
+ return btQuaternion((btSimdFloat4)veorq_s32((int32x4_t)mVec128, (int32x4_t)btvMzeroMask) );
+#else
const btQuaternion& q2 = *this;
return btQuaternion( - q2.x(), - q2.y(), - q2.z(), - q2.m_floats[3]);
+#endif
}
/**@todo document this and its use */
SIMD_FORCE_INLINE btQuaternion farthest( const btQuaternion& qd) const
@@ -284,7 +498,7 @@ public:
btAssert(magnitude > btScalar(0));
btScalar product = dot(q) / magnitude;
- if (btFabs(product) != btScalar(1))
+ if (btFabs(product) < btScalar(1))
{
// Take care of the long-angle case; see http://en.wikipedia.org/wiki/Slerp
const btScalar sign = (product < 0) ? btScalar(-1) : btScalar(1);
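The relational change above (btFabs(product) != 1 becoming btFabs(product) < 1) hardens slerp's degenerate-orientation guard: when the two quaternions are (anti)parallel the normalized dot product reaches 1, the interpolation angle is 0, and the sin(theta)-based weights would divide by zero; the strict "< 1" test also covers dot products that drift slightly above 1 through rounding. A minimal sketch of the case it protects against, assuming btQuaternion::slerp and the constructors shown elsewhere in this diff (the snippet is illustrative only, not part of the patch):

	btQuaternion a(btVector3(0, 1, 0), btScalar(0.0));   // identity rotation about Y
	btQuaternion b = a;                                   // same orientation
	// dot(a, b) / magnitude == 1, so the guarded branch is skipped and
	// slerp simply returns a copy of *this instead of dividing by btSin(0).
	btQuaternion c = a.slerp(b, btScalar(0.5));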
@@ -323,29 +537,257 @@ public:
/**@brief Return the product of two quaternions */
SIMD_FORCE_INLINE btQuaternion
-operator*(const btQuaternion& q1, const btQuaternion& q2) {
- return btQuaternion(q1.w() * q2.x() + q1.x() * q2.w() + q1.y() * q2.z() - q1.z() * q2.y(),
+operator*(const btQuaternion& q1, const btQuaternion& q2)
+{
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vQ1 = q1.get128();
+ __m128 vQ2 = q2.get128();
+ __m128 A0, A1, B1, A2, B2;
+
+ A1 = bt_pshufd_ps(vQ1, BT_SHUFFLE(0,1,2,0)); // X Y z x // vtrn
+ B1 = bt_pshufd_ps(vQ2, BT_SHUFFLE(3,3,3,0)); // W W W X // vdup vext
+
+ A1 = A1 * B1;
+
+ A2 = bt_pshufd_ps(vQ1, BT_SHUFFLE(1,2,0,1)); // Y Z X Y // vext
+ B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(2,0,1,1)); // z x Y Y // vtrn vdup
+
+ A2 = A2 * B2;
+
+ B1 = bt_pshufd_ps(vQ1, BT_SHUFFLE(2,0,1,2)); // z x Y Z // vtrn vext
+ B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(1,2,0,2)); // Y Z x z // vext vtrn
+
+ B1 = B1 * B2; // A3 *= B3
+
+ A0 = bt_splat_ps(vQ1, 3); // A0
+ A0 = A0 * vQ2; // A0 * B0
+
+ A1 = A1 + A2; // AB12
+ A0 = A0 - B1; // AB03 = AB0 - AB3
+
+ A1 = _mm_xor_ps(A1, vPPPM); // change sign of the last element
+ A0 = A0 + A1; // AB03 + AB12
+
+ return btQuaternion(A0);
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t vQ1 = q1.get128();
+ float32x4_t vQ2 = q2.get128();
+ float32x4_t A0, A1, B1, A2, B2, A3, B3;
+ float32x2_t vQ1zx, vQ2wx, vQ1yz, vQ2zx, vQ2yz, vQ2xz;
+
+ {
+ float32x2x2_t tmp;
+ tmp = vtrn_f32( vget_high_f32(vQ1), vget_low_f32(vQ1) ); // {z x}, {w y}
+ vQ1zx = tmp.val[0];
+
+ tmp = vtrn_f32( vget_high_f32(vQ2), vget_low_f32(vQ2) ); // {z x}, {w y}
+ vQ2zx = tmp.val[0];
+ }
+ vQ2wx = vext_f32(vget_high_f32(vQ2), vget_low_f32(vQ2), 1);
+
+ vQ1yz = vext_f32(vget_low_f32(vQ1), vget_high_f32(vQ1), 1);
+
+ vQ2yz = vext_f32(vget_low_f32(vQ2), vget_high_f32(vQ2), 1);
+ vQ2xz = vext_f32(vQ2zx, vQ2zx, 1);
+
+ A1 = vcombine_f32(vget_low_f32(vQ1), vQ1zx); // X Y z x
+ B1 = vcombine_f32(vdup_lane_f32(vget_high_f32(vQ2), 1), vQ2wx); // W W W X
+
+ A2 = vcombine_f32(vQ1yz, vget_low_f32(vQ1));
+ B2 = vcombine_f32(vQ2zx, vdup_lane_f32(vget_low_f32(vQ2), 1));
+
+ A3 = vcombine_f32(vQ1zx, vQ1yz); // Z X Y Z
+ B3 = vcombine_f32(vQ2yz, vQ2xz); // Y Z x z
+
+ A1 = vmulq_f32(A1, B1);
+ A2 = vmulq_f32(A2, B2);
+ A3 = vmulq_f32(A3, B3); // A3 *= B3
+ A0 = vmulq_lane_f32(vQ2, vget_high_f32(vQ1), 1); // A0 * B0
+
+ A1 = vaddq_f32(A1, A2); // AB12 = AB1 + AB2
+ A0 = vsubq_f32(A0, A3); // AB03 = AB0 - AB3
+
+ // change the sign of the last element
+ A1 = (btSimdFloat4)veorq_s32((int32x4_t)A1, (int32x4_t)vPPPM);
+ A0 = vaddq_f32(A0, A1); // AB03 + AB12
+
+ return btQuaternion(A0);
+
+#else
+ return btQuaternion(
+ q1.w() * q2.x() + q1.x() * q2.w() + q1.y() * q2.z() - q1.z() * q2.y(),
q1.w() * q2.y() + q1.y() * q2.w() + q1.z() * q2.x() - q1.x() * q2.z(),
q1.w() * q2.z() + q1.z() * q2.w() + q1.x() * q2.y() - q1.y() * q2.x(),
q1.w() * q2.w() - q1.x() * q2.x() - q1.y() * q2.y() - q1.z() * q2.z());
+#endif
}
SIMD_FORCE_INLINE btQuaternion
operator*(const btQuaternion& q, const btVector3& w)
{
- return btQuaternion( q.w() * w.x() + q.y() * w.z() - q.z() * w.y(),
- q.w() * w.y() + q.z() * w.x() - q.x() * w.z(),
- q.w() * w.z() + q.x() * w.y() - q.y() * w.x(),
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vQ1 = q.get128();
+ __m128 vQ2 = w.get128();
+ __m128 A1, B1, A2, B2, A3, B3;
+
+ A1 = bt_pshufd_ps(vQ1, BT_SHUFFLE(3,3,3,0));
+ B1 = bt_pshufd_ps(vQ2, BT_SHUFFLE(0,1,2,0));
+
+ A1 = A1 * B1;
+
+ A2 = bt_pshufd_ps(vQ1, BT_SHUFFLE(1,2,0,1));
+ B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(2,0,1,1));
+
+ A2 = A2 * B2;
+
+ A3 = bt_pshufd_ps(vQ1, BT_SHUFFLE(2,0,1,2));
+ B3 = bt_pshufd_ps(vQ2, BT_SHUFFLE(1,2,0,2));
+
+ A3 = A3 * B3; // A3 *= B3
+
+ A1 = A1 + A2; // AB12
+ A1 = _mm_xor_ps(A1, vPPPM); // change sign of the last element
+ A1 = A1 - A3; // AB123 = AB12 - AB3
+
+ return btQuaternion(A1);
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t vQ1 = q.get128();
+ float32x4_t vQ2 = w.get128();
+ float32x4_t A1, B1, A2, B2, A3, B3;
+ float32x2_t vQ1wx, vQ2zx, vQ1yz, vQ2yz, vQ1zx, vQ2xz;
+
+ vQ1wx = vext_f32(vget_high_f32(vQ1), vget_low_f32(vQ1), 1);
+ {
+ float32x2x2_t tmp;
+
+ tmp = vtrn_f32( vget_high_f32(vQ2), vget_low_f32(vQ2) ); // {z x}, {w y}
+ vQ2zx = tmp.val[0];
+
+ tmp = vtrn_f32( vget_high_f32(vQ1), vget_low_f32(vQ1) ); // {z x}, {w y}
+ vQ1zx = tmp.val[0];
+ }
+
+ vQ1yz = vext_f32(vget_low_f32(vQ1), vget_high_f32(vQ1), 1);
+
+ vQ2yz = vext_f32(vget_low_f32(vQ2), vget_high_f32(vQ2), 1);
+ vQ2xz = vext_f32(vQ2zx, vQ2zx, 1);
+
+ A1 = vcombine_f32(vdup_lane_f32(vget_high_f32(vQ1), 1), vQ1wx); // W W W X
+ B1 = vcombine_f32(vget_low_f32(vQ2), vQ2zx); // X Y z x
+
+ A2 = vcombine_f32(vQ1yz, vget_low_f32(vQ1));
+ B2 = vcombine_f32(vQ2zx, vdup_lane_f32(vget_low_f32(vQ2), 1));
+
+ A3 = vcombine_f32(vQ1zx, vQ1yz); // Z X Y Z
+ B3 = vcombine_f32(vQ2yz, vQ2xz); // Y Z x z
+
+ A1 = vmulq_f32(A1, B1);
+ A2 = vmulq_f32(A2, B2);
+ A3 = vmulq_f32(A3, B3); // A3 *= B3
+
+ A1 = vaddq_f32(A1, A2); // AB12 = AB1 + AB2
+
+ // change the sign of the last element
+ A1 = (btSimdFloat4)veorq_s32((int32x4_t)A1, (int32x4_t)vPPPM);
+
+ A1 = vsubq_f32(A1, A3); // AB123 = AB12 - AB3
+
+ return btQuaternion(A1);
+
+#else
+ return btQuaternion(
+ q.w() * w.x() + q.y() * w.z() - q.z() * w.y(),
+ q.w() * w.y() + q.z() * w.x() - q.x() * w.z(),
+ q.w() * w.z() + q.x() * w.y() - q.y() * w.x(),
-q.x() * w.x() - q.y() * w.y() - q.z() * w.z());
+#endif
}
SIMD_FORCE_INLINE btQuaternion
operator*(const btVector3& w, const btQuaternion& q)
{
- return btQuaternion( w.x() * q.w() + w.y() * q.z() - w.z() * q.y(),
- w.y() * q.w() + w.z() * q.x() - w.x() * q.z(),
- w.z() * q.w() + w.x() * q.y() - w.y() * q.x(),
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vQ1 = w.get128();
+ __m128 vQ2 = q.get128();
+ __m128 A1, B1, A2, B2, A3, B3;
+
+ A1 = bt_pshufd_ps(vQ1, BT_SHUFFLE(0,1,2,0)); // X Y z x
+ B1 = bt_pshufd_ps(vQ2, BT_SHUFFLE(3,3,3,0)); // W W W X
+
+ A1 = A1 * B1;
+
+ A2 = bt_pshufd_ps(vQ1, BT_SHUFFLE(1,2,0,1));
+ B2 = bt_pshufd_ps(vQ2, BT_SHUFFLE(2,0,1,1));
+
+ A2 = A2 * B2;
+
+ A3 = bt_pshufd_ps(vQ1, BT_SHUFFLE(2,0,1,2));
+ B3 = bt_pshufd_ps(vQ2, BT_SHUFFLE(1,2,0,2));
+
+ A3 = A3 * B3; // A3 *= B3
+
+ A1 = A1 + A2; // AB12
+ A1 = _mm_xor_ps(A1, vPPPM); // change sign of the last element
+ A1 = A1 - A3; // AB123 = AB12 - AB3
+
+ return btQuaternion(A1);
+
+#elif defined(BT_USE_NEON)
+
+ float32x4_t vQ1 = w.get128();
+ float32x4_t vQ2 = q.get128();
+ float32x4_t A1, B1, A2, B2, A3, B3;
+ float32x2_t vQ1zx, vQ2wx, vQ1yz, vQ2zx, vQ2yz, vQ2xz;
+
+ {
+ float32x2x2_t tmp;
+
+ tmp = vtrn_f32( vget_high_f32(vQ1), vget_low_f32(vQ1) ); // {z x}, {w y}
+ vQ1zx = tmp.val[0];
+
+ tmp = vtrn_f32( vget_high_f32(vQ2), vget_low_f32(vQ2) ); // {z x}, {w y}
+ vQ2zx = tmp.val[0];
+ }
+ vQ2wx = vext_f32(vget_high_f32(vQ2), vget_low_f32(vQ2), 1);
+
+ vQ1yz = vext_f32(vget_low_f32(vQ1), vget_high_f32(vQ1), 1);
+
+ vQ2yz = vext_f32(vget_low_f32(vQ2), vget_high_f32(vQ2), 1);
+ vQ2xz = vext_f32(vQ2zx, vQ2zx, 1);
+
+ A1 = vcombine_f32(vget_low_f32(vQ1), vQ1zx); // X Y z x
+ B1 = vcombine_f32(vdup_lane_f32(vget_high_f32(vQ2), 1), vQ2wx); // W W W X
+
+ A2 = vcombine_f32(vQ1yz, vget_low_f32(vQ1));
+ B2 = vcombine_f32(vQ2zx, vdup_lane_f32(vget_low_f32(vQ2), 1));
+
+ A3 = vcombine_f32(vQ1zx, vQ1yz); // Z X Y Z
+ B3 = vcombine_f32(vQ2yz, vQ2xz); // Y Z x z
+
+ A1 = vmulq_f32(A1, B1);
+ A2 = vmulq_f32(A2, B2);
+ A3 = vmulq_f32(A3, B3); // A3 *= B3
+
+ A1 = vaddq_f32(A1, A2); // AB12 = AB1 + AB2
+
+ // change the sign of the last element
+ A1 = (btSimdFloat4)veorq_s32((int32x4_t)A1, (int32x4_t)vPPPM);
+
+ A1 = vsubq_f32(A1, A3); // AB123 = AB12 - AB3
+
+ return btQuaternion(A1);
+
+#else
+ return btQuaternion(
+ +w.x() * q.w() + w.y() * q.z() - w.z() * q.y(),
+ +w.y() * q.w() + w.z() * q.x() - w.x() * q.z(),
+ +w.z() * q.w() + w.x() * q.y() - w.y() * q.x(),
-w.x() * q.x() - w.y() * q.y() - w.z() * q.z());
+#endif
}
/**@brief Calculate the dot product between two quaternions */
@@ -365,7 +807,7 @@ length(const btQuaternion& q)
/**@brief Return the angle between two quaternions*/
SIMD_FORCE_INLINE btScalar
-angle(const btQuaternion& q1, const btQuaternion& q2)
+btAngle(const btQuaternion& q1, const btQuaternion& q2)
{
return q1.angle(q2);
}
@@ -393,7 +835,13 @@ quatRotate(const btQuaternion& rotation, const btVector3& v)
{
btQuaternion q = rotation * v;
q *= rotation.inverse();
+#if defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btVector3(_mm_and_ps(q.get128(), btvFFF0fMask));
+#elif defined(BT_USE_NEON)
+ return btVector3((float32x4_t)vandq_s32((int32x4_t)q.get128(), btvFFF0Mask));
+#else
return btVector3(q.getX(),q.getY(),q.getZ());
+#endif
}
SIMD_FORCE_INLINE btQuaternion
@@ -427,4 +875,3 @@ shortestArcQuatNormalize2(btVector3& v0,btVector3& v1)
-
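That closes out the btQuaternion.h changes, which mostly add SSE/NEON fast paths behind BT_USE_SSE_IN_API / BT_USE_NEON while keeping the scalar fallbacks as the reference semantics. A short usage sketch of the affected entry points follows (illustrative only; the function name is hypothetical, the calls are the Bullet 2.81 API shown in the hunks above, SIMD_HALF_PI is assumed from btScalar.h, and LinearMath/btQuaternion.h is the usual include path):

	#include "LinearMath/btQuaternion.h"

	void quaternionSmokeTest()
	{
		btQuaternion q(btVector3(0, 0, 1), SIMD_HALF_PI);   // 90 degrees about Z
		btQuaternion r(btVector3(1, 0, 0), SIMD_HALF_PI);   // 90 degrees about X

		btQuaternion composed = q * r;   // operator*(btQuaternion, btQuaternion), vectorized above
		composed.normalize();            // SSE path: _mm_sqrt_ss + _mm_div_ss, then splat
		btScalar d = q.dot(r);           // horizontal add on the SIMD paths
		btVector3 v = quatRotate(q, btVector3(1, 0, 0));    // result masked back to (x, y, z, 0)
		(void)composed; (void)d; (void)v; // same values on the scalar and SIMD branches
	}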
diff --git a/extern/bullet2/src/LinearMath/btScalar.h b/extern/bullet2/src/LinearMath/btScalar.h
index ecae972243c..aaa1d6de6b4 100644
--- a/extern/bullet2/src/LinearMath/btScalar.h
+++ b/extern/bullet2/src/LinearMath/btScalar.h
@@ -28,7 +28,7 @@ subject to the following restrictions:
#include <float.h>
/* SVN $Revision$ on $Date$ from http://bullet.googlecode.com*/
-#define BT_BULLET_VERSION 280
+#define BT_BULLET_VERSION 281
inline int btGetVersion()
{
@@ -68,7 +68,20 @@ inline int btGetVersion()
#else
#if (defined (_WIN32) && (_MSC_VER) && _MSC_VER >= 1400) && (!defined (BT_USE_DOUBLE_PRECISION))
+ #if _MSC_VER>1400
+ #define BT_USE_SIMD_VECTOR3
+ #endif
+
#define BT_USE_SSE
+ #ifdef BT_USE_SSE
+ //BT_USE_SSE_IN_API is disabled under Windows by default, because
+ //it makes it harder to integrate Bullet into your application under Windows
+ //(structures embedding Bullet structs/classes need to be 16-byte aligned)
+ //with relatively little performance gain.
+ //If you are not embedding Bullet data in your classes, or you make sure those classes are aligned on 16-byte boundaries,
+ //you can manually enable this line or set it in the build system for a bit of performance gain (a few percent, dependent on usage)
+ //#define BT_USE_SSE_IN_API
+ #endif //BT_USE_SSE
#include <emmintrin.h>
#endif
@@ -76,9 +89,14 @@ inline int btGetVersion()
#endif //__MINGW32__
- #include <assert.h>
#ifdef BT_DEBUG
+ #ifdef _MSC_VER
+ #include <stdio.h>
+ #define btAssert(x) { if(!(x)){printf("Assert "__FILE__ ":%u ("#x")\n", __LINE__);__debugbreak(); }}
+ #else//_MSC_VER
+ #include <assert.h>
#define btAssert assert
+ #endif//_MSC_VER
#else
#define btAssert(x)
#endif
@@ -143,11 +161,37 @@ inline int btGetVersion()
#else
//non-windows systems
-#if (defined (__APPLE__) && defined (__i386__) && (!defined (BT_USE_DOUBLE_PRECISION)))
- #define BT_USE_SSE
- #include <emmintrin.h>
+#if (defined (__APPLE__) && (!defined (BT_USE_DOUBLE_PRECISION)))
+ #if defined (__i386__) || defined (__x86_64__)
+ #define BT_USE_SIMD_VECTOR3
+ #define BT_USE_SSE
+ //BT_USE_SSE_IN_API is enabled on Mac OSX by default, because memory is automatically aligned on 16-byte boundaries
+ //if apps run into issues, we will disable the next line
+ #define BT_USE_SSE_IN_API
+ #ifdef BT_USE_SSE
+ // include appropriate SSE level
+ #if defined (__SSE4_1__)
+ #include <smmintrin.h>
+ #elif defined (__SSSE3__)
+ #include <tmmintrin.h>
+ #elif defined (__SSE3__)
+ #include <pmmintrin.h>
+ #else
+ #include <emmintrin.h>
+ #endif
+ #endif //BT_USE_SSE
+ #elif defined( __armv7__ )
+ #ifdef __clang__
+ #define BT_USE_NEON 1
+ #define BT_USE_SIMD_VECTOR3
+
+ #if defined BT_USE_NEON && defined (__clang__)
+ #include <arm_neon.h>
+ #endif//BT_USE_NEON
+ #endif //__clang__
+ #endif//__arm__
- #define SIMD_FORCE_INLINE inline
+ #define SIMD_FORCE_INLINE inline __attribute__ ((always_inline))
///@todo: check out alignment methods for other platforms/compilers
#define ATTRIBUTE_ALIGNED16(a) a __attribute__ ((aligned (16)))
#define ATTRIBUTE_ALIGNED64(a) a __attribute__ ((aligned (64)))
@@ -157,10 +201,22 @@ inline int btGetVersion()
#endif
#if defined(DEBUG) || defined (_DEBUG)
+ #if defined (__i386__) || defined (__x86_64__)
+ #include <stdio.h>
+ #define btAssert(x)\
+ {\
+ if(!(x))\
+ {\
+ printf("Assert %s in line %d, file %s\n",#x, __LINE__, __FILE__);\
+ asm volatile ("int3");\
+ }\
+ }
+ #else//defined (__i386__) || defined (__x86_64__)
#define btAssert assert
- #else
+ #endif//defined (__i386__) || defined (__x86_64__)
+ #else//defined(DEBUG) || defined (_DEBUG)
#define btAssert(x)
- #endif
+ #endif//defined(DEBUG) || defined (_DEBUG)
//btFullAssert is optional, slows down a lot
#define btFullAssert(x)
@@ -210,6 +266,70 @@ typedef float btScalar;
#define BT_LARGE_FLOAT 1e18f
#endif
+#ifdef BT_USE_SSE
+typedef __m128 btSimdFloat4;
+#endif//BT_USE_SSE
+
+#if defined (BT_USE_SSE)
+//#if defined BT_USE_SSE_IN_API && defined (BT_USE_SSE)
+#ifdef _WIN32
+
+#ifndef BT_NAN
+static int btNanMask = 0x7F800001;
+#define BT_NAN (*(float*)&btNanMask)
+#endif
+
+#ifndef BT_INFINITY
+static int btInfinityMask = 0x7F800000;
+#define BT_INFINITY (*(float*)&btInfinityMask)
+#endif
+
+inline __m128 operator + (const __m128 A, const __m128 B)
+{
+ return _mm_add_ps(A, B);
+}
+
+inline __m128 operator - (const __m128 A, const __m128 B)
+{
+ return _mm_sub_ps(A, B);
+}
+
+inline __m128 operator * (const __m128 A, const __m128 B)
+{
+ return _mm_mul_ps(A, B);
+}
+
+#define btCastfTo128i(a) (_mm_castps_si128(a))
+#define btCastfTo128d(a) (_mm_castps_pd(a))
+#define btCastiTo128f(a) (_mm_castsi128_ps(a))
+#define btCastdTo128f(a) (_mm_castpd_ps(a))
+#define btCastdTo128i(a) (_mm_castpd_si128(a))
+#define btAssign128(r0,r1,r2,r3) _mm_setr_ps(r0,r1,r2,r3)
+
+#else//_WIN32
+
+#define btCastfTo128i(a) ((__m128i)(a))
+#define btCastfTo128d(a) ((__m128d)(a))
+#define btCastiTo128f(a) ((__m128) (a))
+#define btCastdTo128f(a) ((__m128) (a))
+#define btCastdTo128i(a) ((__m128i)(a))
+#define btAssign128(r0,r1,r2,r3) (__m128){r0,r1,r2,r3}
+#define BT_INFINITY INFINITY
+#define BT_NAN NAN
+#endif//_WIN32
+#endif //BT_USE_SSE
+
+#ifdef BT_USE_NEON
+#include <arm_neon.h>
+
+typedef float32x4_t btSimdFloat4;
+#define BT_INFINITY INFINITY
+#define BT_NAN NAN
+#define btAssign128(r0,r1,r2,r3) (float32x4_t){r0,r1,r2,r3}
+#endif
+
+
+
#define BT_DECLARE_ALIGNED_ALLOCATOR() \
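The btScalar.h hunk above introduces btSimdFloat4 together with btAssign128 and the btCast*To* helpers so that btVector3 and btQuaternion can share one SIMD code path across MSVC, GCC and Clang. A minimal sketch of how they compose, assuming BT_USE_SSE is in effect as configured above (the helper name is hypothetical, not part of the patch):

	#include "LinearMath/btScalar.h"

	#ifdef BT_USE_SSE
	// btAssign128 expands to _mm_setr_ps under _WIN32 and to a vector literal elsewhere;
	// the _WIN32-only operator overloads are what let the quaternion code above spell
	// _mm_mul_ps(A1, B1) as A1 * B1.
	static btSimdFloat4 scaleByHalf(btSimdFloat4 v)
	{
		const btSimdFloat4 half = btAssign128(0.5f, 0.5f, 0.5f, 0.5f);
		return _mm_mul_ps(v, half);   // spelled as an intrinsic so it builds on both branches
	}
	#endif // BT_USE_SSE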
diff --git a/extern/bullet2/src/LinearMath/btSerializer.cpp b/extern/bullet2/src/LinearMath/btSerializer.cpp
index 49c25b7ea2a..d6b2b3a5a5c 100644
--- a/extern/bullet2/src/LinearMath/btSerializer.cpp
+++ b/extern/bullet2/src/LinearMath/btSerializer.cpp
@@ -1,841 +1,908 @@
char sBulletDNAstr[]= {
-83,68,78,65,78,65,77,69,44,1,0,0,109,95,115,105,122,101,0,109,
-95,99,97,112,97,99,105,116,121,0,42,109,95,100,97,116,97,0,109,95,
-99,111,108,108,105,115,105,111,110,83,104,97,112,101,115,0,109,95,99,111,
-108,108,105,115,105,111,110,79,98,106,101,99,116,115,0,109,95,99,111,110,
-115,116,114,97,105,110,116,115,0,42,102,105,114,115,116,0,42,108,97,115,
-116,0,109,95,102,108,111,97,116,115,91,52,93,0,109,95,101,108,91,51,
-93,0,109,95,98,97,115,105,115,0,109,95,111,114,105,103,105,110,0,109,
-95,114,111,111,116,78,111,100,101,73,110,100,101,120,0,109,95,115,117,98,
-116,114,101,101,83,105,122,101,0,109,95,113,117,97,110,116,105,122,101,100,
-65,97,98,98,77,105,110,91,51,93,0,109,95,113,117,97,110,116,105,122,
-101,100,65,97,98,98,77,97,120,91,51,93,0,109,95,97,97,98,98,77,
-105,110,79,114,103,0,109,95,97,97,98,98,77,97,120,79,114,103,0,109,
-95,101,115,99,97,112,101,73,110,100,101,120,0,109,95,115,117,98,80,97,
-114,116,0,109,95,116,114,105,97,110,103,108,101,73,110,100,101,120,0,109,
-95,112,97,100,91,52,93,0,109,95,101,115,99,97,112,101,73,110,100,101,
-120,79,114,84,114,105,97,110,103,108,101,73,110,100,101,120,0,109,95,98,
-118,104,65,97,98,98,77,105,110,0,109,95,98,118,104,65,97,98,98,77,
-97,120,0,109,95,98,118,104,81,117,97,110,116,105,122,97,116,105,111,110,
-0,109,95,99,117,114,78,111,100,101,73,110,100,101,120,0,109,95,117,115,
-101,81,117,97,110,116,105,122,97,116,105,111,110,0,109,95,110,117,109,67,
-111,110,116,105,103,117,111,117,115,76,101,97,102,78,111,100,101,115,0,109,
-95,110,117,109,81,117,97,110,116,105,122,101,100,67,111,110,116,105,103,117,
-111,117,115,78,111,100,101,115,0,42,109,95,99,111,110,116,105,103,117,111,
-117,115,78,111,100,101,115,80,116,114,0,42,109,95,113,117,97,110,116,105,
-122,101,100,67,111,110,116,105,103,117,111,117,115,78,111,100,101,115,80,116,
-114,0,42,109,95,115,117,98,84,114,101,101,73,110,102,111,80,116,114,0,
-109,95,116,114,97,118,101,114,115,97,108,77,111,100,101,0,109,95,110,117,
-109,83,117,98,116,114,101,101,72,101,97,100,101,114,115,0,42,109,95,110,
-97,109,101,0,109,95,115,104,97,112,101,84,121,112,101,0,109,95,112,97,
-100,100,105,110,103,91,52,93,0,109,95,99,111,108,108,105,115,105,111,110,
-83,104,97,112,101,68,97,116,97,0,109,95,108,111,99,97,108,83,99,97,
-108,105,110,103,0,109,95,112,108,97,110,101,78,111,114,109,97,108,0,109,
-95,112,108,97,110,101,67,111,110,115,116,97,110,116,0,109,95,105,109,112,
-108,105,99,105,116,83,104,97,112,101,68,105,109,101,110,115,105,111,110,115,
-0,109,95,99,111,108,108,105,115,105,111,110,77,97,114,103,105,110,0,109,
-95,112,97,100,100,105,110,103,0,109,95,112,111,115,0,109,95,114,97,100,
-105,117,115,0,109,95,99,111,110,118,101,120,73,110,116,101,114,110,97,108,
-83,104,97,112,101,68,97,116,97,0,42,109,95,108,111,99,97,108,80,111,
-115,105,116,105,111,110,65,114,114,97,121,80,116,114,0,109,95,108,111,99,
-97,108,80,111,115,105,116,105,111,110,65,114,114,97,121,83,105,122,101,0,
-109,95,118,97,108,117,101,0,109,95,112,97,100,91,50,93,0,109,95,118,
-97,108,117,101,115,91,51,93,0,109,95,112,97,100,0,42,109,95,118,101,
-114,116,105,99,101,115,51,102,0,42,109,95,118,101,114,116,105,99,101,115,
-51,100,0,42,109,95,105,110,100,105,99,101,115,51,50,0,42,109,95,51,
-105,110,100,105,99,101,115,49,54,0,42,109,95,51,105,110,100,105,99,101,
-115,56,0,42,109,95,105,110,100,105,99,101,115,49,54,0,109,95,110,117,
-109,84,114,105,97,110,103,108,101,115,0,109,95,110,117,109,86,101,114,116,
-105,99,101,115,0,42,109,95,109,101,115,104,80,97,114,116,115,80,116,114,
-0,109,95,115,99,97,108,105,110,103,0,109,95,110,117,109,77,101,115,104,
-80,97,114,116,115,0,109,95,109,101,115,104,73,110,116,101,114,102,97,99,
-101,0,42,109,95,113,117,97,110,116,105,122,101,100,70,108,111,97,116,66,
-118,104,0,42,109,95,113,117,97,110,116,105,122,101,100,68,111,117,98,108,
-101,66,118,104,0,42,109,95,116,114,105,97,110,103,108,101,73,110,102,111,
-77,97,112,0,109,95,112,97,100,51,91,52,93,0,109,95,116,114,105,109,
-101,115,104,83,104,97,112,101,68,97,116,97,0,109,95,116,114,97,110,115,
-102,111,114,109,0,42,109,95,99,104,105,108,100,83,104,97,112,101,0,109,
-95,99,104,105,108,100,83,104,97,112,101,84,121,112,101,0,109,95,99,104,
-105,108,100,77,97,114,103,105,110,0,42,109,95,99,104,105,108,100,83,104,
-97,112,101,80,116,114,0,109,95,110,117,109,67,104,105,108,100,83,104,97,
-112,101,115,0,109,95,117,112,65,120,105,115,0,109,95,102,108,97,103,115,
-0,109,95,101,100,103,101,86,48,86,49,65,110,103,108,101,0,109,95,101,
-100,103,101,86,49,86,50,65,110,103,108,101,0,109,95,101,100,103,101,86,
-50,86,48,65,110,103,108,101,0,42,109,95,104,97,115,104,84,97,98,108,
-101,80,116,114,0,42,109,95,110,101,120,116,80,116,114,0,42,109,95,118,
-97,108,117,101,65,114,114,97,121,80,116,114,0,42,109,95,107,101,121,65,
-114,114,97,121,80,116,114,0,109,95,99,111,110,118,101,120,69,112,115,105,
-108,111,110,0,109,95,112,108,97,110,97,114,69,112,115,105,108,111,110,0,
-109,95,101,113,117,97,108,86,101,114,116,101,120,84,104,114,101,115,104,111,
-108,100,0,109,95,101,100,103,101,68,105,115,116,97,110,99,101,84,104,114,
-101,115,104,111,108,100,0,109,95,122,101,114,111,65,114,101,97,84,104,114,
-101,115,104,111,108,100,0,109,95,110,101,120,116,83,105,122,101,0,109,95,
-104,97,115,104,84,97,98,108,101,83,105,122,101,0,109,95,110,117,109,86,
-97,108,117,101,115,0,109,95,110,117,109,75,101,121,115,0,109,95,103,105,
-109,112,97,99,116,83,117,98,84,121,112,101,0,42,109,95,117,110,115,99,
-97,108,101,100,80,111,105,110,116,115,70,108,111,97,116,80,116,114,0,42,
-109,95,117,110,115,99,97,108,101,100,80,111,105,110,116,115,68,111,117,98,
-108,101,80,116,114,0,109,95,110,117,109,85,110,115,99,97,108,101,100,80,
-111,105,110,116,115,0,109,95,112,97,100,100,105,110,103,51,91,52,93,0,
-42,109,95,98,114,111,97,100,112,104,97,115,101,72,97,110,100,108,101,0,
-42,109,95,99,111,108,108,105,115,105,111,110,83,104,97,112,101,0,42,109,
-95,114,111,111,116,67,111,108,108,105,115,105,111,110,83,104,97,112,101,0,
-109,95,119,111,114,108,100,84,114,97,110,115,102,111,114,109,0,109,95,105,
-110,116,101,114,112,111,108,97,116,105,111,110,87,111,114,108,100,84,114,97,
-110,115,102,111,114,109,0,109,95,105,110,116,101,114,112,111,108,97,116,105,
-111,110,76,105,110,101,97,114,86,101,108,111,99,105,116,121,0,109,95,105,
-110,116,101,114,112,111,108,97,116,105,111,110,65,110,103,117,108,97,114,86,
-101,108,111,99,105,116,121,0,109,95,97,110,105,115,111,116,114,111,112,105,
-99,70,114,105,99,116,105,111,110,0,109,95,99,111,110,116,97,99,116,80,
-114,111,99,101,115,115,105,110,103,84,104,114,101,115,104,111,108,100,0,109,
-95,100,101,97,99,116,105,118,97,116,105,111,110,84,105,109,101,0,109,95,
-102,114,105,99,116,105,111,110,0,109,95,114,101,115,116,105,116,117,116,105,
-111,110,0,109,95,104,105,116,70,114,97,99,116,105,111,110,0,109,95,99,
-99,100,83,119,101,112,116,83,112,104,101,114,101,82,97,100,105,117,115,0,
-109,95,99,99,100,77,111,116,105,111,110,84,104,114,101,115,104,111,108,100,
-0,109,95,104,97,115,65,110,105,115,111,116,114,111,112,105,99,70,114,105,
-99,116,105,111,110,0,109,95,99,111,108,108,105,115,105,111,110,70,108,97,
-103,115,0,109,95,105,115,108,97,110,100,84,97,103,49,0,109,95,99,111,
-109,112,97,110,105,111,110,73,100,0,109,95,97,99,116,105,118,97,116,105,
-111,110,83,116,97,116,101,49,0,109,95,105,110,116,101,114,110,97,108,84,
-121,112,101,0,109,95,99,104,101,99,107,67,111,108,108,105,100,101,87,105,
-116,104,0,109,95,99,111,108,108,105,115,105,111,110,79,98,106,101,99,116,
-68,97,116,97,0,109,95,105,110,118,73,110,101,114,116,105,97,84,101,110,
-115,111,114,87,111,114,108,100,0,109,95,108,105,110,101,97,114,86,101,108,
-111,99,105,116,121,0,109,95,97,110,103,117,108,97,114,86,101,108,111,99,
-105,116,121,0,109,95,97,110,103,117,108,97,114,70,97,99,116,111,114,0,
-109,95,108,105,110,101,97,114,70,97,99,116,111,114,0,109,95,103,114,97,
-118,105,116,121,0,109,95,103,114,97,118,105,116,121,95,97,99,99,101,108,
-101,114,97,116,105,111,110,0,109,95,105,110,118,73,110,101,114,116,105,97,
-76,111,99,97,108,0,109,95,116,111,116,97,108,70,111,114,99,101,0,109,
-95,116,111,116,97,108,84,111,114,113,117,101,0,109,95,105,110,118,101,114,
-115,101,77,97,115,115,0,109,95,108,105,110,101,97,114,68,97,109,112,105,
-110,103,0,109,95,97,110,103,117,108,97,114,68,97,109,112,105,110,103,0,
-109,95,97,100,100,105,116,105,111,110,97,108,68,97,109,112,105,110,103,70,
-97,99,116,111,114,0,109,95,97,100,100,105,116,105,111,110,97,108,76,105,
-110,101,97,114,68,97,109,112,105,110,103,84,104,114,101,115,104,111,108,100,
-83,113,114,0,109,95,97,100,100,105,116,105,111,110,97,108,65,110,103,117,
-108,97,114,68,97,109,112,105,110,103,84,104,114,101,115,104,111,108,100,83,
-113,114,0,109,95,97,100,100,105,116,105,111,110,97,108,65,110,103,117,108,
-97,114,68,97,109,112,105,110,103,70,97,99,116,111,114,0,109,95,108,105,
-110,101,97,114,83,108,101,101,112,105,110,103,84,104,114,101,115,104,111,108,
-100,0,109,95,97,110,103,117,108,97,114,83,108,101,101,112,105,110,103,84,
-104,114,101,115,104,111,108,100,0,109,95,97,100,100,105,116,105,111,110,97,
-108,68,97,109,112,105,110,103,0,109,95,110,117,109,67,111,110,115,116,114,
-97,105,110,116,82,111,119,115,0,110,117,98,0,42,109,95,114,98,65,0,
-42,109,95,114,98,66,0,109,95,111,98,106,101,99,116,84,121,112,101,0,
-109,95,117,115,101,114,67,111,110,115,116,114,97,105,110,116,84,121,112,101,
-0,109,95,117,115,101,114,67,111,110,115,116,114,97,105,110,116,73,100,0,
-109,95,110,101,101,100,115,70,101,101,100,98,97,99,107,0,109,95,97,112,
-112,108,105,101,100,73,109,112,117,108,115,101,0,109,95,100,98,103,68,114,
-97,119,83,105,122,101,0,109,95,100,105,115,97,98,108,101,67,111,108,108,
-105,115,105,111,110,115,66,101,116,119,101,101,110,76,105,110,107,101,100,66,
-111,100,105,101,115,0,109,95,111,118,101,114,114,105,100,101,78,117,109,83,
-111,108,118,101,114,73,116,101,114,97,116,105,111,110,115,0,109,95,98,114,
-101,97,107,105,110,103,73,109,112,117,108,115,101,84,104,114,101,115,104,111,
-108,100,0,109,95,105,115,69,110,97,98,108,101,100,0,109,95,116,121,112,
-101,67,111,110,115,116,114,97,105,110,116,68,97,116,97,0,109,95,112,105,
-118,111,116,73,110,65,0,109,95,112,105,118,111,116,73,110,66,0,109,95,
-114,98,65,70,114,97,109,101,0,109,95,114,98,66,70,114,97,109,101,0,
-109,95,117,115,101,82,101,102,101,114,101,110,99,101,70,114,97,109,101,65,
-0,109,95,97,110,103,117,108,97,114,79,110,108,121,0,109,95,101,110,97,
-98,108,101,65,110,103,117,108,97,114,77,111,116,111,114,0,109,95,109,111,
-116,111,114,84,97,114,103,101,116,86,101,108,111,99,105,116,121,0,109,95,
-109,97,120,77,111,116,111,114,73,109,112,117,108,115,101,0,109,95,108,111,
-119,101,114,76,105,109,105,116,0,109,95,117,112,112,101,114,76,105,109,105,
-116,0,109,95,108,105,109,105,116,83,111,102,116,110,101,115,115,0,109,95,
-98,105,97,115,70,97,99,116,111,114,0,109,95,114,101,108,97,120,97,116,
-105,111,110,70,97,99,116,111,114,0,109,95,115,119,105,110,103,83,112,97,
-110,49,0,109,95,115,119,105,110,103,83,112,97,110,50,0,109,95,116,119,
-105,115,116,83,112,97,110,0,109,95,100,97,109,112,105,110,103,0,109,95,
-108,105,110,101,97,114,85,112,112,101,114,76,105,109,105,116,0,109,95,108,
-105,110,101,97,114,76,111,119,101,114,76,105,109,105,116,0,109,95,97,110,
-103,117,108,97,114,85,112,112,101,114,76,105,109,105,116,0,109,95,97,110,
-103,117,108,97,114,76,111,119,101,114,76,105,109,105,116,0,109,95,117,115,
-101,76,105,110,101,97,114,82,101,102,101,114,101,110,99,101,70,114,97,109,
-101,65,0,109,95,117,115,101,79,102,102,115,101,116,70,111,114,67,111,110,
-115,116,114,97,105,110,116,70,114,97,109,101,0,109,95,54,100,111,102,68,
-97,116,97,0,109,95,115,112,114,105,110,103,69,110,97,98,108,101,100,91,
-54,93,0,109,95,101,113,117,105,108,105,98,114,105,117,109,80,111,105,110,
-116,91,54,93,0,109,95,115,112,114,105,110,103,83,116,105,102,102,110,101,
-115,115,91,54,93,0,109,95,115,112,114,105,110,103,68,97,109,112,105,110,
-103,91,54,93,0,109,95,108,105,110,101,97,114,83,116,105,102,102,110,101,
-115,115,0,109,95,97,110,103,117,108,97,114,83,116,105,102,102,110,101,115,
-115,0,109,95,118,111,108,117,109,101,83,116,105,102,102,110,101,115,115,0,
-42,109,95,109,97,116,101,114,105,97,108,0,109,95,112,111,115,105,116,105,
-111,110,0,109,95,112,114,101,118,105,111,117,115,80,111,115,105,116,105,111,
-110,0,109,95,118,101,108,111,99,105,116,121,0,109,95,97,99,99,117,109,
-117,108,97,116,101,100,70,111,114,99,101,0,109,95,110,111,114,109,97,108,
-0,109,95,97,114,101,97,0,109,95,97,116,116,97,99,104,0,109,95,110,
-111,100,101,73,110,100,105,99,101,115,91,50,93,0,109,95,114,101,115,116,
-76,101,110,103,116,104,0,109,95,98,98,101,110,100,105,110,103,0,109,95,
-110,111,100,101,73,110,100,105,99,101,115,91,51,93,0,109,95,114,101,115,
-116,65,114,101,97,0,109,95,99,48,91,52,93,0,109,95,110,111,100,101,
-73,110,100,105,99,101,115,91,52,93,0,109,95,114,101,115,116,86,111,108,
-117,109,101,0,109,95,99,49,0,109,95,99,50,0,109,95,99,48,0,109,
-95,108,111,99,97,108,70,114,97,109,101,0,42,109,95,114,105,103,105,100,
-66,111,100,121,0,109,95,110,111,100,101,73,110,100,101,120,0,109,95,97,
-101,114,111,77,111,100,101,108,0,109,95,98,97,117,109,103,97,114,116,101,
-0,109,95,100,114,97,103,0,109,95,108,105,102,116,0,109,95,112,114,101,
-115,115,117,114,101,0,109,95,118,111,108,117,109,101,0,109,95,100,121,110,
-97,109,105,99,70,114,105,99,116,105,111,110,0,109,95,112,111,115,101,77,
-97,116,99,104,0,109,95,114,105,103,105,100,67,111,110,116,97,99,116,72,
-97,114,100,110,101,115,115,0,109,95,107,105,110,101,116,105,99,67,111,110,
-116,97,99,116,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,67,
-111,110,116,97,99,116,72,97,114,100,110,101,115,115,0,109,95,97,110,99,
-104,111,114,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,82,105,
-103,105,100,67,108,117,115,116,101,114,72,97,114,100,110,101,115,115,0,109,
-95,115,111,102,116,75,105,110,101,116,105,99,67,108,117,115,116,101,114,72,
-97,114,100,110,101,115,115,0,109,95,115,111,102,116,83,111,102,116,67,108,
-117,115,116,101,114,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,
-82,105,103,105,100,67,108,117,115,116,101,114,73,109,112,117,108,115,101,83,
-112,108,105,116,0,109,95,115,111,102,116,75,105,110,101,116,105,99,67,108,
-117,115,116,101,114,73,109,112,117,108,115,101,83,112,108,105,116,0,109,95,
-115,111,102,116,83,111,102,116,67,108,117,115,116,101,114,73,109,112,117,108,
-115,101,83,112,108,105,116,0,109,95,109,97,120,86,111,108,117,109,101,0,
-109,95,116,105,109,101,83,99,97,108,101,0,109,95,118,101,108,111,99,105,
-116,121,73,116,101,114,97,116,105,111,110,115,0,109,95,112,111,115,105,116,
-105,111,110,73,116,101,114,97,116,105,111,110,115,0,109,95,100,114,105,102,
-116,73,116,101,114,97,116,105,111,110,115,0,109,95,99,108,117,115,116,101,
-114,73,116,101,114,97,116,105,111,110,115,0,109,95,114,111,116,0,109,95,
-115,99,97,108,101,0,109,95,97,113,113,0,109,95,99,111,109,0,42,109,
-95,112,111,115,105,116,105,111,110,115,0,42,109,95,119,101,105,103,104,116,
-115,0,109,95,110,117,109,80,111,115,105,116,105,111,110,115,0,109,95,110,
-117,109,87,101,105,103,116,115,0,109,95,98,118,111,108,117,109,101,0,109,
-95,98,102,114,97,109,101,0,109,95,102,114,97,109,101,120,102,111,114,109,
-0,109,95,108,111,99,105,105,0,109,95,105,110,118,119,105,0,109,95,118,
-105,109,112,117,108,115,101,115,91,50,93,0,109,95,100,105,109,112,117,108,
-115,101,115,91,50,93,0,109,95,108,118,0,109,95,97,118,0,42,109,95,
-102,114,97,109,101,114,101,102,115,0,42,109,95,110,111,100,101,73,110,100,
-105,99,101,115,0,42,109,95,109,97,115,115,101,115,0,109,95,110,117,109,
-70,114,97,109,101,82,101,102,115,0,109,95,110,117,109,78,111,100,101,115,
-0,109,95,110,117,109,77,97,115,115,101,115,0,109,95,105,100,109,97,115,
-115,0,109,95,105,109,97,115,115,0,109,95,110,118,105,109,112,117,108,115,
-101,115,0,109,95,110,100,105,109,112,117,108,115,101,115,0,109,95,110,100,
-97,109,112,105,110,103,0,109,95,108,100,97,109,112,105,110,103,0,109,95,
-97,100,97,109,112,105,110,103,0,109,95,109,97,116,99,104,105,110,103,0,
-109,95,109,97,120,83,101,108,102,67,111,108,108,105,115,105,111,110,73,109,
-112,117,108,115,101,0,109,95,115,101,108,102,67,111,108,108,105,115,105,111,
-110,73,109,112,117,108,115,101,70,97,99,116,111,114,0,109,95,99,111,110,
-116,97,105,110,115,65,110,99,104,111,114,0,109,95,99,111,108,108,105,100,
-101,0,109,95,99,108,117,115,116,101,114,73,110,100,101,120,0,42,109,95,
-98,111,100,121,65,0,42,109,95,98,111,100,121,66,0,109,95,114,101,102,
-115,91,50,93,0,109,95,99,102,109,0,109,95,101,114,112,0,109,95,115,
-112,108,105,116,0,109,95,100,101,108,101,116,101,0,109,95,114,101,108,80,
-111,115,105,116,105,111,110,91,50,93,0,109,95,98,111,100,121,65,116,121,
-112,101,0,109,95,98,111,100,121,66,116,121,112,101,0,109,95,106,111,105,
-110,116,84,121,112,101,0,42,109,95,112,111,115,101,0,42,42,109,95,109,
-97,116,101,114,105,97,108,115,0,42,109,95,110,111,100,101,115,0,42,109,
-95,108,105,110,107,115,0,42,109,95,102,97,99,101,115,0,42,109,95,116,
-101,116,114,97,104,101,100,114,97,0,42,109,95,97,110,99,104,111,114,115,
-0,42,109,95,99,108,117,115,116,101,114,115,0,42,109,95,106,111,105,110,
-116,115,0,109,95,110,117,109,77,97,116,101,114,105,97,108,115,0,109,95,
-110,117,109,76,105,110,107,115,0,109,95,110,117,109,70,97,99,101,115,0,
-109,95,110,117,109,84,101,116,114,97,104,101,100,114,97,0,109,95,110,117,
-109,65,110,99,104,111,114,115,0,109,95,110,117,109,67,108,117,115,116,101,
-114,115,0,109,95,110,117,109,74,111,105,110,116,115,0,109,95,99,111,110,
-102,105,103,0,84,89,80,69,72,0,0,0,99,104,97,114,0,117,99,104,
-97,114,0,115,104,111,114,116,0,117,115,104,111,114,116,0,105,110,116,0,
-108,111,110,103,0,117,108,111,110,103,0,102,108,111,97,116,0,100,111,117,
-98,108,101,0,118,111,105,100,0,80,111,105,110,116,101,114,65,114,114,97,
-121,0,98,116,80,104,121,115,105,99,115,83,121,115,116,101,109,0,76,105,
-115,116,66,97,115,101,0,98,116,86,101,99,116,111,114,51,70,108,111,97,
-116,68,97,116,97,0,98,116,86,101,99,116,111,114,51,68,111,117,98,108,
-101,68,97,116,97,0,98,116,77,97,116,114,105,120,51,120,51,70,108,111,
-97,116,68,97,116,97,0,98,116,77,97,116,114,105,120,51,120,51,68,111,
-117,98,108,101,68,97,116,97,0,98,116,84,114,97,110,115,102,111,114,109,
-70,108,111,97,116,68,97,116,97,0,98,116,84,114,97,110,115,102,111,114,
-109,68,111,117,98,108,101,68,97,116,97,0,98,116,66,118,104,83,117,98,
-116,114,101,101,73,110,102,111,68,97,116,97,0,98,116,79,112,116,105,109,
-105,122,101,100,66,118,104,78,111,100,101,70,108,111,97,116,68,97,116,97,
-0,98,116,79,112,116,105,109,105,122,101,100,66,118,104,78,111,100,101,68,
-111,117,98,108,101,68,97,116,97,0,98,116,81,117,97,110,116,105,122,101,
-100,66,118,104,78,111,100,101,68,97,116,97,0,98,116,81,117,97,110,116,
-105,122,101,100,66,118,104,70,108,111,97,116,68,97,116,97,0,98,116,81,
-117,97,110,116,105,122,101,100,66,118,104,68,111,117,98,108,101,68,97,116,
-97,0,98,116,67,111,108,108,105,115,105,111,110,83,104,97,112,101,68,97,
-116,97,0,98,116,83,116,97,116,105,99,80,108,97,110,101,83,104,97,112,
-101,68,97,116,97,0,98,116,67,111,110,118,101,120,73,110,116,101,114,110,
-97,108,83,104,97,112,101,68,97,116,97,0,98,116,80,111,115,105,116,105,
-111,110,65,110,100,82,97,100,105,117,115,0,98,116,77,117,108,116,105,83,
-112,104,101,114,101,83,104,97,112,101,68,97,116,97,0,98,116,73,110,116,
-73,110,100,101,120,68,97,116,97,0,98,116,83,104,111,114,116,73,110,116,
-73,110,100,101,120,68,97,116,97,0,98,116,83,104,111,114,116,73,110,116,
-73,110,100,101,120,84,114,105,112,108,101,116,68,97,116,97,0,98,116,67,
-104,97,114,73,110,100,101,120,84,114,105,112,108,101,116,68,97,116,97,0,
-98,116,77,101,115,104,80,97,114,116,68,97,116,97,0,98,116,83,116,114,
-105,100,105,110,103,77,101,115,104,73,110,116,101,114,102,97,99,101,68,97,
-116,97,0,98,116,84,114,105,97,110,103,108,101,77,101,115,104,83,104,97,
-112,101,68,97,116,97,0,98,116,84,114,105,97,110,103,108,101,73,110,102,
-111,77,97,112,68,97,116,97,0,98,116,83,99,97,108,101,100,84,114,105,
-97,110,103,108,101,77,101,115,104,83,104,97,112,101,68,97,116,97,0,98,
-116,67,111,109,112,111,117,110,100,83,104,97,112,101,67,104,105,108,100,68,
-97,116,97,0,98,116,67,111,109,112,111,117,110,100,83,104,97,112,101,68,
-97,116,97,0,98,116,67,121,108,105,110,100,101,114,83,104,97,112,101,68,
-97,116,97,0,98,116,67,97,112,115,117,108,101,83,104,97,112,101,68,97,
-116,97,0,98,116,84,114,105,97,110,103,108,101,73,110,102,111,68,97,116,
-97,0,98,116,71,73,109,112,97,99,116,77,101,115,104,83,104,97,112,101,
-68,97,116,97,0,98,116,67,111,110,118,101,120,72,117,108,108,83,104,97,
-112,101,68,97,116,97,0,98,116,67,111,108,108,105,115,105,111,110,79,98,
-106,101,99,116,68,111,117,98,108,101,68,97,116,97,0,98,116,67,111,108,
-108,105,115,105,111,110,79,98,106,101,99,116,70,108,111,97,116,68,97,116,
-97,0,98,116,82,105,103,105,100,66,111,100,121,70,108,111,97,116,68,97,
-116,97,0,98,116,82,105,103,105,100,66,111,100,121,68,111,117,98,108,101,
-68,97,116,97,0,98,116,67,111,110,115,116,114,97,105,110,116,73,110,102,
-111,49,0,98,116,84,121,112,101,100,67,111,110,115,116,114,97,105,110,116,
-68,97,116,97,0,98,116,82,105,103,105,100,66,111,100,121,68,97,116,97,
-0,98,116,80,111,105,110,116,50,80,111,105,110,116,67,111,110,115,116,114,
-97,105,110,116,70,108,111,97,116,68,97,116,97,0,98,116,80,111,105,110,
-116,50,80,111,105,110,116,67,111,110,115,116,114,97,105,110,116,68,111,117,
-98,108,101,68,97,116,97,0,98,116,72,105,110,103,101,67,111,110,115,116,
-114,97,105,110,116,68,111,117,98,108,101,68,97,116,97,0,98,116,72,105,
-110,103,101,67,111,110,115,116,114,97,105,110,116,70,108,111,97,116,68,97,
-116,97,0,98,116,67,111,110,101,84,119,105,115,116,67,111,110,115,116,114,
-97,105,110,116,68,97,116,97,0,98,116,71,101,110,101,114,105,99,54,68,
-111,102,67,111,110,115,116,114,97,105,110,116,68,97,116,97,0,98,116,71,
-101,110,101,114,105,99,54,68,111,102,83,112,114,105,110,103,67,111,110,115,
-116,114,97,105,110,116,68,97,116,97,0,98,116,83,108,105,100,101,114,67,
-111,110,115,116,114,97,105,110,116,68,97,116,97,0,83,111,102,116,66,111,
-100,121,77,97,116,101,114,105,97,108,68,97,116,97,0,83,111,102,116,66,
-111,100,121,78,111,100,101,68,97,116,97,0,83,111,102,116,66,111,100,121,
-76,105,110,107,68,97,116,97,0,83,111,102,116,66,111,100,121,70,97,99,
-101,68,97,116,97,0,83,111,102,116,66,111,100,121,84,101,116,114,97,68,
-97,116,97,0,83,111,102,116,82,105,103,105,100,65,110,99,104,111,114,68,
-97,116,97,0,83,111,102,116,66,111,100,121,67,111,110,102,105,103,68,97,
-116,97,0,83,111,102,116,66,111,100,121,80,111,115,101,68,97,116,97,0,
-83,111,102,116,66,111,100,121,67,108,117,115,116,101,114,68,97,116,97,0,
-98,116,83,111,102,116,66,111,100,121,74,111,105,110,116,68,97,116,97,0,
-98,116,83,111,102,116,66,111,100,121,70,108,111,97,116,68,97,116,97,0,
-84,76,69,78,1,0,1,0,2,0,2,0,4,0,4,0,4,0,4,0,
-8,0,0,0,12,0,36,0,8,0,16,0,32,0,48,0,96,0,64,0,
--128,0,20,0,48,0,80,0,16,0,84,0,-124,0,12,0,52,0,52,0,
-20,0,64,0,4,0,4,0,8,0,4,0,32,0,28,0,60,0,56,0,
-76,0,76,0,24,0,60,0,60,0,16,0,64,0,68,0,-56,1,-8,0,
--32,1,-104,3,8,0,52,0,0,0,84,0,116,0,92,1,-36,0,-44,0,
--4,0,92,1,-52,0,16,0,100,0,20,0,36,0,100,0,92,0,104,0,
--64,0,92,1,104,0,-92,1,83,84,82,67,61,0,0,0,10,0,3,0,
-4,0,0,0,4,0,1,0,9,0,2,0,11,0,3,0,10,0,3,0,
-10,0,4,0,10,0,5,0,12,0,2,0,9,0,6,0,9,0,7,0,
-13,0,1,0,7,0,8,0,14,0,1,0,8,0,8,0,15,0,1,0,
-13,0,9,0,16,0,1,0,14,0,9,0,17,0,2,0,15,0,10,0,
-13,0,11,0,18,0,2,0,16,0,10,0,14,0,11,0,19,0,4,0,
-4,0,12,0,4,0,13,0,2,0,14,0,2,0,15,0,20,0,6,0,
-13,0,16,0,13,0,17,0,4,0,18,0,4,0,19,0,4,0,20,0,
-0,0,21,0,21,0,6,0,14,0,16,0,14,0,17,0,4,0,18,0,
-4,0,19,0,4,0,20,0,0,0,21,0,22,0,3,0,2,0,14,0,
-2,0,15,0,4,0,22,0,23,0,12,0,13,0,23,0,13,0,24,0,
-13,0,25,0,4,0,26,0,4,0,27,0,4,0,28,0,4,0,29,0,
-20,0,30,0,22,0,31,0,19,0,32,0,4,0,33,0,4,0,34,0,
-24,0,12,0,14,0,23,0,14,0,24,0,14,0,25,0,4,0,26,0,
-4,0,27,0,4,0,28,0,4,0,29,0,21,0,30,0,22,0,31,0,
-4,0,33,0,4,0,34,0,19,0,32,0,25,0,3,0,0,0,35,0,
-4,0,36,0,0,0,37,0,26,0,5,0,25,0,38,0,13,0,39,0,
-13,0,40,0,7,0,41,0,0,0,21,0,27,0,5,0,25,0,38,0,
-13,0,39,0,13,0,42,0,7,0,43,0,4,0,44,0,28,0,2,0,
-13,0,45,0,7,0,46,0,29,0,4,0,27,0,47,0,28,0,48,0,
-4,0,49,0,0,0,37,0,30,0,1,0,4,0,50,0,31,0,2,0,
-2,0,50,0,0,0,51,0,32,0,2,0,2,0,52,0,0,0,51,0,
-33,0,2,0,0,0,52,0,0,0,53,0,34,0,8,0,13,0,54,0,
-14,0,55,0,30,0,56,0,32,0,57,0,33,0,58,0,31,0,59,0,
-4,0,60,0,4,0,61,0,35,0,4,0,34,0,62,0,13,0,63,0,
-4,0,64,0,0,0,37,0,36,0,7,0,25,0,38,0,35,0,65,0,
-23,0,66,0,24,0,67,0,37,0,68,0,7,0,43,0,0,0,69,0,
-38,0,2,0,36,0,70,0,13,0,39,0,39,0,4,0,17,0,71,0,
-25,0,72,0,4,0,73,0,7,0,74,0,40,0,4,0,25,0,38,0,
-39,0,75,0,4,0,76,0,7,0,43,0,41,0,3,0,27,0,47,0,
-4,0,77,0,0,0,37,0,42,0,3,0,27,0,47,0,4,0,77,0,
-0,0,37,0,43,0,4,0,4,0,78,0,7,0,79,0,7,0,80,0,
-7,0,81,0,37,0,14,0,4,0,82,0,4,0,83,0,43,0,84,0,
-4,0,85,0,7,0,86,0,7,0,87,0,7,0,88,0,7,0,89,0,
-7,0,90,0,4,0,91,0,4,0,92,0,4,0,93,0,4,0,94,0,
-0,0,37,0,44,0,5,0,25,0,38,0,35,0,65,0,13,0,39,0,
-7,0,43,0,4,0,95,0,45,0,5,0,27,0,47,0,13,0,96,0,
-14,0,97,0,4,0,98,0,0,0,99,0,46,0,24,0,9,0,100,0,
-9,0,101,0,25,0,102,0,0,0,35,0,18,0,103,0,18,0,104,0,
-14,0,105,0,14,0,106,0,14,0,107,0,8,0,108,0,8,0,109,0,
-8,0,110,0,8,0,111,0,8,0,112,0,8,0,113,0,8,0,114,0,
-4,0,115,0,4,0,116,0,4,0,117,0,4,0,118,0,4,0,119,0,
-4,0,120,0,4,0,121,0,0,0,37,0,47,0,23,0,9,0,100,0,
-9,0,101,0,25,0,102,0,0,0,35,0,17,0,103,0,17,0,104,0,
-13,0,105,0,13,0,106,0,13,0,107,0,7,0,108,0,7,0,109,0,
-7,0,110,0,7,0,111,0,7,0,112,0,7,0,113,0,7,0,114,0,
-4,0,115,0,4,0,116,0,4,0,117,0,4,0,118,0,4,0,119,0,
-4,0,120,0,4,0,121,0,48,0,21,0,47,0,122,0,15,0,123,0,
-13,0,124,0,13,0,125,0,13,0,126,0,13,0,127,0,13,0,-128,0,
-13,0,-127,0,13,0,-126,0,13,0,-125,0,13,0,-124,0,7,0,-123,0,
-7,0,-122,0,7,0,-121,0,7,0,-120,0,7,0,-119,0,7,0,-118,0,
-7,0,-117,0,7,0,-116,0,7,0,-115,0,4,0,-114,0,49,0,22,0,
-46,0,122,0,16,0,123,0,14,0,124,0,14,0,125,0,14,0,126,0,
-14,0,127,0,14,0,-128,0,14,0,-127,0,14,0,-126,0,14,0,-125,0,
-14,0,-124,0,8,0,-123,0,8,0,-122,0,8,0,-121,0,8,0,-120,0,
-8,0,-119,0,8,0,-118,0,8,0,-117,0,8,0,-116,0,8,0,-115,0,
-4,0,-114,0,0,0,37,0,50,0,2,0,4,0,-113,0,4,0,-112,0,
-51,0,13,0,52,0,-111,0,52,0,-110,0,0,0,35,0,4,0,-109,0,
-4,0,-108,0,4,0,-107,0,4,0,-106,0,7,0,-105,0,7,0,-104,0,
-4,0,-103,0,4,0,-102,0,7,0,-101,0,4,0,-100,0,53,0,3,0,
-51,0,-99,0,13,0,-98,0,13,0,-97,0,54,0,3,0,51,0,-99,0,
-14,0,-98,0,14,0,-97,0,55,0,13,0,51,0,-99,0,18,0,-96,0,
-18,0,-95,0,4,0,-94,0,4,0,-93,0,4,0,-92,0,7,0,-91,0,
-7,0,-90,0,7,0,-89,0,7,0,-88,0,7,0,-87,0,7,0,-86,0,
-7,0,-85,0,56,0,13,0,51,0,-99,0,17,0,-96,0,17,0,-95,0,
-4,0,-94,0,4,0,-93,0,4,0,-92,0,7,0,-91,0,7,0,-90,0,
-7,0,-89,0,7,0,-88,0,7,0,-87,0,7,0,-86,0,7,0,-85,0,
-57,0,11,0,51,0,-99,0,17,0,-96,0,17,0,-95,0,7,0,-84,0,
-7,0,-83,0,7,0,-82,0,7,0,-87,0,7,0,-86,0,7,0,-85,0,
-7,0,-81,0,0,0,21,0,58,0,9,0,51,0,-99,0,17,0,-96,0,
-17,0,-95,0,13,0,-80,0,13,0,-79,0,13,0,-78,0,13,0,-77,0,
-4,0,-76,0,4,0,-75,0,59,0,5,0,58,0,-74,0,4,0,-73,0,
-7,0,-72,0,7,0,-71,0,7,0,-70,0,60,0,9,0,51,0,-99,0,
-17,0,-96,0,17,0,-95,0,7,0,-80,0,7,0,-79,0,7,0,-78,0,
-7,0,-77,0,4,0,-76,0,4,0,-75,0,61,0,4,0,7,0,-69,0,
-7,0,-68,0,7,0,-67,0,4,0,78,0,62,0,10,0,61,0,-66,0,
-13,0,-65,0,13,0,-64,0,13,0,-63,0,13,0,-62,0,13,0,-61,0,
-7,0,-123,0,7,0,-60,0,4,0,-59,0,4,0,53,0,63,0,4,0,
-61,0,-66,0,4,0,-58,0,7,0,-57,0,4,0,-56,0,64,0,4,0,
-13,0,-61,0,61,0,-66,0,4,0,-55,0,7,0,-54,0,65,0,7,0,
-13,0,-53,0,61,0,-66,0,4,0,-52,0,7,0,-51,0,7,0,-50,0,
-7,0,-49,0,4,0,53,0,66,0,6,0,15,0,-48,0,13,0,-50,0,
-13,0,-47,0,52,0,-46,0,4,0,-45,0,7,0,-49,0,67,0,26,0,
-4,0,-44,0,7,0,-43,0,7,0,-81,0,7,0,-42,0,7,0,-41,0,
-7,0,-40,0,7,0,-39,0,7,0,-38,0,7,0,-37,0,7,0,-36,0,
-7,0,-35,0,7,0,-34,0,7,0,-33,0,7,0,-32,0,7,0,-31,0,
-7,0,-30,0,7,0,-29,0,7,0,-28,0,7,0,-27,0,7,0,-26,0,
-7,0,-25,0,4,0,-24,0,4,0,-23,0,4,0,-22,0,4,0,-21,0,
-4,0,116,0,68,0,12,0,15,0,-20,0,15,0,-19,0,15,0,-18,0,
-13,0,-17,0,13,0,-16,0,7,0,-15,0,4,0,-14,0,4,0,-13,0,
-4,0,-12,0,4,0,-11,0,7,0,-51,0,4,0,53,0,69,0,27,0,
-17,0,-10,0,15,0,-9,0,15,0,-8,0,13,0,-17,0,13,0,-7,0,
-13,0,-6,0,13,0,-5,0,13,0,-4,0,13,0,-3,0,4,0,-2,0,
-7,0,-1,0,4,0,0,1,4,0,1,1,4,0,2,1,7,0,3,1,
-7,0,4,1,4,0,5,1,4,0,6,1,7,0,7,1,7,0,8,1,
-7,0,9,1,7,0,10,1,7,0,11,1,7,0,12,1,4,0,13,1,
-4,0,14,1,4,0,15,1,70,0,12,0,9,0,16,1,9,0,17,1,
-13,0,18,1,7,0,19,1,7,0,20,1,7,0,21,1,4,0,22,1,
-13,0,23,1,4,0,24,1,4,0,25,1,4,0,26,1,4,0,53,0,
-71,0,19,0,47,0,122,0,68,0,27,1,61,0,28,1,62,0,29,1,
-63,0,30,1,64,0,31,1,65,0,32,1,66,0,33,1,69,0,34,1,
-70,0,35,1,4,0,36,1,4,0,1,1,4,0,37,1,4,0,38,1,
-4,0,39,1,4,0,40,1,4,0,41,1,4,0,42,1,67,0,43,1,
-};
+char(83),char(68),char(78),char(65),char(78),char(65),char(77),char(69),char(63),char(1),char(0),char(0),char(109),char(95),char(115),char(105),char(122),char(101),char(0),char(109),
+char(95),char(99),char(97),char(112),char(97),char(99),char(105),char(116),char(121),char(0),char(42),char(109),char(95),char(100),char(97),char(116),char(97),char(0),char(109),char(95),
+char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(115),char(0),char(109),char(95),char(99),char(111),
+char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),char(115),char(0),char(109),char(95),char(99),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(115),char(0),char(42),char(102),char(105),char(114),char(115),char(116),char(0),char(42),char(108),char(97),char(115),
+char(116),char(0),char(109),char(95),char(102),char(108),char(111),char(97),char(116),char(115),char(91),char(52),char(93),char(0),char(109),char(95),char(101),char(108),char(91),char(51),
+char(93),char(0),char(109),char(95),char(98),char(97),char(115),char(105),char(115),char(0),char(109),char(95),char(111),char(114),char(105),char(103),char(105),char(110),char(0),char(109),
+char(95),char(114),char(111),char(111),char(116),char(78),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(115),char(117),char(98),
+char(116),char(114),char(101),char(101),char(83),char(105),char(122),char(101),char(0),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),
+char(65),char(97),char(98),char(98),char(77),char(105),char(110),char(91),char(51),char(93),char(0),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),
+char(101),char(100),char(65),char(97),char(98),char(98),char(77),char(97),char(120),char(91),char(51),char(93),char(0),char(109),char(95),char(97),char(97),char(98),char(98),char(77),
+char(105),char(110),char(79),char(114),char(103),char(0),char(109),char(95),char(97),char(97),char(98),char(98),char(77),char(97),char(120),char(79),char(114),char(103),char(0),char(109),
+char(95),char(101),char(115),char(99),char(97),char(112),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(115),char(117),char(98),char(80),char(97),
+char(114),char(116),char(0),char(109),char(95),char(116),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),
+char(95),char(112),char(97),char(100),char(91),char(52),char(93),char(0),char(109),char(95),char(101),char(115),char(99),char(97),char(112),char(101),char(73),char(110),char(100),char(101),
+char(120),char(79),char(114),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(98),
+char(118),char(104),char(65),char(97),char(98),char(98),char(77),char(105),char(110),char(0),char(109),char(95),char(98),char(118),char(104),char(65),char(97),char(98),char(98),char(77),
+char(97),char(120),char(0),char(109),char(95),char(98),char(118),char(104),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(97),char(116),char(105),char(111),char(110),
+char(0),char(109),char(95),char(99),char(117),char(114),char(78),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(117),char(115),
+char(101),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(97),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(110),char(117),char(109),char(67),
+char(111),char(110),char(116),char(105),char(103),char(117),char(111),char(117),char(115),char(76),char(101),char(97),char(102),char(78),char(111),char(100),char(101),char(115),char(0),char(109),
+char(95),char(110),char(117),char(109),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(67),char(111),char(110),char(116),char(105),char(103),char(117),
+char(111),char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(0),char(42),char(109),char(95),char(99),char(111),char(110),char(116),char(105),char(103),char(117),char(111),
+char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),
+char(122),char(101),char(100),char(67),char(111),char(110),char(116),char(105),char(103),char(117),char(111),char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(80),char(116),
+char(114),char(0),char(42),char(109),char(95),char(115),char(117),char(98),char(84),char(114),char(101),char(101),char(73),char(110),char(102),char(111),char(80),char(116),char(114),char(0),
+char(109),char(95),char(116),char(114),char(97),char(118),char(101),char(114),char(115),char(97),char(108),char(77),char(111),char(100),char(101),char(0),char(109),char(95),char(110),char(117),
+char(109),char(83),char(117),char(98),char(116),char(114),char(101),char(101),char(72),char(101),char(97),char(100),char(101),char(114),char(115),char(0),char(42),char(109),char(95),char(110),
+char(97),char(109),char(101),char(0),char(109),char(95),char(115),char(104),char(97),char(112),char(101),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(112),char(97),
+char(100),char(100),char(105),char(110),char(103),char(91),char(52),char(93),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),
+char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(108),char(111),char(99),char(97),char(108),char(83),char(99),char(97),
+char(108),char(105),char(110),char(103),char(0),char(109),char(95),char(112),char(108),char(97),char(110),char(101),char(78),char(111),char(114),char(109),char(97),char(108),char(0),char(109),
+char(95),char(112),char(108),char(97),char(110),char(101),char(67),char(111),char(110),char(115),char(116),char(97),char(110),char(116),char(0),char(109),char(95),char(105),char(109),char(112),
+char(108),char(105),char(99),char(105),char(116),char(83),char(104),char(97),char(112),char(101),char(68),char(105),char(109),char(101),char(110),char(115),char(105),char(111),char(110),char(115),
+char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(77),char(97),char(114),char(103),char(105),char(110),char(0),char(109),
+char(95),char(112),char(97),char(100),char(100),char(105),char(110),char(103),char(0),char(109),char(95),char(112),char(111),char(115),char(0),char(109),char(95),char(114),char(97),char(100),
+char(105),char(117),char(115),char(0),char(109),char(95),char(99),char(111),char(110),char(118),char(101),char(120),char(73),char(110),char(116),char(101),char(114),char(110),char(97),char(108),
+char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(42),char(109),char(95),char(108),char(111),char(99),char(97),char(108),char(80),char(111),
+char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(109),char(95),char(108),char(111),char(99),
+char(97),char(108),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(114),char(114),char(97),char(121),char(83),char(105),char(122),char(101),char(0),
+char(109),char(95),char(118),char(97),char(108),char(117),char(101),char(0),char(109),char(95),char(112),char(97),char(100),char(91),char(50),char(93),char(0),char(109),char(95),char(118),
+char(97),char(108),char(117),char(101),char(115),char(91),char(51),char(93),char(0),char(109),char(95),char(112),char(97),char(100),char(0),char(42),char(109),char(95),char(118),char(101),
+char(114),char(116),char(105),char(99),char(101),char(115),char(51),char(102),char(0),char(42),char(109),char(95),char(118),char(101),char(114),char(116),char(105),char(99),char(101),char(115),
+char(51),char(100),char(0),char(42),char(109),char(95),char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(51),char(50),char(0),char(42),char(109),char(95),char(51),
+char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(49),char(54),char(0),char(42),char(109),char(95),char(51),char(105),char(110),char(100),char(105),char(99),char(101),
+char(115),char(56),char(0),char(42),char(109),char(95),char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(49),char(54),char(0),char(109),char(95),char(110),char(117),
+char(109),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(86),char(101),char(114),char(116),
+char(105),char(99),char(101),char(115),char(0),char(42),char(109),char(95),char(109),char(101),char(115),char(104),char(80),char(97),char(114),char(116),char(115),char(80),char(116),char(114),
+char(0),char(109),char(95),char(115),char(99),char(97),char(108),char(105),char(110),char(103),char(0),char(109),char(95),char(110),char(117),char(109),char(77),char(101),char(115),char(104),
+char(80),char(97),char(114),char(116),char(115),char(0),char(109),char(95),char(109),char(101),char(115),char(104),char(73),char(110),char(116),char(101),char(114),char(102),char(97),char(99),
+char(101),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(70),char(108),char(111),char(97),char(116),char(66),
+char(118),char(104),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(68),char(111),char(117),char(98),char(108),
+char(101),char(66),char(118),char(104),char(0),char(42),char(109),char(95),char(116),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(102),char(111),
+char(77),char(97),char(112),char(0),char(109),char(95),char(112),char(97),char(100),char(51),char(91),char(52),char(93),char(0),char(109),char(95),char(116),char(114),char(105),char(109),
+char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(116),char(114),char(97),char(110),char(115),
+char(102),char(111),char(114),char(109),char(0),char(42),char(109),char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),char(97),char(112),char(101),char(0),char(109),
+char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),char(97),char(112),char(101),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(99),char(104),
+char(105),char(108),char(100),char(77),char(97),char(114),char(103),char(105),char(110),char(0),char(42),char(109),char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),
+char(97),char(112),char(101),char(80),char(116),char(114),char(0),char(109),char(95),char(110),char(117),char(109),char(67),char(104),char(105),char(108),char(100),char(83),char(104),char(97),
+char(112),char(101),char(115),char(0),char(109),char(95),char(117),char(112),char(65),char(120),char(105),char(115),char(0),char(109),char(95),char(102),char(108),char(97),char(103),char(115),
+char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(86),char(48),char(86),char(49),char(65),char(110),char(103),char(108),char(101),char(0),char(109),char(95),char(101),
+char(100),char(103),char(101),char(86),char(49),char(86),char(50),char(65),char(110),char(103),char(108),char(101),char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(86),
+char(50),char(86),char(48),char(65),char(110),char(103),char(108),char(101),char(0),char(42),char(109),char(95),char(104),char(97),char(115),char(104),char(84),char(97),char(98),char(108),
+char(101),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(110),char(101),char(120),char(116),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(118),
+char(97),char(108),char(117),char(101),char(65),char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(107),char(101),char(121),char(65),
+char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(109),char(95),char(99),char(111),char(110),char(118),char(101),char(120),char(69),char(112),char(115),char(105),
+char(108),char(111),char(110),char(0),char(109),char(95),char(112),char(108),char(97),char(110),char(97),char(114),char(69),char(112),char(115),char(105),char(108),char(111),char(110),char(0),
+char(109),char(95),char(101),char(113),char(117),char(97),char(108),char(86),char(101),char(114),char(116),char(101),char(120),char(84),char(104),char(114),char(101),char(115),char(104),char(111),
+char(108),char(100),char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(68),char(105),char(115),char(116),char(97),char(110),char(99),char(101),char(84),char(104),char(114),
+char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(122),char(101),char(114),char(111),char(65),char(114),char(101),char(97),char(84),char(104),char(114),
+char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(110),char(101),char(120),char(116),char(83),char(105),char(122),char(101),char(0),char(109),char(95),
+char(104),char(97),char(115),char(104),char(84),char(97),char(98),char(108),char(101),char(83),char(105),char(122),char(101),char(0),char(109),char(95),char(110),char(117),char(109),char(86),
+char(97),char(108),char(117),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(75),char(101),char(121),char(115),char(0),char(109),char(95),char(103),char(105),
+char(109),char(112),char(97),char(99),char(116),char(83),char(117),char(98),char(84),char(121),char(112),char(101),char(0),char(42),char(109),char(95),char(117),char(110),char(115),char(99),
+char(97),char(108),char(101),char(100),char(80),char(111),char(105),char(110),char(116),char(115),char(70),char(108),char(111),char(97),char(116),char(80),char(116),char(114),char(0),char(42),
+char(109),char(95),char(117),char(110),char(115),char(99),char(97),char(108),char(101),char(100),char(80),char(111),char(105),char(110),char(116),char(115),char(68),char(111),char(117),char(98),
+char(108),char(101),char(80),char(116),char(114),char(0),char(109),char(95),char(110),char(117),char(109),char(85),char(110),char(115),char(99),char(97),char(108),char(101),char(100),char(80),
+char(111),char(105),char(110),char(116),char(115),char(0),char(109),char(95),char(112),char(97),char(100),char(100),char(105),char(110),char(103),char(51),char(91),char(52),char(93),char(0),
+char(42),char(109),char(95),char(98),char(114),char(111),char(97),char(100),char(112),char(104),char(97),char(115),char(101),char(72),char(97),char(110),char(100),char(108),char(101),char(0),
+char(42),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(0),char(42),char(109),
+char(95),char(114),char(111),char(111),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(0),
+char(109),char(95),char(119),char(111),char(114),char(108),char(100),char(84),char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(105),
+char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),char(111),char(110),char(87),char(111),char(114),char(108),char(100),char(84),char(114),char(97),
+char(110),char(115),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(105),char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),
+char(111),char(110),char(76),char(105),char(110),char(101),char(97),char(114),char(86),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(105),
+char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),char(111),char(110),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(86),
+char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(105),char(115),char(111),char(116),char(114),char(111),char(112),char(105),
+char(99),char(70),char(114),char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(111),char(110),char(116),char(97),char(99),char(116),char(80),
+char(114),char(111),char(99),char(101),char(115),char(115),char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(100),char(101),char(97),char(99),char(116),char(105),char(118),char(97),char(116),char(105),char(111),char(110),char(84),char(105),char(109),char(101),char(0),char(109),char(95),
+char(102),char(114),char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(114),char(111),char(108),char(108),char(105),char(110),char(103),char(70),char(114),
+char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(105),char(116),char(117),char(116),char(105),char(111),char(110),
+char(0),char(109),char(95),char(104),char(105),char(116),char(70),char(114),char(97),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(99),char(100),
+char(83),char(119),char(101),char(112),char(116),char(83),char(112),char(104),char(101),char(114),char(101),char(82),char(97),char(100),char(105),char(117),char(115),char(0),char(109),char(95),
+char(99),char(99),char(100),char(77),char(111),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(104),char(97),char(115),char(65),char(110),char(105),char(115),char(111),char(116),char(114),char(111),char(112),char(105),char(99),char(70),char(114),char(105),char(99),char(116),
+char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(70),char(108),char(97),char(103),char(115),
+char(0),char(109),char(95),char(105),char(115),char(108),char(97),char(110),char(100),char(84),char(97),char(103),char(49),char(0),char(109),char(95),char(99),char(111),char(109),char(112),
+char(97),char(110),char(105),char(111),char(110),char(73),char(100),char(0),char(109),char(95),char(97),char(99),char(116),char(105),char(118),char(97),char(116),char(105),char(111),char(110),
+char(83),char(116),char(97),char(116),char(101),char(49),char(0),char(109),char(95),char(105),char(110),char(116),char(101),char(114),char(110),char(97),char(108),char(84),char(121),char(112),
+char(101),char(0),char(109),char(95),char(99),char(104),char(101),char(99),char(107),char(67),char(111),char(108),char(108),char(105),char(100),char(101),char(87),char(105),char(116),char(104),
+char(0),char(109),char(95),char(115),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(0),char(109),char(95),char(103),char(114),char(97),char(118),
+char(105),char(116),char(121),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),
+char(116),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(105),char(110),char(118),char(73),char(110),char(101),char(114),char(116),char(105),char(97),char(84),char(101),
+char(110),char(115),char(111),char(114),char(87),char(111),char(114),char(108),char(100),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(86),char(101),
+char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(86),char(101),char(108),char(111),
+char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(70),char(97),char(99),char(116),char(111),char(114),
+char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(103),char(114),
+char(97),char(118),char(105),char(116),char(121),char(95),char(97),char(99),char(99),char(101),char(108),char(101),char(114),char(97),char(116),char(105),char(111),char(110),char(0),char(109),
+char(95),char(105),char(110),char(118),char(73),char(110),char(101),char(114),char(116),char(105),char(97),char(76),char(111),char(99),char(97),char(108),char(0),char(109),char(95),char(116),
+char(111),char(116),char(97),char(108),char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(116),char(111),char(116),char(97),char(108),char(84),char(111),char(114),
+char(113),char(117),char(101),char(0),char(109),char(95),char(105),char(110),char(118),char(101),char(114),char(115),char(101),char(77),char(97),char(115),char(115),char(0),char(109),char(95),
+char(108),char(105),char(110),char(101),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(110),char(103),char(117),
+char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(100),char(100),char(105),char(116),char(105),char(111),
+char(110),char(97),char(108),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(97),
+char(100),char(100),char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(76),char(105),char(110),char(101),char(97),char(114),char(68),char(97),char(109),char(112),char(105),
+char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(83),char(113),char(114),char(0),char(109),char(95),char(97),char(100),char(100),
+char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),
+char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(83),char(113),char(114),char(0),char(109),char(95),char(97),char(100),char(100),char(105),
+char(116),char(105),char(111),char(110),char(97),char(108),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),
+char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(108),char(101),char(101),char(112),
+char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),
+char(97),char(114),char(83),char(108),char(101),char(101),char(112),char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),
+char(109),char(95),char(97),char(100),char(100),char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),
+char(109),char(95),char(110),char(117),char(109),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(82),char(111),char(119),char(115),char(0),
+char(110),char(117),char(98),char(0),char(42),char(109),char(95),char(114),char(98),char(65),char(0),char(42),char(109),char(95),char(114),char(98),char(66),char(0),char(109),char(95),
+char(111),char(98),char(106),char(101),char(99),char(116),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(114),char(67),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(114),char(67),char(111),
+char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(73),char(100),char(0),char(109),char(95),char(110),char(101),char(101),char(100),char(115),char(70),char(101),
+char(101),char(100),char(98),char(97),char(99),char(107),char(0),char(109),char(95),char(97),char(112),char(112),char(108),char(105),char(101),char(100),char(73),char(109),char(112),char(117),
+char(108),char(115),char(101),char(0),char(109),char(95),char(100),char(98),char(103),char(68),char(114),char(97),char(119),char(83),char(105),char(122),char(101),char(0),char(109),char(95),
+char(100),char(105),char(115),char(97),char(98),char(108),char(101),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(115),char(66),char(101),char(116),
+char(119),char(101),char(101),char(110),char(76),char(105),char(110),char(107),char(101),char(100),char(66),char(111),char(100),char(105),char(101),char(115),char(0),char(109),char(95),char(111),
+char(118),char(101),char(114),char(114),char(105),char(100),char(101),char(78),char(117),char(109),char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(116),char(101),char(114),
+char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(98),char(114),char(101),char(97),char(107),char(105),char(110),char(103),char(73),char(109),char(112),
+char(117),char(108),char(115),char(101),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(105),char(115),char(69),char(110),
+char(97),char(98),char(108),char(101),char(100),char(0),char(109),char(95),char(116),char(121),char(112),char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(112),char(105),char(118),char(111),char(116),char(73),char(110),char(65),char(0),char(109),char(95),
+char(112),char(105),char(118),char(111),char(116),char(73),char(110),char(66),char(0),char(109),char(95),char(114),char(98),char(65),char(70),char(114),char(97),char(109),char(101),char(0),
+char(109),char(95),char(114),char(98),char(66),char(70),char(114),char(97),char(109),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(82),char(101),char(102),char(101),
+char(114),char(101),char(110),char(99),char(101),char(70),char(114),char(97),char(109),char(101),char(65),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),
+char(114),char(79),char(110),char(108),char(121),char(0),char(109),char(95),char(101),char(110),char(97),char(98),char(108),char(101),char(65),char(110),char(103),char(117),char(108),char(97),
+char(114),char(77),char(111),char(116),char(111),char(114),char(0),char(109),char(95),char(109),char(111),char(116),char(111),char(114),char(84),char(97),char(114),char(103),char(101),char(116),
+char(86),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(109),char(97),char(120),char(77),char(111),char(116),char(111),char(114),char(73),
+char(109),char(112),char(117),char(108),char(115),char(101),char(0),char(109),char(95),char(108),char(111),char(119),char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),
+char(109),char(95),char(117),char(112),char(112),char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(108),char(105),char(109),char(105),char(116),
+char(83),char(111),char(102),char(116),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(98),char(105),char(97),char(115),char(70),char(97),char(99),char(116),char(111),
+char(114),char(0),char(109),char(95),char(114),char(101),char(108),char(97),char(120),char(97),char(116),char(105),char(111),char(110),char(70),char(97),char(99),char(116),char(111),char(114),
+char(0),char(109),char(95),char(115),char(119),char(105),char(110),char(103),char(83),char(112),char(97),char(110),char(49),char(0),char(109),char(95),char(115),char(119),char(105),char(110),
+char(103),char(83),char(112),char(97),char(110),char(50),char(0),char(109),char(95),char(116),char(119),char(105),char(115),char(116),char(83),char(112),char(97),char(110),char(0),char(109),
+char(95),char(100),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(85),char(112),char(112),
+char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(76),char(111),char(119),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(85),char(112),char(112),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(76),char(111),char(119),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(117),char(115),char(101),char(76),char(105),char(110),char(101),char(97),char(114),char(82),char(101),
+char(102),char(101),char(114),char(101),char(110),char(99),char(101),char(70),char(114),char(97),char(109),char(101),char(65),char(0),char(109),char(95),char(117),char(115),char(101),char(79),
+char(102),char(102),char(115),char(101),char(116),char(70),char(111),char(114),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(70),char(114),
+char(97),char(109),char(101),char(0),char(109),char(95),char(54),char(100),char(111),char(102),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(115),char(112),char(114),
+char(105),char(110),char(103),char(69),char(110),char(97),char(98),char(108),char(101),char(100),char(91),char(54),char(93),char(0),char(109),char(95),char(101),char(113),char(117),char(105),
+char(108),char(105),char(98),char(114),char(105),char(117),char(109),char(80),char(111),char(105),char(110),char(116),char(91),char(54),char(93),char(0),char(109),char(95),char(115),char(112),
+char(114),char(105),char(110),char(103),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(91),char(54),char(93),char(0),char(109),char(95),char(115),
+char(112),char(114),char(105),char(110),char(103),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(91),char(54),char(93),char(0),char(109),char(95),char(116),char(97),
+char(117),char(0),char(109),char(95),char(116),char(105),char(109),char(101),char(83),char(116),char(101),char(112),char(0),char(109),char(95),char(109),char(97),char(120),char(69),char(114),
+char(114),char(111),char(114),char(82),char(101),char(100),char(117),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(115),char(111),char(114),char(0),char(109),
+char(95),char(101),char(114),char(112),char(0),char(109),char(95),char(101),char(114),char(112),char(50),char(0),char(109),char(95),char(103),char(108),char(111),char(98),char(97),char(108),
+char(67),char(102),char(109),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(80),char(101),
+char(110),char(101),char(116),char(114),char(97),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(84),char(117),char(114),char(110),char(69),char(114),char(112),
+char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(108),char(111),char(112),char(0),char(109),char(95),char(119),char(97),char(114),char(109),
+char(115),char(116),char(97),char(114),char(116),char(105),char(110),char(103),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(109),char(97),char(120),
+char(71),char(121),char(114),char(111),char(115),char(99),char(111),char(112),char(105),char(99),char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(115),char(105),
+char(110),char(103),char(108),char(101),char(65),char(120),char(105),char(115),char(82),char(111),char(108),char(108),char(105),char(110),char(103),char(70),char(114),char(105),char(99),char(116),
+char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(110),char(117),char(109),char(73),char(116),
+char(101),char(114),char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(115),char(111),char(108),char(118),char(101),char(114),char(77),char(111),char(100),
+char(101),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(105),char(110),char(103),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(82),char(101),
+char(115),char(116),char(105),char(116),char(117),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(109),char(105),char(110),char(105),char(109),char(117),char(109),char(83),char(111),char(108),char(118),char(101),char(114),char(66),char(97),char(116),char(99),char(104),char(83),
+char(105),char(122),char(101),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(0),char(109),
+char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(97),
+char(110),char(103),char(117),char(108),char(97),char(114),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(118),char(111),
+char(108),char(117),char(109),char(101),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(42),char(109),char(95),char(109),char(97),char(116),
+char(101),char(114),char(105),char(97),char(108),char(0),char(109),char(95),char(112),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(112),
+char(114),char(101),char(118),char(105),char(111),char(117),char(115),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(118),char(101),
+char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(99),char(99),char(117),char(109),char(117),char(108),char(97),char(116),char(101),char(100),
+char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(110),char(111),char(114),char(109),char(97),char(108),char(0),char(109),char(95),char(97),char(114),char(101),
+char(97),char(0),char(109),char(95),char(97),char(116),char(116),char(97),char(99),char(104),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),
+char(105),char(99),char(101),char(115),char(91),char(50),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(76),char(101),char(110),char(103),char(116),char(104),
+char(0),char(109),char(95),char(98),char(98),char(101),char(110),char(100),char(105),char(110),char(103),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),
+char(100),char(105),char(99),char(101),char(115),char(91),char(51),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(65),char(114),char(101),char(97),char(0),
+char(109),char(95),char(99),char(48),char(91),char(52),char(93),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(105),char(99),char(101),
+char(115),char(91),char(52),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(86),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),
+char(99),char(49),char(0),char(109),char(95),char(99),char(50),char(0),char(109),char(95),char(99),char(48),char(0),char(109),char(95),char(108),char(111),char(99),char(97),char(108),
+char(70),char(114),char(97),char(109),char(101),char(0),char(42),char(109),char(95),char(114),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(0),char(109),
+char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(97),char(101),char(114),char(111),char(77),char(111),char(100),
+char(101),char(108),char(0),char(109),char(95),char(98),char(97),char(117),char(109),char(103),char(97),char(114),char(116),char(101),char(0),char(109),char(95),char(100),char(114),char(97),
+char(103),char(0),char(109),char(95),char(108),char(105),char(102),char(116),char(0),char(109),char(95),char(112),char(114),char(101),char(115),char(115),char(117),char(114),char(101),char(0),
+char(109),char(95),char(118),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(100),char(121),char(110),char(97),char(109),char(105),char(99),char(70),char(114),
+char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(112),char(111),char(115),char(101),char(77),char(97),char(116),char(99),char(104),char(0),char(109),
+char(95),char(114),char(105),char(103),char(105),char(100),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(72),char(97),char(114),char(100),char(110),char(101),char(115),
+char(115),char(0),char(109),char(95),char(107),char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(72),char(97),
+char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(67),char(111),char(110),char(116),char(97),char(99),char(116),
+char(72),char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(97),char(110),char(99),char(104),char(111),char(114),char(72),char(97),char(114),
+char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(67),char(108),char(117),
+char(115),char(116),char(101),char(114),char(72),char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(75),
+char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(72),char(97),char(114),char(100),char(110),char(101),char(115),
+char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(83),char(111),char(102),char(116),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(72),
+char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(67),
+char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),char(116),char(0),char(109),
+char(95),char(115),char(111),char(102),char(116),char(75),char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(73),
+char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),char(116),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(83),char(111),
+char(102),char(116),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),
+char(116),char(0),char(109),char(95),char(109),char(97),char(120),char(86),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(116),char(105),char(109),char(101),
+char(83),char(99),char(97),char(108),char(101),char(0),char(109),char(95),char(118),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(73),char(116),char(101),char(114),
+char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(112),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(73),char(116),char(101),
+char(114),char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(100),char(114),char(105),char(102),char(116),char(73),char(116),char(101),char(114),char(97),
+char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(99),char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(116),char(101),char(114),char(97),
+char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(114),char(111),char(116),char(0),char(109),char(95),char(115),char(99),char(97),char(108),char(101),char(0),
+char(109),char(95),char(97),char(113),char(113),char(0),char(109),char(95),char(99),char(111),char(109),char(0),char(42),char(109),char(95),char(112),char(111),char(115),char(105),char(116),
+char(105),char(111),char(110),char(115),char(0),char(42),char(109),char(95),char(119),char(101),char(105),char(103),char(104),char(116),char(115),char(0),char(109),char(95),char(110),char(117),
+char(109),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(87),char(101),char(105),char(103),
+char(116),char(115),char(0),char(109),char(95),char(98),char(118),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(98),char(102),char(114),char(97),char(109),
+char(101),char(0),char(109),char(95),char(102),char(114),char(97),char(109),char(101),char(120),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(108),char(111),char(99),
+char(105),char(105),char(0),char(109),char(95),char(105),char(110),char(118),char(119),char(105),char(0),char(109),char(95),char(118),char(105),char(109),char(112),char(117),char(108),char(115),
+char(101),char(115),char(91),char(50),char(93),char(0),char(109),char(95),char(100),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(91),char(50),char(93),
+char(0),char(109),char(95),char(108),char(118),char(0),char(109),char(95),char(97),char(118),char(0),char(42),char(109),char(95),char(102),char(114),char(97),char(109),char(101),char(114),
+char(101),char(102),char(115),char(0),char(42),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(105),char(99),char(101),char(115),char(0),char(42),
+char(109),char(95),char(109),char(97),char(115),char(115),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(70),char(114),char(97),char(109),char(101),char(82),
+char(101),char(102),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(78),char(111),char(100),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),
+char(77),char(97),char(115),char(115),char(101),char(115),char(0),char(109),char(95),char(105),char(100),char(109),char(97),char(115),char(115),char(0),char(109),char(95),char(105),char(109),
+char(97),char(115),char(115),char(0),char(109),char(95),char(110),char(118),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(0),char(109),char(95),char(110),
+char(100),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(0),char(109),char(95),char(110),char(100),char(97),char(109),char(112),char(105),char(110),char(103),
+char(0),char(109),char(95),char(108),char(100),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(100),char(97),char(109),char(112),char(105),
+char(110),char(103),char(0),char(109),char(95),char(109),char(97),char(116),char(99),char(104),char(105),char(110),char(103),char(0),char(109),char(95),char(109),char(97),char(120),char(83),
+char(101),char(108),char(102),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(0),
+char(109),char(95),char(115),char(101),char(108),char(102),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(73),char(109),char(112),char(117),char(108),
+char(115),char(101),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(99),char(111),char(110),char(116),char(97),char(105),char(110),char(115),char(65),
+char(110),char(99),char(104),char(111),char(114),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(100),char(101),char(0),char(109),char(95),char(99),char(108),
+char(117),char(115),char(116),char(101),char(114),char(73),char(110),char(100),char(101),char(120),char(0),char(42),char(109),char(95),char(98),char(111),char(100),char(121),char(65),char(0),
+char(42),char(109),char(95),char(98),char(111),char(100),char(121),char(66),char(0),char(109),char(95),char(114),char(101),char(102),char(115),char(91),char(50),char(93),char(0),char(109),
+char(95),char(99),char(102),char(109),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(0),char(109),char(95),char(100),char(101),char(108),char(101),char(116),
+char(101),char(0),char(109),char(95),char(114),char(101),char(108),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(91),char(50),char(93),char(0),char(109),
+char(95),char(98),char(111),char(100),char(121),char(65),char(116),char(121),char(112),char(101),char(0),char(109),char(95),char(98),char(111),char(100),char(121),char(66),char(116),char(121),
+char(112),char(101),char(0),char(109),char(95),char(106),char(111),char(105),char(110),char(116),char(84),char(121),char(112),char(101),char(0),char(42),char(109),char(95),char(112),char(111),
+char(115),char(101),char(0),char(42),char(42),char(109),char(95),char(109),char(97),char(116),char(101),char(114),char(105),char(97),char(108),char(115),char(0),char(42),char(109),char(95),
+char(110),char(111),char(100),char(101),char(115),char(0),char(42),char(109),char(95),char(108),char(105),char(110),char(107),char(115),char(0),char(42),char(109),char(95),char(102),char(97),
+char(99),char(101),char(115),char(0),char(42),char(109),char(95),char(116),char(101),char(116),char(114),char(97),char(104),char(101),char(100),char(114),char(97),char(0),char(42),char(109),
+char(95),char(97),char(110),char(99),char(104),char(111),char(114),char(115),char(0),char(42),char(109),char(95),char(99),char(108),char(117),char(115),char(116),char(101),char(114),char(115),
+char(0),char(42),char(109),char(95),char(106),char(111),char(105),char(110),char(116),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(77),char(97),char(116),char(101),
+char(114),char(105),char(97),char(108),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(76),char(105),char(110),char(107),char(115),char(0),char(109),char(95),char(110),
+char(117),char(109),char(70),char(97),char(99),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(84),char(101),char(116),char(114),char(97),char(104),char(101),
+char(100),char(114),char(97),char(0),char(109),char(95),char(110),char(117),char(109),char(65),char(110),char(99),char(104),char(111),char(114),char(115),char(0),char(109),char(95),char(110),
+char(117),char(109),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(74),char(111),char(105),char(110),
+char(116),char(115),char(0),char(109),char(95),char(99),char(111),char(110),char(102),char(105),char(103),char(0),char(84),char(89),char(80),char(69),char(76),char(0),char(0),char(0),
+char(99),char(104),char(97),char(114),char(0),char(117),char(99),char(104),char(97),char(114),char(0),char(115),char(104),char(111),char(114),char(116),char(0),char(117),char(115),char(104),
+char(111),char(114),char(116),char(0),char(105),char(110),char(116),char(0),char(108),char(111),char(110),char(103),char(0),char(117),char(108),char(111),char(110),char(103),char(0),char(102),
+char(108),char(111),char(97),char(116),char(0),char(100),char(111),char(117),char(98),char(108),char(101),char(0),char(118),char(111),char(105),char(100),char(0),char(80),char(111),char(105),
+char(110),char(116),char(101),char(114),char(65),char(114),char(114),char(97),char(121),char(0),char(98),char(116),char(80),char(104),char(121),char(115),char(105),char(99),char(115),char(83),
+char(121),char(115),char(116),char(101),char(109),char(0),char(76),char(105),char(115),char(116),char(66),char(97),char(115),char(101),char(0),char(98),char(116),char(86),char(101),char(99),
+char(116),char(111),char(114),char(51),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(86),char(101),char(99),char(116),
+char(111),char(114),char(51),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(97),char(116),char(114),
+char(105),char(120),char(51),char(120),char(51),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(97),char(116),
+char(114),char(105),char(120),char(51),char(120),char(51),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),
+char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(84),char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(66),char(118),char(104),char(83),char(117),char(98),char(116),char(114),char(101),char(101),char(73),char(110),char(102),char(111),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(79),char(112),char(116),char(105),char(109),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(70),
+char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(79),char(112),char(116),char(105),char(109),char(105),char(122),char(101),char(100),
+char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(70),char(108),char(111),char(97),char(116),
+char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(68),
+char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),
+char(110),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),char(116),char(97),char(116),char(105),char(99),char(80),
+char(108),char(97),char(110),char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(118),
+char(101),char(120),char(73),char(110),char(116),char(101),char(114),char(110),char(97),char(108),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(110),char(100),char(82),char(97),char(100),char(105),char(117),char(115),char(0),
+char(98),char(116),char(77),char(117),char(108),char(116),char(105),char(83),char(112),char(104),char(101),char(114),char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(104),char(111),char(114),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(104),char(111),char(114),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(84),char(114),char(105),char(112),char(108),char(101),char(116),
+char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(104),char(97),char(114),char(73),char(110),char(100),char(101),char(120),char(84),char(114),char(105),char(112),
+char(108),char(101),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(101),char(115),char(104),char(80),char(97),char(114),char(116),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(83),char(116),char(114),char(105),char(100),char(105),char(110),char(103),char(77),char(101),char(115),char(104),char(73),char(110),char(116),
+char(101),char(114),char(102),char(97),char(99),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),char(97),char(110),char(103),char(108),
+char(101),char(77),char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),
+char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(102),char(111),char(77),char(97),char(112),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),
+char(99),char(97),char(108),char(101),char(100),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(77),char(101),char(115),char(104),char(83),char(104),char(97),
+char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(109),char(112),char(111),char(117),char(110),char(100),char(83),char(104),char(97),
+char(112),char(101),char(67),char(104),char(105),char(108),char(100),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(109),char(112),char(111),char(117),
+char(110),char(100),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(121),char(108),char(105),char(110),char(100),
+char(101),char(114),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(97),char(112),char(115),char(117),char(108),
+char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),char(97),char(110),char(103),char(108),
+char(101),char(73),char(110),char(102),char(111),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(73),char(109),char(112),char(97),char(99),char(116),char(77),
+char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(118),char(101),
+char(120),char(72),char(117),char(108),char(108),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),
+char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),
+char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(68),char(121),char(110),char(97),char(109),char(105),char(99),char(115),
+char(87),char(111),char(114),char(108),char(100),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),
+char(110),char(116),char(97),char(99),char(116),char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(68),char(111),char(117),char(98),char(108),
+char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(68),char(121),char(110),char(97),char(109),char(105),char(99),char(115),char(87),char(111),char(114),char(108),
+char(100),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(116),char(97),char(99),char(116),
+char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(73),char(110),char(102),char(111),char(49),
+char(0),char(98),char(116),char(84),char(121),char(112),char(101),char(100),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(68),char(97),char(116),char(97),char(0),char(98),
+char(116),char(80),char(111),char(105),char(110),char(116),char(50),char(80),char(111),char(105),char(110),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(80),char(111),char(105),char(110),char(116),char(50),
+char(80),char(111),char(105),char(110),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(111),char(117),char(98),char(108),
+char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(72),char(105),char(110),char(103),char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),
+char(105),char(110),char(116),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(72),char(105),char(110),char(103),
+char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(67),char(111),char(110),char(101),char(84),char(119),char(105),char(115),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(101),char(110),char(101),char(114),char(105),char(99),char(54),char(68),char(111),char(102),
+char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(101),char(110),
+char(101),char(114),char(105),char(99),char(54),char(68),char(111),char(102),char(83),char(112),char(114),char(105),char(110),char(103),char(67),char(111),char(110),char(115),char(116),char(114),
+char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),char(108),char(105),char(100),char(101),char(114),char(67),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),
+char(77),char(97),char(116),char(101),char(114),char(105),char(97),char(108),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),
+char(121),char(78),char(111),char(100),char(101),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(76),char(105),
+char(110),char(107),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(70),char(97),char(99),char(101),char(68),
+char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(84),char(101),char(116),char(114),char(97),char(68),char(97),char(116),
+char(97),char(0),char(83),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(65),char(110),char(99),char(104),char(111),char(114),char(68),char(97),char(116),
+char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(67),char(111),char(110),char(102),char(105),char(103),char(68),char(97),char(116),char(97),
+char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(80),char(111),char(115),char(101),char(68),char(97),char(116),char(97),char(0),char(83),char(111),
+char(102),char(116),char(66),char(111),char(100),char(121),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(74),char(111),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(0),char(0),
+char(84),char(76),char(69),char(78),char(1),char(0),char(1),char(0),char(2),char(0),char(2),char(0),char(4),char(0),char(4),char(0),char(4),char(0),char(4),char(0),
+char(8),char(0),char(0),char(0),char(12),char(0),char(36),char(0),char(8),char(0),char(16),char(0),char(32),char(0),char(48),char(0),char(96),char(0),char(64),char(0),
+char(-128),char(0),char(20),char(0),char(48),char(0),char(80),char(0),char(16),char(0),char(84),char(0),char(-124),char(0),char(12),char(0),char(52),char(0),char(52),char(0),
+char(20),char(0),char(64),char(0),char(4),char(0),char(4),char(0),char(8),char(0),char(4),char(0),char(32),char(0),char(28),char(0),char(60),char(0),char(56),char(0),
+char(76),char(0),char(76),char(0),char(24),char(0),char(60),char(0),char(60),char(0),char(16),char(0),char(64),char(0),char(68),char(0),char(-48),char(1),char(0),char(1),
+char(-72),char(0),char(-104),char(0),char(104),char(0),char(88),char(0),char(-24),char(1),char(-96),char(3),char(8),char(0),char(52),char(0),char(0),char(0),char(84),char(0),
+char(116),char(0),char(92),char(1),char(-36),char(0),char(-44),char(0),char(-4),char(0),char(92),char(1),char(-52),char(0),char(16),char(0),char(100),char(0),char(20),char(0),
+char(36),char(0),char(100),char(0),char(92),char(0),char(104),char(0),char(-64),char(0),char(92),char(1),char(104),char(0),char(-84),char(1),char(83),char(84),char(82),char(67),
+char(65),char(0),char(0),char(0),char(10),char(0),char(3),char(0),char(4),char(0),char(0),char(0),char(4),char(0),char(1),char(0),char(9),char(0),char(2),char(0),
+char(11),char(0),char(3),char(0),char(10),char(0),char(3),char(0),char(10),char(0),char(4),char(0),char(10),char(0),char(5),char(0),char(12),char(0),char(2),char(0),
+char(9),char(0),char(6),char(0),char(9),char(0),char(7),char(0),char(13),char(0),char(1),char(0),char(7),char(0),char(8),char(0),char(14),char(0),char(1),char(0),
+char(8),char(0),char(8),char(0),char(15),char(0),char(1),char(0),char(13),char(0),char(9),char(0),char(16),char(0),char(1),char(0),char(14),char(0),char(9),char(0),
+char(17),char(0),char(2),char(0),char(15),char(0),char(10),char(0),char(13),char(0),char(11),char(0),char(18),char(0),char(2),char(0),char(16),char(0),char(10),char(0),
+char(14),char(0),char(11),char(0),char(19),char(0),char(4),char(0),char(4),char(0),char(12),char(0),char(4),char(0),char(13),char(0),char(2),char(0),char(14),char(0),
+char(2),char(0),char(15),char(0),char(20),char(0),char(6),char(0),char(13),char(0),char(16),char(0),char(13),char(0),char(17),char(0),char(4),char(0),char(18),char(0),
+char(4),char(0),char(19),char(0),char(4),char(0),char(20),char(0),char(0),char(0),char(21),char(0),char(21),char(0),char(6),char(0),char(14),char(0),char(16),char(0),
+char(14),char(0),char(17),char(0),char(4),char(0),char(18),char(0),char(4),char(0),char(19),char(0),char(4),char(0),char(20),char(0),char(0),char(0),char(21),char(0),
+char(22),char(0),char(3),char(0),char(2),char(0),char(14),char(0),char(2),char(0),char(15),char(0),char(4),char(0),char(22),char(0),char(23),char(0),char(12),char(0),
+char(13),char(0),char(23),char(0),char(13),char(0),char(24),char(0),char(13),char(0),char(25),char(0),char(4),char(0),char(26),char(0),char(4),char(0),char(27),char(0),
+char(4),char(0),char(28),char(0),char(4),char(0),char(29),char(0),char(20),char(0),char(30),char(0),char(22),char(0),char(31),char(0),char(19),char(0),char(32),char(0),
+char(4),char(0),char(33),char(0),char(4),char(0),char(34),char(0),char(24),char(0),char(12),char(0),char(14),char(0),char(23),char(0),char(14),char(0),char(24),char(0),
+char(14),char(0),char(25),char(0),char(4),char(0),char(26),char(0),char(4),char(0),char(27),char(0),char(4),char(0),char(28),char(0),char(4),char(0),char(29),char(0),
+char(21),char(0),char(30),char(0),char(22),char(0),char(31),char(0),char(4),char(0),char(33),char(0),char(4),char(0),char(34),char(0),char(19),char(0),char(32),char(0),
+char(25),char(0),char(3),char(0),char(0),char(0),char(35),char(0),char(4),char(0),char(36),char(0),char(0),char(0),char(37),char(0),char(26),char(0),char(5),char(0),
+char(25),char(0),char(38),char(0),char(13),char(0),char(39),char(0),char(13),char(0),char(40),char(0),char(7),char(0),char(41),char(0),char(0),char(0),char(21),char(0),
+char(27),char(0),char(5),char(0),char(25),char(0),char(38),char(0),char(13),char(0),char(39),char(0),char(13),char(0),char(42),char(0),char(7),char(0),char(43),char(0),
+char(4),char(0),char(44),char(0),char(28),char(0),char(2),char(0),char(13),char(0),char(45),char(0),char(7),char(0),char(46),char(0),char(29),char(0),char(4),char(0),
+char(27),char(0),char(47),char(0),char(28),char(0),char(48),char(0),char(4),char(0),char(49),char(0),char(0),char(0),char(37),char(0),char(30),char(0),char(1),char(0),
+char(4),char(0),char(50),char(0),char(31),char(0),char(2),char(0),char(2),char(0),char(50),char(0),char(0),char(0),char(51),char(0),char(32),char(0),char(2),char(0),
+char(2),char(0),char(52),char(0),char(0),char(0),char(51),char(0),char(33),char(0),char(2),char(0),char(0),char(0),char(52),char(0),char(0),char(0),char(53),char(0),
+char(34),char(0),char(8),char(0),char(13),char(0),char(54),char(0),char(14),char(0),char(55),char(0),char(30),char(0),char(56),char(0),char(32),char(0),char(57),char(0),
+char(33),char(0),char(58),char(0),char(31),char(0),char(59),char(0),char(4),char(0),char(60),char(0),char(4),char(0),char(61),char(0),char(35),char(0),char(4),char(0),
+char(34),char(0),char(62),char(0),char(13),char(0),char(63),char(0),char(4),char(0),char(64),char(0),char(0),char(0),char(37),char(0),char(36),char(0),char(7),char(0),
+char(25),char(0),char(38),char(0),char(35),char(0),char(65),char(0),char(23),char(0),char(66),char(0),char(24),char(0),char(67),char(0),char(37),char(0),char(68),char(0),
+char(7),char(0),char(43),char(0),char(0),char(0),char(69),char(0),char(38),char(0),char(2),char(0),char(36),char(0),char(70),char(0),char(13),char(0),char(39),char(0),
+char(39),char(0),char(4),char(0),char(17),char(0),char(71),char(0),char(25),char(0),char(72),char(0),char(4),char(0),char(73),char(0),char(7),char(0),char(74),char(0),
+char(40),char(0),char(4),char(0),char(25),char(0),char(38),char(0),char(39),char(0),char(75),char(0),char(4),char(0),char(76),char(0),char(7),char(0),char(43),char(0),
+char(41),char(0),char(3),char(0),char(27),char(0),char(47),char(0),char(4),char(0),char(77),char(0),char(0),char(0),char(37),char(0),char(42),char(0),char(3),char(0),
+char(27),char(0),char(47),char(0),char(4),char(0),char(77),char(0),char(0),char(0),char(37),char(0),char(43),char(0),char(4),char(0),char(4),char(0),char(78),char(0),
+char(7),char(0),char(79),char(0),char(7),char(0),char(80),char(0),char(7),char(0),char(81),char(0),char(37),char(0),char(14),char(0),char(4),char(0),char(82),char(0),
+char(4),char(0),char(83),char(0),char(43),char(0),char(84),char(0),char(4),char(0),char(85),char(0),char(7),char(0),char(86),char(0),char(7),char(0),char(87),char(0),
+char(7),char(0),char(88),char(0),char(7),char(0),char(89),char(0),char(7),char(0),char(90),char(0),char(4),char(0),char(91),char(0),char(4),char(0),char(92),char(0),
+char(4),char(0),char(93),char(0),char(4),char(0),char(94),char(0),char(0),char(0),char(37),char(0),char(44),char(0),char(5),char(0),char(25),char(0),char(38),char(0),
+char(35),char(0),char(65),char(0),char(13),char(0),char(39),char(0),char(7),char(0),char(43),char(0),char(4),char(0),char(95),char(0),char(45),char(0),char(5),char(0),
+char(27),char(0),char(47),char(0),char(13),char(0),char(96),char(0),char(14),char(0),char(97),char(0),char(4),char(0),char(98),char(0),char(0),char(0),char(99),char(0),
+char(46),char(0),char(25),char(0),char(9),char(0),char(100),char(0),char(9),char(0),char(101),char(0),char(25),char(0),char(102),char(0),char(0),char(0),char(35),char(0),
+char(18),char(0),char(103),char(0),char(18),char(0),char(104),char(0),char(14),char(0),char(105),char(0),char(14),char(0),char(106),char(0),char(14),char(0),char(107),char(0),
+char(8),char(0),char(108),char(0),char(8),char(0),char(109),char(0),char(8),char(0),char(110),char(0),char(8),char(0),char(111),char(0),char(8),char(0),char(112),char(0),
+char(8),char(0),char(113),char(0),char(8),char(0),char(114),char(0),char(8),char(0),char(115),char(0),char(4),char(0),char(116),char(0),char(4),char(0),char(117),char(0),
+char(4),char(0),char(118),char(0),char(4),char(0),char(119),char(0),char(4),char(0),char(120),char(0),char(4),char(0),char(121),char(0),char(4),char(0),char(122),char(0),
+char(0),char(0),char(37),char(0),char(47),char(0),char(25),char(0),char(9),char(0),char(100),char(0),char(9),char(0),char(101),char(0),char(25),char(0),char(102),char(0),
+char(0),char(0),char(35),char(0),char(17),char(0),char(103),char(0),char(17),char(0),char(104),char(0),char(13),char(0),char(105),char(0),char(13),char(0),char(106),char(0),
+char(13),char(0),char(107),char(0),char(7),char(0),char(108),char(0),char(7),char(0),char(109),char(0),char(7),char(0),char(110),char(0),char(7),char(0),char(111),char(0),
+char(7),char(0),char(112),char(0),char(7),char(0),char(113),char(0),char(7),char(0),char(114),char(0),char(7),char(0),char(115),char(0),char(4),char(0),char(116),char(0),
+char(4),char(0),char(117),char(0),char(4),char(0),char(118),char(0),char(4),char(0),char(119),char(0),char(4),char(0),char(120),char(0),char(4),char(0),char(121),char(0),
+char(4),char(0),char(122),char(0),char(0),char(0),char(37),char(0),char(48),char(0),char(2),char(0),char(49),char(0),char(123),char(0),char(14),char(0),char(124),char(0),
+char(50),char(0),char(2),char(0),char(51),char(0),char(123),char(0),char(13),char(0),char(124),char(0),char(52),char(0),char(21),char(0),char(47),char(0),char(125),char(0),
+char(15),char(0),char(126),char(0),char(13),char(0),char(127),char(0),char(13),char(0),char(-128),char(0),char(13),char(0),char(-127),char(0),char(13),char(0),char(-126),char(0),
+char(13),char(0),char(124),char(0),char(13),char(0),char(-125),char(0),char(13),char(0),char(-124),char(0),char(13),char(0),char(-123),char(0),char(13),char(0),char(-122),char(0),
+char(7),char(0),char(-121),char(0),char(7),char(0),char(-120),char(0),char(7),char(0),char(-119),char(0),char(7),char(0),char(-118),char(0),char(7),char(0),char(-117),char(0),
+char(7),char(0),char(-116),char(0),char(7),char(0),char(-115),char(0),char(7),char(0),char(-114),char(0),char(7),char(0),char(-113),char(0),char(4),char(0),char(-112),char(0),
+char(53),char(0),char(22),char(0),char(46),char(0),char(125),char(0),char(16),char(0),char(126),char(0),char(14),char(0),char(127),char(0),char(14),char(0),char(-128),char(0),
+char(14),char(0),char(-127),char(0),char(14),char(0),char(-126),char(0),char(14),char(0),char(124),char(0),char(14),char(0),char(-125),char(0),char(14),char(0),char(-124),char(0),
+char(14),char(0),char(-123),char(0),char(14),char(0),char(-122),char(0),char(8),char(0),char(-121),char(0),char(8),char(0),char(-120),char(0),char(8),char(0),char(-119),char(0),
+char(8),char(0),char(-118),char(0),char(8),char(0),char(-117),char(0),char(8),char(0),char(-116),char(0),char(8),char(0),char(-115),char(0),char(8),char(0),char(-114),char(0),
+char(8),char(0),char(-113),char(0),char(4),char(0),char(-112),char(0),char(0),char(0),char(37),char(0),char(54),char(0),char(2),char(0),char(4),char(0),char(-111),char(0),
+char(4),char(0),char(-110),char(0),char(55),char(0),char(13),char(0),char(56),char(0),char(-109),char(0),char(56),char(0),char(-108),char(0),char(0),char(0),char(35),char(0),
+char(4),char(0),char(-107),char(0),char(4),char(0),char(-106),char(0),char(4),char(0),char(-105),char(0),char(4),char(0),char(-104),char(0),char(7),char(0),char(-103),char(0),
+char(7),char(0),char(-102),char(0),char(4),char(0),char(-101),char(0),char(4),char(0),char(-100),char(0),char(7),char(0),char(-99),char(0),char(4),char(0),char(-98),char(0),
+char(57),char(0),char(3),char(0),char(55),char(0),char(-97),char(0),char(13),char(0),char(-96),char(0),char(13),char(0),char(-95),char(0),char(58),char(0),char(3),char(0),
+char(55),char(0),char(-97),char(0),char(14),char(0),char(-96),char(0),char(14),char(0),char(-95),char(0),char(59),char(0),char(13),char(0),char(55),char(0),char(-97),char(0),
+char(18),char(0),char(-94),char(0),char(18),char(0),char(-93),char(0),char(4),char(0),char(-92),char(0),char(4),char(0),char(-91),char(0),char(4),char(0),char(-90),char(0),
+char(7),char(0),char(-89),char(0),char(7),char(0),char(-88),char(0),char(7),char(0),char(-87),char(0),char(7),char(0),char(-86),char(0),char(7),char(0),char(-85),char(0),
+char(7),char(0),char(-84),char(0),char(7),char(0),char(-83),char(0),char(60),char(0),char(13),char(0),char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),
+char(17),char(0),char(-93),char(0),char(4),char(0),char(-92),char(0),char(4),char(0),char(-91),char(0),char(4),char(0),char(-90),char(0),char(7),char(0),char(-89),char(0),
+char(7),char(0),char(-88),char(0),char(7),char(0),char(-87),char(0),char(7),char(0),char(-86),char(0),char(7),char(0),char(-85),char(0),char(7),char(0),char(-84),char(0),
+char(7),char(0),char(-83),char(0),char(61),char(0),char(11),char(0),char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),
+char(7),char(0),char(-82),char(0),char(7),char(0),char(-81),char(0),char(7),char(0),char(-80),char(0),char(7),char(0),char(-85),char(0),char(7),char(0),char(-84),char(0),
+char(7),char(0),char(-83),char(0),char(7),char(0),char(-79),char(0),char(0),char(0),char(21),char(0),char(62),char(0),char(9),char(0),char(55),char(0),char(-97),char(0),
+char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),char(13),char(0),char(-78),char(0),char(13),char(0),char(-77),char(0),char(13),char(0),char(-76),char(0),
+char(13),char(0),char(-75),char(0),char(4),char(0),char(-74),char(0),char(4),char(0),char(-73),char(0),char(63),char(0),char(5),char(0),char(62),char(0),char(-72),char(0),
+char(4),char(0),char(-71),char(0),char(7),char(0),char(-70),char(0),char(7),char(0),char(-69),char(0),char(7),char(0),char(-68),char(0),char(64),char(0),char(9),char(0),
+char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),char(7),char(0),char(-78),char(0),char(7),char(0),char(-77),char(0),
+char(7),char(0),char(-76),char(0),char(7),char(0),char(-75),char(0),char(4),char(0),char(-74),char(0),char(4),char(0),char(-73),char(0),char(49),char(0),char(22),char(0),
+char(8),char(0),char(-67),char(0),char(8),char(0),char(-79),char(0),char(8),char(0),char(110),char(0),char(8),char(0),char(-66),char(0),char(8),char(0),char(112),char(0),
+char(8),char(0),char(-65),char(0),char(8),char(0),char(-64),char(0),char(8),char(0),char(-63),char(0),char(8),char(0),char(-62),char(0),char(8),char(0),char(-61),char(0),
+char(8),char(0),char(-60),char(0),char(8),char(0),char(-59),char(0),char(8),char(0),char(-58),char(0),char(8),char(0),char(-57),char(0),char(8),char(0),char(-56),char(0),
+char(8),char(0),char(-55),char(0),char(4),char(0),char(-54),char(0),char(4),char(0),char(-53),char(0),char(4),char(0),char(-52),char(0),char(4),char(0),char(-51),char(0),
+char(4),char(0),char(-50),char(0),char(0),char(0),char(37),char(0),char(51),char(0),char(22),char(0),char(7),char(0),char(-67),char(0),char(7),char(0),char(-79),char(0),
+char(7),char(0),char(110),char(0),char(7),char(0),char(-66),char(0),char(7),char(0),char(112),char(0),char(7),char(0),char(-65),char(0),char(7),char(0),char(-64),char(0),
+char(7),char(0),char(-63),char(0),char(7),char(0),char(-62),char(0),char(7),char(0),char(-61),char(0),char(7),char(0),char(-60),char(0),char(7),char(0),char(-59),char(0),
+char(7),char(0),char(-58),char(0),char(7),char(0),char(-57),char(0),char(7),char(0),char(-56),char(0),char(7),char(0),char(-55),char(0),char(4),char(0),char(-54),char(0),
+char(4),char(0),char(-53),char(0),char(4),char(0),char(-52),char(0),char(4),char(0),char(-51),char(0),char(4),char(0),char(-50),char(0),char(0),char(0),char(37),char(0),
+char(65),char(0),char(4),char(0),char(7),char(0),char(-49),char(0),char(7),char(0),char(-48),char(0),char(7),char(0),char(-47),char(0),char(4),char(0),char(78),char(0),
+char(66),char(0),char(10),char(0),char(65),char(0),char(-46),char(0),char(13),char(0),char(-45),char(0),char(13),char(0),char(-44),char(0),char(13),char(0),char(-43),char(0),
+char(13),char(0),char(-42),char(0),char(13),char(0),char(-41),char(0),char(7),char(0),char(-121),char(0),char(7),char(0),char(-40),char(0),char(4),char(0),char(-39),char(0),
+char(4),char(0),char(53),char(0),char(67),char(0),char(4),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-38),char(0),char(7),char(0),char(-37),char(0),
+char(4),char(0),char(-36),char(0),char(68),char(0),char(4),char(0),char(13),char(0),char(-41),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-35),char(0),
+char(7),char(0),char(-34),char(0),char(69),char(0),char(7),char(0),char(13),char(0),char(-33),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-32),char(0),
+char(7),char(0),char(-31),char(0),char(7),char(0),char(-30),char(0),char(7),char(0),char(-29),char(0),char(4),char(0),char(53),char(0),char(70),char(0),char(6),char(0),
+char(15),char(0),char(-28),char(0),char(13),char(0),char(-30),char(0),char(13),char(0),char(-27),char(0),char(56),char(0),char(-26),char(0),char(4),char(0),char(-25),char(0),
+char(7),char(0),char(-29),char(0),char(71),char(0),char(26),char(0),char(4),char(0),char(-24),char(0),char(7),char(0),char(-23),char(0),char(7),char(0),char(-79),char(0),
+char(7),char(0),char(-22),char(0),char(7),char(0),char(-21),char(0),char(7),char(0),char(-20),char(0),char(7),char(0),char(-19),char(0),char(7),char(0),char(-18),char(0),
+char(7),char(0),char(-17),char(0),char(7),char(0),char(-16),char(0),char(7),char(0),char(-15),char(0),char(7),char(0),char(-14),char(0),char(7),char(0),char(-13),char(0),
+char(7),char(0),char(-12),char(0),char(7),char(0),char(-11),char(0),char(7),char(0),char(-10),char(0),char(7),char(0),char(-9),char(0),char(7),char(0),char(-8),char(0),
+char(7),char(0),char(-7),char(0),char(7),char(0),char(-6),char(0),char(7),char(0),char(-5),char(0),char(4),char(0),char(-4),char(0),char(4),char(0),char(-3),char(0),
+char(4),char(0),char(-2),char(0),char(4),char(0),char(-1),char(0),char(4),char(0),char(117),char(0),char(72),char(0),char(12),char(0),char(15),char(0),char(0),char(1),
+char(15),char(0),char(1),char(1),char(15),char(0),char(2),char(1),char(13),char(0),char(3),char(1),char(13),char(0),char(4),char(1),char(7),char(0),char(5),char(1),
+char(4),char(0),char(6),char(1),char(4),char(0),char(7),char(1),char(4),char(0),char(8),char(1),char(4),char(0),char(9),char(1),char(7),char(0),char(-31),char(0),
+char(4),char(0),char(53),char(0),char(73),char(0),char(27),char(0),char(17),char(0),char(10),char(1),char(15),char(0),char(11),char(1),char(15),char(0),char(12),char(1),
+char(13),char(0),char(3),char(1),char(13),char(0),char(13),char(1),char(13),char(0),char(14),char(1),char(13),char(0),char(15),char(1),char(13),char(0),char(16),char(1),
+char(13),char(0),char(17),char(1),char(4),char(0),char(18),char(1),char(7),char(0),char(19),char(1),char(4),char(0),char(20),char(1),char(4),char(0),char(21),char(1),
+char(4),char(0),char(22),char(1),char(7),char(0),char(23),char(1),char(7),char(0),char(24),char(1),char(4),char(0),char(25),char(1),char(4),char(0),char(26),char(1),
+char(7),char(0),char(27),char(1),char(7),char(0),char(28),char(1),char(7),char(0),char(29),char(1),char(7),char(0),char(30),char(1),char(7),char(0),char(31),char(1),
+char(7),char(0),char(32),char(1),char(4),char(0),char(33),char(1),char(4),char(0),char(34),char(1),char(4),char(0),char(35),char(1),char(74),char(0),char(12),char(0),
+char(9),char(0),char(36),char(1),char(9),char(0),char(37),char(1),char(13),char(0),char(38),char(1),char(7),char(0),char(39),char(1),char(7),char(0),char(-63),char(0),
+char(7),char(0),char(40),char(1),char(4),char(0),char(41),char(1),char(13),char(0),char(42),char(1),char(4),char(0),char(43),char(1),char(4),char(0),char(44),char(1),
+char(4),char(0),char(45),char(1),char(4),char(0),char(53),char(0),char(75),char(0),char(19),char(0),char(47),char(0),char(125),char(0),char(72),char(0),char(46),char(1),
+char(65),char(0),char(47),char(1),char(66),char(0),char(48),char(1),char(67),char(0),char(49),char(1),char(68),char(0),char(50),char(1),char(69),char(0),char(51),char(1),
+char(70),char(0),char(52),char(1),char(73),char(0),char(53),char(1),char(74),char(0),char(54),char(1),char(4),char(0),char(55),char(1),char(4),char(0),char(21),char(1),
+char(4),char(0),char(56),char(1),char(4),char(0),char(57),char(1),char(4),char(0),char(58),char(1),char(4),char(0),char(59),char(1),char(4),char(0),char(60),char(1),
+char(4),char(0),char(61),char(1),char(71),char(0),char(62),char(1),};
int sBulletDNAlen= sizeof(sBulletDNAstr);
-
- char sBulletDNAstr64[]= {
-83,68,78,65,78,65,77,69,44,1,0,0,109,95,115,105,122,101,0,109,
-95,99,97,112,97,99,105,116,121,0,42,109,95,100,97,116,97,0,109,95,
-99,111,108,108,105,115,105,111,110,83,104,97,112,101,115,0,109,95,99,111,
-108,108,105,115,105,111,110,79,98,106,101,99,116,115,0,109,95,99,111,110,
-115,116,114,97,105,110,116,115,0,42,102,105,114,115,116,0,42,108,97,115,
-116,0,109,95,102,108,111,97,116,115,91,52,93,0,109,95,101,108,91,51,
-93,0,109,95,98,97,115,105,115,0,109,95,111,114,105,103,105,110,0,109,
-95,114,111,111,116,78,111,100,101,73,110,100,101,120,0,109,95,115,117,98,
-116,114,101,101,83,105,122,101,0,109,95,113,117,97,110,116,105,122,101,100,
-65,97,98,98,77,105,110,91,51,93,0,109,95,113,117,97,110,116,105,122,
-101,100,65,97,98,98,77,97,120,91,51,93,0,109,95,97,97,98,98,77,
-105,110,79,114,103,0,109,95,97,97,98,98,77,97,120,79,114,103,0,109,
-95,101,115,99,97,112,101,73,110,100,101,120,0,109,95,115,117,98,80,97,
-114,116,0,109,95,116,114,105,97,110,103,108,101,73,110,100,101,120,0,109,
-95,112,97,100,91,52,93,0,109,95,101,115,99,97,112,101,73,110,100,101,
-120,79,114,84,114,105,97,110,103,108,101,73,110,100,101,120,0,109,95,98,
-118,104,65,97,98,98,77,105,110,0,109,95,98,118,104,65,97,98,98,77,
-97,120,0,109,95,98,118,104,81,117,97,110,116,105,122,97,116,105,111,110,
-0,109,95,99,117,114,78,111,100,101,73,110,100,101,120,0,109,95,117,115,
-101,81,117,97,110,116,105,122,97,116,105,111,110,0,109,95,110,117,109,67,
-111,110,116,105,103,117,111,117,115,76,101,97,102,78,111,100,101,115,0,109,
-95,110,117,109,81,117,97,110,116,105,122,101,100,67,111,110,116,105,103,117,
-111,117,115,78,111,100,101,115,0,42,109,95,99,111,110,116,105,103,117,111,
-117,115,78,111,100,101,115,80,116,114,0,42,109,95,113,117,97,110,116,105,
-122,101,100,67,111,110,116,105,103,117,111,117,115,78,111,100,101,115,80,116,
-114,0,42,109,95,115,117,98,84,114,101,101,73,110,102,111,80,116,114,0,
-109,95,116,114,97,118,101,114,115,97,108,77,111,100,101,0,109,95,110,117,
-109,83,117,98,116,114,101,101,72,101,97,100,101,114,115,0,42,109,95,110,
-97,109,101,0,109,95,115,104,97,112,101,84,121,112,101,0,109,95,112,97,
-100,100,105,110,103,91,52,93,0,109,95,99,111,108,108,105,115,105,111,110,
-83,104,97,112,101,68,97,116,97,0,109,95,108,111,99,97,108,83,99,97,
-108,105,110,103,0,109,95,112,108,97,110,101,78,111,114,109,97,108,0,109,
-95,112,108,97,110,101,67,111,110,115,116,97,110,116,0,109,95,105,109,112,
-108,105,99,105,116,83,104,97,112,101,68,105,109,101,110,115,105,111,110,115,
-0,109,95,99,111,108,108,105,115,105,111,110,77,97,114,103,105,110,0,109,
-95,112,97,100,100,105,110,103,0,109,95,112,111,115,0,109,95,114,97,100,
-105,117,115,0,109,95,99,111,110,118,101,120,73,110,116,101,114,110,97,108,
-83,104,97,112,101,68,97,116,97,0,42,109,95,108,111,99,97,108,80,111,
-115,105,116,105,111,110,65,114,114,97,121,80,116,114,0,109,95,108,111,99,
-97,108,80,111,115,105,116,105,111,110,65,114,114,97,121,83,105,122,101,0,
-109,95,118,97,108,117,101,0,109,95,112,97,100,91,50,93,0,109,95,118,
-97,108,117,101,115,91,51,93,0,109,95,112,97,100,0,42,109,95,118,101,
-114,116,105,99,101,115,51,102,0,42,109,95,118,101,114,116,105,99,101,115,
-51,100,0,42,109,95,105,110,100,105,99,101,115,51,50,0,42,109,95,51,
-105,110,100,105,99,101,115,49,54,0,42,109,95,51,105,110,100,105,99,101,
-115,56,0,42,109,95,105,110,100,105,99,101,115,49,54,0,109,95,110,117,
-109,84,114,105,97,110,103,108,101,115,0,109,95,110,117,109,86,101,114,116,
-105,99,101,115,0,42,109,95,109,101,115,104,80,97,114,116,115,80,116,114,
-0,109,95,115,99,97,108,105,110,103,0,109,95,110,117,109,77,101,115,104,
-80,97,114,116,115,0,109,95,109,101,115,104,73,110,116,101,114,102,97,99,
-101,0,42,109,95,113,117,97,110,116,105,122,101,100,70,108,111,97,116,66,
-118,104,0,42,109,95,113,117,97,110,116,105,122,101,100,68,111,117,98,108,
-101,66,118,104,0,42,109,95,116,114,105,97,110,103,108,101,73,110,102,111,
-77,97,112,0,109,95,112,97,100,51,91,52,93,0,109,95,116,114,105,109,
-101,115,104,83,104,97,112,101,68,97,116,97,0,109,95,116,114,97,110,115,
-102,111,114,109,0,42,109,95,99,104,105,108,100,83,104,97,112,101,0,109,
-95,99,104,105,108,100,83,104,97,112,101,84,121,112,101,0,109,95,99,104,
-105,108,100,77,97,114,103,105,110,0,42,109,95,99,104,105,108,100,83,104,
-97,112,101,80,116,114,0,109,95,110,117,109,67,104,105,108,100,83,104,97,
-112,101,115,0,109,95,117,112,65,120,105,115,0,109,95,102,108,97,103,115,
-0,109,95,101,100,103,101,86,48,86,49,65,110,103,108,101,0,109,95,101,
-100,103,101,86,49,86,50,65,110,103,108,101,0,109,95,101,100,103,101,86,
-50,86,48,65,110,103,108,101,0,42,109,95,104,97,115,104,84,97,98,108,
-101,80,116,114,0,42,109,95,110,101,120,116,80,116,114,0,42,109,95,118,
-97,108,117,101,65,114,114,97,121,80,116,114,0,42,109,95,107,101,121,65,
-114,114,97,121,80,116,114,0,109,95,99,111,110,118,101,120,69,112,115,105,
-108,111,110,0,109,95,112,108,97,110,97,114,69,112,115,105,108,111,110,0,
-109,95,101,113,117,97,108,86,101,114,116,101,120,84,104,114,101,115,104,111,
-108,100,0,109,95,101,100,103,101,68,105,115,116,97,110,99,101,84,104,114,
-101,115,104,111,108,100,0,109,95,122,101,114,111,65,114,101,97,84,104,114,
-101,115,104,111,108,100,0,109,95,110,101,120,116,83,105,122,101,0,109,95,
-104,97,115,104,84,97,98,108,101,83,105,122,101,0,109,95,110,117,109,86,
-97,108,117,101,115,0,109,95,110,117,109,75,101,121,115,0,109,95,103,105,
-109,112,97,99,116,83,117,98,84,121,112,101,0,42,109,95,117,110,115,99,
-97,108,101,100,80,111,105,110,116,115,70,108,111,97,116,80,116,114,0,42,
-109,95,117,110,115,99,97,108,101,100,80,111,105,110,116,115,68,111,117,98,
-108,101,80,116,114,0,109,95,110,117,109,85,110,115,99,97,108,101,100,80,
-111,105,110,116,115,0,109,95,112,97,100,100,105,110,103,51,91,52,93,0,
-42,109,95,98,114,111,97,100,112,104,97,115,101,72,97,110,100,108,101,0,
-42,109,95,99,111,108,108,105,115,105,111,110,83,104,97,112,101,0,42,109,
-95,114,111,111,116,67,111,108,108,105,115,105,111,110,83,104,97,112,101,0,
-109,95,119,111,114,108,100,84,114,97,110,115,102,111,114,109,0,109,95,105,
-110,116,101,114,112,111,108,97,116,105,111,110,87,111,114,108,100,84,114,97,
-110,115,102,111,114,109,0,109,95,105,110,116,101,114,112,111,108,97,116,105,
-111,110,76,105,110,101,97,114,86,101,108,111,99,105,116,121,0,109,95,105,
-110,116,101,114,112,111,108,97,116,105,111,110,65,110,103,117,108,97,114,86,
-101,108,111,99,105,116,121,0,109,95,97,110,105,115,111,116,114,111,112,105,
-99,70,114,105,99,116,105,111,110,0,109,95,99,111,110,116,97,99,116,80,
-114,111,99,101,115,115,105,110,103,84,104,114,101,115,104,111,108,100,0,109,
-95,100,101,97,99,116,105,118,97,116,105,111,110,84,105,109,101,0,109,95,
-102,114,105,99,116,105,111,110,0,109,95,114,101,115,116,105,116,117,116,105,
-111,110,0,109,95,104,105,116,70,114,97,99,116,105,111,110,0,109,95,99,
-99,100,83,119,101,112,116,83,112,104,101,114,101,82,97,100,105,117,115,0,
-109,95,99,99,100,77,111,116,105,111,110,84,104,114,101,115,104,111,108,100,
-0,109,95,104,97,115,65,110,105,115,111,116,114,111,112,105,99,70,114,105,
-99,116,105,111,110,0,109,95,99,111,108,108,105,115,105,111,110,70,108,97,
-103,115,0,109,95,105,115,108,97,110,100,84,97,103,49,0,109,95,99,111,
-109,112,97,110,105,111,110,73,100,0,109,95,97,99,116,105,118,97,116,105,
-111,110,83,116,97,116,101,49,0,109,95,105,110,116,101,114,110,97,108,84,
-121,112,101,0,109,95,99,104,101,99,107,67,111,108,108,105,100,101,87,105,
-116,104,0,109,95,99,111,108,108,105,115,105,111,110,79,98,106,101,99,116,
-68,97,116,97,0,109,95,105,110,118,73,110,101,114,116,105,97,84,101,110,
-115,111,114,87,111,114,108,100,0,109,95,108,105,110,101,97,114,86,101,108,
-111,99,105,116,121,0,109,95,97,110,103,117,108,97,114,86,101,108,111,99,
-105,116,121,0,109,95,97,110,103,117,108,97,114,70,97,99,116,111,114,0,
-109,95,108,105,110,101,97,114,70,97,99,116,111,114,0,109,95,103,114,97,
-118,105,116,121,0,109,95,103,114,97,118,105,116,121,95,97,99,99,101,108,
-101,114,97,116,105,111,110,0,109,95,105,110,118,73,110,101,114,116,105,97,
-76,111,99,97,108,0,109,95,116,111,116,97,108,70,111,114,99,101,0,109,
-95,116,111,116,97,108,84,111,114,113,117,101,0,109,95,105,110,118,101,114,
-115,101,77,97,115,115,0,109,95,108,105,110,101,97,114,68,97,109,112,105,
-110,103,0,109,95,97,110,103,117,108,97,114,68,97,109,112,105,110,103,0,
-109,95,97,100,100,105,116,105,111,110,97,108,68,97,109,112,105,110,103,70,
-97,99,116,111,114,0,109,95,97,100,100,105,116,105,111,110,97,108,76,105,
-110,101,97,114,68,97,109,112,105,110,103,84,104,114,101,115,104,111,108,100,
-83,113,114,0,109,95,97,100,100,105,116,105,111,110,97,108,65,110,103,117,
-108,97,114,68,97,109,112,105,110,103,84,104,114,101,115,104,111,108,100,83,
-113,114,0,109,95,97,100,100,105,116,105,111,110,97,108,65,110,103,117,108,
-97,114,68,97,109,112,105,110,103,70,97,99,116,111,114,0,109,95,108,105,
-110,101,97,114,83,108,101,101,112,105,110,103,84,104,114,101,115,104,111,108,
-100,0,109,95,97,110,103,117,108,97,114,83,108,101,101,112,105,110,103,84,
-104,114,101,115,104,111,108,100,0,109,95,97,100,100,105,116,105,111,110,97,
-108,68,97,109,112,105,110,103,0,109,95,110,117,109,67,111,110,115,116,114,
-97,105,110,116,82,111,119,115,0,110,117,98,0,42,109,95,114,98,65,0,
-42,109,95,114,98,66,0,109,95,111,98,106,101,99,116,84,121,112,101,0,
-109,95,117,115,101,114,67,111,110,115,116,114,97,105,110,116,84,121,112,101,
-0,109,95,117,115,101,114,67,111,110,115,116,114,97,105,110,116,73,100,0,
-109,95,110,101,101,100,115,70,101,101,100,98,97,99,107,0,109,95,97,112,
-112,108,105,101,100,73,109,112,117,108,115,101,0,109,95,100,98,103,68,114,
-97,119,83,105,122,101,0,109,95,100,105,115,97,98,108,101,67,111,108,108,
-105,115,105,111,110,115,66,101,116,119,101,101,110,76,105,110,107,101,100,66,
-111,100,105,101,115,0,109,95,111,118,101,114,114,105,100,101,78,117,109,83,
-111,108,118,101,114,73,116,101,114,97,116,105,111,110,115,0,109,95,98,114,
-101,97,107,105,110,103,73,109,112,117,108,115,101,84,104,114,101,115,104,111,
-108,100,0,109,95,105,115,69,110,97,98,108,101,100,0,109,95,116,121,112,
-101,67,111,110,115,116,114,97,105,110,116,68,97,116,97,0,109,95,112,105,
-118,111,116,73,110,65,0,109,95,112,105,118,111,116,73,110,66,0,109,95,
-114,98,65,70,114,97,109,101,0,109,95,114,98,66,70,114,97,109,101,0,
-109,95,117,115,101,82,101,102,101,114,101,110,99,101,70,114,97,109,101,65,
-0,109,95,97,110,103,117,108,97,114,79,110,108,121,0,109,95,101,110,97,
-98,108,101,65,110,103,117,108,97,114,77,111,116,111,114,0,109,95,109,111,
-116,111,114,84,97,114,103,101,116,86,101,108,111,99,105,116,121,0,109,95,
-109,97,120,77,111,116,111,114,73,109,112,117,108,115,101,0,109,95,108,111,
-119,101,114,76,105,109,105,116,0,109,95,117,112,112,101,114,76,105,109,105,
-116,0,109,95,108,105,109,105,116,83,111,102,116,110,101,115,115,0,109,95,
-98,105,97,115,70,97,99,116,111,114,0,109,95,114,101,108,97,120,97,116,
-105,111,110,70,97,99,116,111,114,0,109,95,115,119,105,110,103,83,112,97,
-110,49,0,109,95,115,119,105,110,103,83,112,97,110,50,0,109,95,116,119,
-105,115,116,83,112,97,110,0,109,95,100,97,109,112,105,110,103,0,109,95,
-108,105,110,101,97,114,85,112,112,101,114,76,105,109,105,116,0,109,95,108,
-105,110,101,97,114,76,111,119,101,114,76,105,109,105,116,0,109,95,97,110,
-103,117,108,97,114,85,112,112,101,114,76,105,109,105,116,0,109,95,97,110,
-103,117,108,97,114,76,111,119,101,114,76,105,109,105,116,0,109,95,117,115,
-101,76,105,110,101,97,114,82,101,102,101,114,101,110,99,101,70,114,97,109,
-101,65,0,109,95,117,115,101,79,102,102,115,101,116,70,111,114,67,111,110,
-115,116,114,97,105,110,116,70,114,97,109,101,0,109,95,54,100,111,102,68,
-97,116,97,0,109,95,115,112,114,105,110,103,69,110,97,98,108,101,100,91,
-54,93,0,109,95,101,113,117,105,108,105,98,114,105,117,109,80,111,105,110,
-116,91,54,93,0,109,95,115,112,114,105,110,103,83,116,105,102,102,110,101,
-115,115,91,54,93,0,109,95,115,112,114,105,110,103,68,97,109,112,105,110,
-103,91,54,93,0,109,95,108,105,110,101,97,114,83,116,105,102,102,110,101,
-115,115,0,109,95,97,110,103,117,108,97,114,83,116,105,102,102,110,101,115,
-115,0,109,95,118,111,108,117,109,101,83,116,105,102,102,110,101,115,115,0,
-42,109,95,109,97,116,101,114,105,97,108,0,109,95,112,111,115,105,116,105,
-111,110,0,109,95,112,114,101,118,105,111,117,115,80,111,115,105,116,105,111,
-110,0,109,95,118,101,108,111,99,105,116,121,0,109,95,97,99,99,117,109,
-117,108,97,116,101,100,70,111,114,99,101,0,109,95,110,111,114,109,97,108,
-0,109,95,97,114,101,97,0,109,95,97,116,116,97,99,104,0,109,95,110,
-111,100,101,73,110,100,105,99,101,115,91,50,93,0,109,95,114,101,115,116,
-76,101,110,103,116,104,0,109,95,98,98,101,110,100,105,110,103,0,109,95,
-110,111,100,101,73,110,100,105,99,101,115,91,51,93,0,109,95,114,101,115,
-116,65,114,101,97,0,109,95,99,48,91,52,93,0,109,95,110,111,100,101,
-73,110,100,105,99,101,115,91,52,93,0,109,95,114,101,115,116,86,111,108,
-117,109,101,0,109,95,99,49,0,109,95,99,50,0,109,95,99,48,0,109,
-95,108,111,99,97,108,70,114,97,109,101,0,42,109,95,114,105,103,105,100,
-66,111,100,121,0,109,95,110,111,100,101,73,110,100,101,120,0,109,95,97,
-101,114,111,77,111,100,101,108,0,109,95,98,97,117,109,103,97,114,116,101,
-0,109,95,100,114,97,103,0,109,95,108,105,102,116,0,109,95,112,114,101,
-115,115,117,114,101,0,109,95,118,111,108,117,109,101,0,109,95,100,121,110,
-97,109,105,99,70,114,105,99,116,105,111,110,0,109,95,112,111,115,101,77,
-97,116,99,104,0,109,95,114,105,103,105,100,67,111,110,116,97,99,116,72,
-97,114,100,110,101,115,115,0,109,95,107,105,110,101,116,105,99,67,111,110,
-116,97,99,116,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,67,
-111,110,116,97,99,116,72,97,114,100,110,101,115,115,0,109,95,97,110,99,
-104,111,114,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,82,105,
-103,105,100,67,108,117,115,116,101,114,72,97,114,100,110,101,115,115,0,109,
-95,115,111,102,116,75,105,110,101,116,105,99,67,108,117,115,116,101,114,72,
-97,114,100,110,101,115,115,0,109,95,115,111,102,116,83,111,102,116,67,108,
-117,115,116,101,114,72,97,114,100,110,101,115,115,0,109,95,115,111,102,116,
-82,105,103,105,100,67,108,117,115,116,101,114,73,109,112,117,108,115,101,83,
-112,108,105,116,0,109,95,115,111,102,116,75,105,110,101,116,105,99,67,108,
-117,115,116,101,114,73,109,112,117,108,115,101,83,112,108,105,116,0,109,95,
-115,111,102,116,83,111,102,116,67,108,117,115,116,101,114,73,109,112,117,108,
-115,101,83,112,108,105,116,0,109,95,109,97,120,86,111,108,117,109,101,0,
-109,95,116,105,109,101,83,99,97,108,101,0,109,95,118,101,108,111,99,105,
-116,121,73,116,101,114,97,116,105,111,110,115,0,109,95,112,111,115,105,116,
-105,111,110,73,116,101,114,97,116,105,111,110,115,0,109,95,100,114,105,102,
-116,73,116,101,114,97,116,105,111,110,115,0,109,95,99,108,117,115,116,101,
-114,73,116,101,114,97,116,105,111,110,115,0,109,95,114,111,116,0,109,95,
-115,99,97,108,101,0,109,95,97,113,113,0,109,95,99,111,109,0,42,109,
-95,112,111,115,105,116,105,111,110,115,0,42,109,95,119,101,105,103,104,116,
-115,0,109,95,110,117,109,80,111,115,105,116,105,111,110,115,0,109,95,110,
-117,109,87,101,105,103,116,115,0,109,95,98,118,111,108,117,109,101,0,109,
-95,98,102,114,97,109,101,0,109,95,102,114,97,109,101,120,102,111,114,109,
-0,109,95,108,111,99,105,105,0,109,95,105,110,118,119,105,0,109,95,118,
-105,109,112,117,108,115,101,115,91,50,93,0,109,95,100,105,109,112,117,108,
-115,101,115,91,50,93,0,109,95,108,118,0,109,95,97,118,0,42,109,95,
-102,114,97,109,101,114,101,102,115,0,42,109,95,110,111,100,101,73,110,100,
-105,99,101,115,0,42,109,95,109,97,115,115,101,115,0,109,95,110,117,109,
-70,114,97,109,101,82,101,102,115,0,109,95,110,117,109,78,111,100,101,115,
-0,109,95,110,117,109,77,97,115,115,101,115,0,109,95,105,100,109,97,115,
-115,0,109,95,105,109,97,115,115,0,109,95,110,118,105,109,112,117,108,115,
-101,115,0,109,95,110,100,105,109,112,117,108,115,101,115,0,109,95,110,100,
-97,109,112,105,110,103,0,109,95,108,100,97,109,112,105,110,103,0,109,95,
-97,100,97,109,112,105,110,103,0,109,95,109,97,116,99,104,105,110,103,0,
-109,95,109,97,120,83,101,108,102,67,111,108,108,105,115,105,111,110,73,109,
-112,117,108,115,101,0,109,95,115,101,108,102,67,111,108,108,105,115,105,111,
-110,73,109,112,117,108,115,101,70,97,99,116,111,114,0,109,95,99,111,110,
-116,97,105,110,115,65,110,99,104,111,114,0,109,95,99,111,108,108,105,100,
-101,0,109,95,99,108,117,115,116,101,114,73,110,100,101,120,0,42,109,95,
-98,111,100,121,65,0,42,109,95,98,111,100,121,66,0,109,95,114,101,102,
-115,91,50,93,0,109,95,99,102,109,0,109,95,101,114,112,0,109,95,115,
-112,108,105,116,0,109,95,100,101,108,101,116,101,0,109,95,114,101,108,80,
-111,115,105,116,105,111,110,91,50,93,0,109,95,98,111,100,121,65,116,121,
-112,101,0,109,95,98,111,100,121,66,116,121,112,101,0,109,95,106,111,105,
-110,116,84,121,112,101,0,42,109,95,112,111,115,101,0,42,42,109,95,109,
-97,116,101,114,105,97,108,115,0,42,109,95,110,111,100,101,115,0,42,109,
-95,108,105,110,107,115,0,42,109,95,102,97,99,101,115,0,42,109,95,116,
-101,116,114,97,104,101,100,114,97,0,42,109,95,97,110,99,104,111,114,115,
-0,42,109,95,99,108,117,115,116,101,114,115,0,42,109,95,106,111,105,110,
-116,115,0,109,95,110,117,109,77,97,116,101,114,105,97,108,115,0,109,95,
-110,117,109,76,105,110,107,115,0,109,95,110,117,109,70,97,99,101,115,0,
-109,95,110,117,109,84,101,116,114,97,104,101,100,114,97,0,109,95,110,117,
-109,65,110,99,104,111,114,115,0,109,95,110,117,109,67,108,117,115,116,101,
-114,115,0,109,95,110,117,109,74,111,105,110,116,115,0,109,95,99,111,110,
-102,105,103,0,84,89,80,69,72,0,0,0,99,104,97,114,0,117,99,104,
-97,114,0,115,104,111,114,116,0,117,115,104,111,114,116,0,105,110,116,0,
-108,111,110,103,0,117,108,111,110,103,0,102,108,111,97,116,0,100,111,117,
-98,108,101,0,118,111,105,100,0,80,111,105,110,116,101,114,65,114,114,97,
-121,0,98,116,80,104,121,115,105,99,115,83,121,115,116,101,109,0,76,105,
-115,116,66,97,115,101,0,98,116,86,101,99,116,111,114,51,70,108,111,97,
-116,68,97,116,97,0,98,116,86,101,99,116,111,114,51,68,111,117,98,108,
-101,68,97,116,97,0,98,116,77,97,116,114,105,120,51,120,51,70,108,111,
-97,116,68,97,116,97,0,98,116,77,97,116,114,105,120,51,120,51,68,111,
-117,98,108,101,68,97,116,97,0,98,116,84,114,97,110,115,102,111,114,109,
-70,108,111,97,116,68,97,116,97,0,98,116,84,114,97,110,115,102,111,114,
-109,68,111,117,98,108,101,68,97,116,97,0,98,116,66,118,104,83,117,98,
-116,114,101,101,73,110,102,111,68,97,116,97,0,98,116,79,112,116,105,109,
-105,122,101,100,66,118,104,78,111,100,101,70,108,111,97,116,68,97,116,97,
-0,98,116,79,112,116,105,109,105,122,101,100,66,118,104,78,111,100,101,68,
-111,117,98,108,101,68,97,116,97,0,98,116,81,117,97,110,116,105,122,101,
-100,66,118,104,78,111,100,101,68,97,116,97,0,98,116,81,117,97,110,116,
-105,122,101,100,66,118,104,70,108,111,97,116,68,97,116,97,0,98,116,81,
-117,97,110,116,105,122,101,100,66,118,104,68,111,117,98,108,101,68,97,116,
-97,0,98,116,67,111,108,108,105,115,105,111,110,83,104,97,112,101,68,97,
-116,97,0,98,116,83,116,97,116,105,99,80,108,97,110,101,83,104,97,112,
-101,68,97,116,97,0,98,116,67,111,110,118,101,120,73,110,116,101,114,110,
-97,108,83,104,97,112,101,68,97,116,97,0,98,116,80,111,115,105,116,105,
-111,110,65,110,100,82,97,100,105,117,115,0,98,116,77,117,108,116,105,83,
-112,104,101,114,101,83,104,97,112,101,68,97,116,97,0,98,116,73,110,116,
-73,110,100,101,120,68,97,116,97,0,98,116,83,104,111,114,116,73,110,116,
-73,110,100,101,120,68,97,116,97,0,98,116,83,104,111,114,116,73,110,116,
-73,110,100,101,120,84,114,105,112,108,101,116,68,97,116,97,0,98,116,67,
-104,97,114,73,110,100,101,120,84,114,105,112,108,101,116,68,97,116,97,0,
-98,116,77,101,115,104,80,97,114,116,68,97,116,97,0,98,116,83,116,114,
-105,100,105,110,103,77,101,115,104,73,110,116,101,114,102,97,99,101,68,97,
-116,97,0,98,116,84,114,105,97,110,103,108,101,77,101,115,104,83,104,97,
-112,101,68,97,116,97,0,98,116,84,114,105,97,110,103,108,101,73,110,102,
-111,77,97,112,68,97,116,97,0,98,116,83,99,97,108,101,100,84,114,105,
-97,110,103,108,101,77,101,115,104,83,104,97,112,101,68,97,116,97,0,98,
-116,67,111,109,112,111,117,110,100,83,104,97,112,101,67,104,105,108,100,68,
-97,116,97,0,98,116,67,111,109,112,111,117,110,100,83,104,97,112,101,68,
-97,116,97,0,98,116,67,121,108,105,110,100,101,114,83,104,97,112,101,68,
-97,116,97,0,98,116,67,97,112,115,117,108,101,83,104,97,112,101,68,97,
-116,97,0,98,116,84,114,105,97,110,103,108,101,73,110,102,111,68,97,116,
-97,0,98,116,71,73,109,112,97,99,116,77,101,115,104,83,104,97,112,101,
-68,97,116,97,0,98,116,67,111,110,118,101,120,72,117,108,108,83,104,97,
-112,101,68,97,116,97,0,98,116,67,111,108,108,105,115,105,111,110,79,98,
-106,101,99,116,68,111,117,98,108,101,68,97,116,97,0,98,116,67,111,108,
-108,105,115,105,111,110,79,98,106,101,99,116,70,108,111,97,116,68,97,116,
-97,0,98,116,82,105,103,105,100,66,111,100,121,70,108,111,97,116,68,97,
-116,97,0,98,116,82,105,103,105,100,66,111,100,121,68,111,117,98,108,101,
-68,97,116,97,0,98,116,67,111,110,115,116,114,97,105,110,116,73,110,102,
-111,49,0,98,116,84,121,112,101,100,67,111,110,115,116,114,97,105,110,116,
-68,97,116,97,0,98,116,82,105,103,105,100,66,111,100,121,68,97,116,97,
-0,98,116,80,111,105,110,116,50,80,111,105,110,116,67,111,110,115,116,114,
-97,105,110,116,70,108,111,97,116,68,97,116,97,0,98,116,80,111,105,110,
-116,50,80,111,105,110,116,67,111,110,115,116,114,97,105,110,116,68,111,117,
-98,108,101,68,97,116,97,0,98,116,72,105,110,103,101,67,111,110,115,116,
-114,97,105,110,116,68,111,117,98,108,101,68,97,116,97,0,98,116,72,105,
-110,103,101,67,111,110,115,116,114,97,105,110,116,70,108,111,97,116,68,97,
-116,97,0,98,116,67,111,110,101,84,119,105,115,116,67,111,110,115,116,114,
-97,105,110,116,68,97,116,97,0,98,116,71,101,110,101,114,105,99,54,68,
-111,102,67,111,110,115,116,114,97,105,110,116,68,97,116,97,0,98,116,71,
-101,110,101,114,105,99,54,68,111,102,83,112,114,105,110,103,67,111,110,115,
-116,114,97,105,110,116,68,97,116,97,0,98,116,83,108,105,100,101,114,67,
-111,110,115,116,114,97,105,110,116,68,97,116,97,0,83,111,102,116,66,111,
-100,121,77,97,116,101,114,105,97,108,68,97,116,97,0,83,111,102,116,66,
-111,100,121,78,111,100,101,68,97,116,97,0,83,111,102,116,66,111,100,121,
-76,105,110,107,68,97,116,97,0,83,111,102,116,66,111,100,121,70,97,99,
-101,68,97,116,97,0,83,111,102,116,66,111,100,121,84,101,116,114,97,68,
-97,116,97,0,83,111,102,116,82,105,103,105,100,65,110,99,104,111,114,68,
-97,116,97,0,83,111,102,116,66,111,100,121,67,111,110,102,105,103,68,97,
-116,97,0,83,111,102,116,66,111,100,121,80,111,115,101,68,97,116,97,0,
-83,111,102,116,66,111,100,121,67,108,117,115,116,101,114,68,97,116,97,0,
-98,116,83,111,102,116,66,111,100,121,74,111,105,110,116,68,97,116,97,0,
-98,116,83,111,102,116,66,111,100,121,70,108,111,97,116,68,97,116,97,0,
-84,76,69,78,1,0,1,0,2,0,2,0,4,0,4,0,4,0,4,0,
-8,0,0,0,16,0,48,0,16,0,16,0,32,0,48,0,96,0,64,0,
--128,0,20,0,48,0,80,0,16,0,96,0,-112,0,16,0,56,0,56,0,
-20,0,72,0,4,0,4,0,8,0,4,0,56,0,32,0,80,0,72,0,
-96,0,80,0,32,0,64,0,64,0,16,0,72,0,80,0,-40,1,8,1,
--16,1,-88,3,8,0,64,0,0,0,96,0,-128,0,104,1,-24,0,-32,0,
-8,1,104,1,-40,0,16,0,104,0,24,0,40,0,104,0,96,0,104,0,
--56,0,104,1,112,0,-40,1,83,84,82,67,61,0,0,0,10,0,3,0,
-4,0,0,0,4,0,1,0,9,0,2,0,11,0,3,0,10,0,3,0,
-10,0,4,0,10,0,5,0,12,0,2,0,9,0,6,0,9,0,7,0,
-13,0,1,0,7,0,8,0,14,0,1,0,8,0,8,0,15,0,1,0,
-13,0,9,0,16,0,1,0,14,0,9,0,17,0,2,0,15,0,10,0,
-13,0,11,0,18,0,2,0,16,0,10,0,14,0,11,0,19,0,4,0,
-4,0,12,0,4,0,13,0,2,0,14,0,2,0,15,0,20,0,6,0,
-13,0,16,0,13,0,17,0,4,0,18,0,4,0,19,0,4,0,20,0,
-0,0,21,0,21,0,6,0,14,0,16,0,14,0,17,0,4,0,18,0,
-4,0,19,0,4,0,20,0,0,0,21,0,22,0,3,0,2,0,14,0,
-2,0,15,0,4,0,22,0,23,0,12,0,13,0,23,0,13,0,24,0,
-13,0,25,0,4,0,26,0,4,0,27,0,4,0,28,0,4,0,29,0,
-20,0,30,0,22,0,31,0,19,0,32,0,4,0,33,0,4,0,34,0,
-24,0,12,0,14,0,23,0,14,0,24,0,14,0,25,0,4,0,26,0,
-4,0,27,0,4,0,28,0,4,0,29,0,21,0,30,0,22,0,31,0,
-4,0,33,0,4,0,34,0,19,0,32,0,25,0,3,0,0,0,35,0,
-4,0,36,0,0,0,37,0,26,0,5,0,25,0,38,0,13,0,39,0,
-13,0,40,0,7,0,41,0,0,0,21,0,27,0,5,0,25,0,38,0,
-13,0,39,0,13,0,42,0,7,0,43,0,4,0,44,0,28,0,2,0,
-13,0,45,0,7,0,46,0,29,0,4,0,27,0,47,0,28,0,48,0,
-4,0,49,0,0,0,37,0,30,0,1,0,4,0,50,0,31,0,2,0,
-2,0,50,0,0,0,51,0,32,0,2,0,2,0,52,0,0,0,51,0,
-33,0,2,0,0,0,52,0,0,0,53,0,34,0,8,0,13,0,54,0,
-14,0,55,0,30,0,56,0,32,0,57,0,33,0,58,0,31,0,59,0,
-4,0,60,0,4,0,61,0,35,0,4,0,34,0,62,0,13,0,63,0,
-4,0,64,0,0,0,37,0,36,0,7,0,25,0,38,0,35,0,65,0,
-23,0,66,0,24,0,67,0,37,0,68,0,7,0,43,0,0,0,69,0,
-38,0,2,0,36,0,70,0,13,0,39,0,39,0,4,0,17,0,71,0,
-25,0,72,0,4,0,73,0,7,0,74,0,40,0,4,0,25,0,38,0,
-39,0,75,0,4,0,76,0,7,0,43,0,41,0,3,0,27,0,47,0,
-4,0,77,0,0,0,37,0,42,0,3,0,27,0,47,0,4,0,77,0,
-0,0,37,0,43,0,4,0,4,0,78,0,7,0,79,0,7,0,80,0,
-7,0,81,0,37,0,14,0,4,0,82,0,4,0,83,0,43,0,84,0,
-4,0,85,0,7,0,86,0,7,0,87,0,7,0,88,0,7,0,89,0,
-7,0,90,0,4,0,91,0,4,0,92,0,4,0,93,0,4,0,94,0,
-0,0,37,0,44,0,5,0,25,0,38,0,35,0,65,0,13,0,39,0,
-7,0,43,0,4,0,95,0,45,0,5,0,27,0,47,0,13,0,96,0,
-14,0,97,0,4,0,98,0,0,0,99,0,46,0,24,0,9,0,100,0,
-9,0,101,0,25,0,102,0,0,0,35,0,18,0,103,0,18,0,104,0,
-14,0,105,0,14,0,106,0,14,0,107,0,8,0,108,0,8,0,109,0,
-8,0,110,0,8,0,111,0,8,0,112,0,8,0,113,0,8,0,114,0,
-4,0,115,0,4,0,116,0,4,0,117,0,4,0,118,0,4,0,119,0,
-4,0,120,0,4,0,121,0,0,0,37,0,47,0,23,0,9,0,100,0,
-9,0,101,0,25,0,102,0,0,0,35,0,17,0,103,0,17,0,104,0,
-13,0,105,0,13,0,106,0,13,0,107,0,7,0,108,0,7,0,109,0,
-7,0,110,0,7,0,111,0,7,0,112,0,7,0,113,0,7,0,114,0,
-4,0,115,0,4,0,116,0,4,0,117,0,4,0,118,0,4,0,119,0,
-4,0,120,0,4,0,121,0,48,0,21,0,47,0,122,0,15,0,123,0,
-13,0,124,0,13,0,125,0,13,0,126,0,13,0,127,0,13,0,-128,0,
-13,0,-127,0,13,0,-126,0,13,0,-125,0,13,0,-124,0,7,0,-123,0,
-7,0,-122,0,7,0,-121,0,7,0,-120,0,7,0,-119,0,7,0,-118,0,
-7,0,-117,0,7,0,-116,0,7,0,-115,0,4,0,-114,0,49,0,22,0,
-46,0,122,0,16,0,123,0,14,0,124,0,14,0,125,0,14,0,126,0,
-14,0,127,0,14,0,-128,0,14,0,-127,0,14,0,-126,0,14,0,-125,0,
-14,0,-124,0,8,0,-123,0,8,0,-122,0,8,0,-121,0,8,0,-120,0,
-8,0,-119,0,8,0,-118,0,8,0,-117,0,8,0,-116,0,8,0,-115,0,
-4,0,-114,0,0,0,37,0,50,0,2,0,4,0,-113,0,4,0,-112,0,
-51,0,13,0,52,0,-111,0,52,0,-110,0,0,0,35,0,4,0,-109,0,
-4,0,-108,0,4,0,-107,0,4,0,-106,0,7,0,-105,0,7,0,-104,0,
-4,0,-103,0,4,0,-102,0,7,0,-101,0,4,0,-100,0,53,0,3,0,
-51,0,-99,0,13,0,-98,0,13,0,-97,0,54,0,3,0,51,0,-99,0,
-14,0,-98,0,14,0,-97,0,55,0,13,0,51,0,-99,0,18,0,-96,0,
-18,0,-95,0,4,0,-94,0,4,0,-93,0,4,0,-92,0,7,0,-91,0,
-7,0,-90,0,7,0,-89,0,7,0,-88,0,7,0,-87,0,7,0,-86,0,
-7,0,-85,0,56,0,13,0,51,0,-99,0,17,0,-96,0,17,0,-95,0,
-4,0,-94,0,4,0,-93,0,4,0,-92,0,7,0,-91,0,7,0,-90,0,
-7,0,-89,0,7,0,-88,0,7,0,-87,0,7,0,-86,0,7,0,-85,0,
-57,0,11,0,51,0,-99,0,17,0,-96,0,17,0,-95,0,7,0,-84,0,
-7,0,-83,0,7,0,-82,0,7,0,-87,0,7,0,-86,0,7,0,-85,0,
-7,0,-81,0,0,0,21,0,58,0,9,0,51,0,-99,0,17,0,-96,0,
-17,0,-95,0,13,0,-80,0,13,0,-79,0,13,0,-78,0,13,0,-77,0,
-4,0,-76,0,4,0,-75,0,59,0,5,0,58,0,-74,0,4,0,-73,0,
-7,0,-72,0,7,0,-71,0,7,0,-70,0,60,0,9,0,51,0,-99,0,
-17,0,-96,0,17,0,-95,0,7,0,-80,0,7,0,-79,0,7,0,-78,0,
-7,0,-77,0,4,0,-76,0,4,0,-75,0,61,0,4,0,7,0,-69,0,
-7,0,-68,0,7,0,-67,0,4,0,78,0,62,0,10,0,61,0,-66,0,
-13,0,-65,0,13,0,-64,0,13,0,-63,0,13,0,-62,0,13,0,-61,0,
-7,0,-123,0,7,0,-60,0,4,0,-59,0,4,0,53,0,63,0,4,0,
-61,0,-66,0,4,0,-58,0,7,0,-57,0,4,0,-56,0,64,0,4,0,
-13,0,-61,0,61,0,-66,0,4,0,-55,0,7,0,-54,0,65,0,7,0,
-13,0,-53,0,61,0,-66,0,4,0,-52,0,7,0,-51,0,7,0,-50,0,
-7,0,-49,0,4,0,53,0,66,0,6,0,15,0,-48,0,13,0,-50,0,
-13,0,-47,0,52,0,-46,0,4,0,-45,0,7,0,-49,0,67,0,26,0,
-4,0,-44,0,7,0,-43,0,7,0,-81,0,7,0,-42,0,7,0,-41,0,
-7,0,-40,0,7,0,-39,0,7,0,-38,0,7,0,-37,0,7,0,-36,0,
-7,0,-35,0,7,0,-34,0,7,0,-33,0,7,0,-32,0,7,0,-31,0,
-7,0,-30,0,7,0,-29,0,7,0,-28,0,7,0,-27,0,7,0,-26,0,
-7,0,-25,0,4,0,-24,0,4,0,-23,0,4,0,-22,0,4,0,-21,0,
-4,0,116,0,68,0,12,0,15,0,-20,0,15,0,-19,0,15,0,-18,0,
-13,0,-17,0,13,0,-16,0,7,0,-15,0,4,0,-14,0,4,0,-13,0,
-4,0,-12,0,4,0,-11,0,7,0,-51,0,4,0,53,0,69,0,27,0,
-17,0,-10,0,15,0,-9,0,15,0,-8,0,13,0,-17,0,13,0,-7,0,
-13,0,-6,0,13,0,-5,0,13,0,-4,0,13,0,-3,0,4,0,-2,0,
-7,0,-1,0,4,0,0,1,4,0,1,1,4,0,2,1,7,0,3,1,
-7,0,4,1,4,0,5,1,4,0,6,1,7,0,7,1,7,0,8,1,
-7,0,9,1,7,0,10,1,7,0,11,1,7,0,12,1,4,0,13,1,
-4,0,14,1,4,0,15,1,70,0,12,0,9,0,16,1,9,0,17,1,
-13,0,18,1,7,0,19,1,7,0,20,1,7,0,21,1,4,0,22,1,
-13,0,23,1,4,0,24,1,4,0,25,1,4,0,26,1,4,0,53,0,
-71,0,19,0,47,0,122,0,68,0,27,1,61,0,28,1,62,0,29,1,
-63,0,30,1,64,0,31,1,65,0,32,1,66,0,33,1,69,0,34,1,
-70,0,35,1,4,0,36,1,4,0,1,1,4,0,37,1,4,0,38,1,
-4,0,39,1,4,0,40,1,4,0,41,1,4,0,42,1,67,0,43,1,
-};
+char sBulletDNAstr64[]= {
+char(83),char(68),char(78),char(65),char(78),char(65),char(77),char(69),char(63),char(1),char(0),char(0),char(109),char(95),char(115),char(105),char(122),char(101),char(0),char(109),
+char(95),char(99),char(97),char(112),char(97),char(99),char(105),char(116),char(121),char(0),char(42),char(109),char(95),char(100),char(97),char(116),char(97),char(0),char(109),char(95),
+char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(115),char(0),char(109),char(95),char(99),char(111),
+char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),char(115),char(0),char(109),char(95),char(99),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(115),char(0),char(42),char(102),char(105),char(114),char(115),char(116),char(0),char(42),char(108),char(97),char(115),
+char(116),char(0),char(109),char(95),char(102),char(108),char(111),char(97),char(116),char(115),char(91),char(52),char(93),char(0),char(109),char(95),char(101),char(108),char(91),char(51),
+char(93),char(0),char(109),char(95),char(98),char(97),char(115),char(105),char(115),char(0),char(109),char(95),char(111),char(114),char(105),char(103),char(105),char(110),char(0),char(109),
+char(95),char(114),char(111),char(111),char(116),char(78),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(115),char(117),char(98),
+char(116),char(114),char(101),char(101),char(83),char(105),char(122),char(101),char(0),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),
+char(65),char(97),char(98),char(98),char(77),char(105),char(110),char(91),char(51),char(93),char(0),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),
+char(101),char(100),char(65),char(97),char(98),char(98),char(77),char(97),char(120),char(91),char(51),char(93),char(0),char(109),char(95),char(97),char(97),char(98),char(98),char(77),
+char(105),char(110),char(79),char(114),char(103),char(0),char(109),char(95),char(97),char(97),char(98),char(98),char(77),char(97),char(120),char(79),char(114),char(103),char(0),char(109),
+char(95),char(101),char(115),char(99),char(97),char(112),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(115),char(117),char(98),char(80),char(97),
+char(114),char(116),char(0),char(109),char(95),char(116),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),
+char(95),char(112),char(97),char(100),char(91),char(52),char(93),char(0),char(109),char(95),char(101),char(115),char(99),char(97),char(112),char(101),char(73),char(110),char(100),char(101),
+char(120),char(79),char(114),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(98),
+char(118),char(104),char(65),char(97),char(98),char(98),char(77),char(105),char(110),char(0),char(109),char(95),char(98),char(118),char(104),char(65),char(97),char(98),char(98),char(77),
+char(97),char(120),char(0),char(109),char(95),char(98),char(118),char(104),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(97),char(116),char(105),char(111),char(110),
+char(0),char(109),char(95),char(99),char(117),char(114),char(78),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(117),char(115),
+char(101),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(97),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(110),char(117),char(109),char(67),
+char(111),char(110),char(116),char(105),char(103),char(117),char(111),char(117),char(115),char(76),char(101),char(97),char(102),char(78),char(111),char(100),char(101),char(115),char(0),char(109),
+char(95),char(110),char(117),char(109),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(67),char(111),char(110),char(116),char(105),char(103),char(117),
+char(111),char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(0),char(42),char(109),char(95),char(99),char(111),char(110),char(116),char(105),char(103),char(117),char(111),
+char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),
+char(122),char(101),char(100),char(67),char(111),char(110),char(116),char(105),char(103),char(117),char(111),char(117),char(115),char(78),char(111),char(100),char(101),char(115),char(80),char(116),
+char(114),char(0),char(42),char(109),char(95),char(115),char(117),char(98),char(84),char(114),char(101),char(101),char(73),char(110),char(102),char(111),char(80),char(116),char(114),char(0),
+char(109),char(95),char(116),char(114),char(97),char(118),char(101),char(114),char(115),char(97),char(108),char(77),char(111),char(100),char(101),char(0),char(109),char(95),char(110),char(117),
+char(109),char(83),char(117),char(98),char(116),char(114),char(101),char(101),char(72),char(101),char(97),char(100),char(101),char(114),char(115),char(0),char(42),char(109),char(95),char(110),
+char(97),char(109),char(101),char(0),char(109),char(95),char(115),char(104),char(97),char(112),char(101),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(112),char(97),
+char(100),char(100),char(105),char(110),char(103),char(91),char(52),char(93),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),
+char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(108),char(111),char(99),char(97),char(108),char(83),char(99),char(97),
+char(108),char(105),char(110),char(103),char(0),char(109),char(95),char(112),char(108),char(97),char(110),char(101),char(78),char(111),char(114),char(109),char(97),char(108),char(0),char(109),
+char(95),char(112),char(108),char(97),char(110),char(101),char(67),char(111),char(110),char(115),char(116),char(97),char(110),char(116),char(0),char(109),char(95),char(105),char(109),char(112),
+char(108),char(105),char(99),char(105),char(116),char(83),char(104),char(97),char(112),char(101),char(68),char(105),char(109),char(101),char(110),char(115),char(105),char(111),char(110),char(115),
+char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(77),char(97),char(114),char(103),char(105),char(110),char(0),char(109),
+char(95),char(112),char(97),char(100),char(100),char(105),char(110),char(103),char(0),char(109),char(95),char(112),char(111),char(115),char(0),char(109),char(95),char(114),char(97),char(100),
+char(105),char(117),char(115),char(0),char(109),char(95),char(99),char(111),char(110),char(118),char(101),char(120),char(73),char(110),char(116),char(101),char(114),char(110),char(97),char(108),
+char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(42),char(109),char(95),char(108),char(111),char(99),char(97),char(108),char(80),char(111),
+char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(109),char(95),char(108),char(111),char(99),
+char(97),char(108),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(114),char(114),char(97),char(121),char(83),char(105),char(122),char(101),char(0),
+char(109),char(95),char(118),char(97),char(108),char(117),char(101),char(0),char(109),char(95),char(112),char(97),char(100),char(91),char(50),char(93),char(0),char(109),char(95),char(118),
+char(97),char(108),char(117),char(101),char(115),char(91),char(51),char(93),char(0),char(109),char(95),char(112),char(97),char(100),char(0),char(42),char(109),char(95),char(118),char(101),
+char(114),char(116),char(105),char(99),char(101),char(115),char(51),char(102),char(0),char(42),char(109),char(95),char(118),char(101),char(114),char(116),char(105),char(99),char(101),char(115),
+char(51),char(100),char(0),char(42),char(109),char(95),char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(51),char(50),char(0),char(42),char(109),char(95),char(51),
+char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(49),char(54),char(0),char(42),char(109),char(95),char(51),char(105),char(110),char(100),char(105),char(99),char(101),
+char(115),char(56),char(0),char(42),char(109),char(95),char(105),char(110),char(100),char(105),char(99),char(101),char(115),char(49),char(54),char(0),char(109),char(95),char(110),char(117),
+char(109),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(86),char(101),char(114),char(116),
+char(105),char(99),char(101),char(115),char(0),char(42),char(109),char(95),char(109),char(101),char(115),char(104),char(80),char(97),char(114),char(116),char(115),char(80),char(116),char(114),
+char(0),char(109),char(95),char(115),char(99),char(97),char(108),char(105),char(110),char(103),char(0),char(109),char(95),char(110),char(117),char(109),char(77),char(101),char(115),char(104),
+char(80),char(97),char(114),char(116),char(115),char(0),char(109),char(95),char(109),char(101),char(115),char(104),char(73),char(110),char(116),char(101),char(114),char(102),char(97),char(99),
+char(101),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(70),char(108),char(111),char(97),char(116),char(66),
+char(118),char(104),char(0),char(42),char(109),char(95),char(113),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(68),char(111),char(117),char(98),char(108),
+char(101),char(66),char(118),char(104),char(0),char(42),char(109),char(95),char(116),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(102),char(111),
+char(77),char(97),char(112),char(0),char(109),char(95),char(112),char(97),char(100),char(51),char(91),char(52),char(93),char(0),char(109),char(95),char(116),char(114),char(105),char(109),
+char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(116),char(114),char(97),char(110),char(115),
+char(102),char(111),char(114),char(109),char(0),char(42),char(109),char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),char(97),char(112),char(101),char(0),char(109),
+char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),char(97),char(112),char(101),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(99),char(104),
+char(105),char(108),char(100),char(77),char(97),char(114),char(103),char(105),char(110),char(0),char(42),char(109),char(95),char(99),char(104),char(105),char(108),char(100),char(83),char(104),
+char(97),char(112),char(101),char(80),char(116),char(114),char(0),char(109),char(95),char(110),char(117),char(109),char(67),char(104),char(105),char(108),char(100),char(83),char(104),char(97),
+char(112),char(101),char(115),char(0),char(109),char(95),char(117),char(112),char(65),char(120),char(105),char(115),char(0),char(109),char(95),char(102),char(108),char(97),char(103),char(115),
+char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(86),char(48),char(86),char(49),char(65),char(110),char(103),char(108),char(101),char(0),char(109),char(95),char(101),
+char(100),char(103),char(101),char(86),char(49),char(86),char(50),char(65),char(110),char(103),char(108),char(101),char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(86),
+char(50),char(86),char(48),char(65),char(110),char(103),char(108),char(101),char(0),char(42),char(109),char(95),char(104),char(97),char(115),char(104),char(84),char(97),char(98),char(108),
+char(101),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(110),char(101),char(120),char(116),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(118),
+char(97),char(108),char(117),char(101),char(65),char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(42),char(109),char(95),char(107),char(101),char(121),char(65),
+char(114),char(114),char(97),char(121),char(80),char(116),char(114),char(0),char(109),char(95),char(99),char(111),char(110),char(118),char(101),char(120),char(69),char(112),char(115),char(105),
+char(108),char(111),char(110),char(0),char(109),char(95),char(112),char(108),char(97),char(110),char(97),char(114),char(69),char(112),char(115),char(105),char(108),char(111),char(110),char(0),
+char(109),char(95),char(101),char(113),char(117),char(97),char(108),char(86),char(101),char(114),char(116),char(101),char(120),char(84),char(104),char(114),char(101),char(115),char(104),char(111),
+char(108),char(100),char(0),char(109),char(95),char(101),char(100),char(103),char(101),char(68),char(105),char(115),char(116),char(97),char(110),char(99),char(101),char(84),char(104),char(114),
+char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(122),char(101),char(114),char(111),char(65),char(114),char(101),char(97),char(84),char(104),char(114),
+char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(110),char(101),char(120),char(116),char(83),char(105),char(122),char(101),char(0),char(109),char(95),
+char(104),char(97),char(115),char(104),char(84),char(97),char(98),char(108),char(101),char(83),char(105),char(122),char(101),char(0),char(109),char(95),char(110),char(117),char(109),char(86),
+char(97),char(108),char(117),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(75),char(101),char(121),char(115),char(0),char(109),char(95),char(103),char(105),
+char(109),char(112),char(97),char(99),char(116),char(83),char(117),char(98),char(84),char(121),char(112),char(101),char(0),char(42),char(109),char(95),char(117),char(110),char(115),char(99),
+char(97),char(108),char(101),char(100),char(80),char(111),char(105),char(110),char(116),char(115),char(70),char(108),char(111),char(97),char(116),char(80),char(116),char(114),char(0),char(42),
+char(109),char(95),char(117),char(110),char(115),char(99),char(97),char(108),char(101),char(100),char(80),char(111),char(105),char(110),char(116),char(115),char(68),char(111),char(117),char(98),
+char(108),char(101),char(80),char(116),char(114),char(0),char(109),char(95),char(110),char(117),char(109),char(85),char(110),char(115),char(99),char(97),char(108),char(101),char(100),char(80),
+char(111),char(105),char(110),char(116),char(115),char(0),char(109),char(95),char(112),char(97),char(100),char(100),char(105),char(110),char(103),char(51),char(91),char(52),char(93),char(0),
+char(42),char(109),char(95),char(98),char(114),char(111),char(97),char(100),char(112),char(104),char(97),char(115),char(101),char(72),char(97),char(110),char(100),char(108),char(101),char(0),
+char(42),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(0),char(42),char(109),
+char(95),char(114),char(111),char(111),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(83),char(104),char(97),char(112),char(101),char(0),
+char(109),char(95),char(119),char(111),char(114),char(108),char(100),char(84),char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(105),
+char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),char(111),char(110),char(87),char(111),char(114),char(108),char(100),char(84),char(114),char(97),
+char(110),char(115),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(105),char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),
+char(111),char(110),char(76),char(105),char(110),char(101),char(97),char(114),char(86),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(105),
+char(110),char(116),char(101),char(114),char(112),char(111),char(108),char(97),char(116),char(105),char(111),char(110),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(86),
+char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(105),char(115),char(111),char(116),char(114),char(111),char(112),char(105),
+char(99),char(70),char(114),char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(111),char(110),char(116),char(97),char(99),char(116),char(80),
+char(114),char(111),char(99),char(101),char(115),char(115),char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(100),char(101),char(97),char(99),char(116),char(105),char(118),char(97),char(116),char(105),char(111),char(110),char(84),char(105),char(109),char(101),char(0),char(109),char(95),
+char(102),char(114),char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(114),char(111),char(108),char(108),char(105),char(110),char(103),char(70),char(114),
+char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(105),char(116),char(117),char(116),char(105),char(111),char(110),
+char(0),char(109),char(95),char(104),char(105),char(116),char(70),char(114),char(97),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(99),char(100),
+char(83),char(119),char(101),char(112),char(116),char(83),char(112),char(104),char(101),char(114),char(101),char(82),char(97),char(100),char(105),char(117),char(115),char(0),char(109),char(95),
+char(99),char(99),char(100),char(77),char(111),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(104),char(97),char(115),char(65),char(110),char(105),char(115),char(111),char(116),char(114),char(111),char(112),char(105),char(99),char(70),char(114),char(105),char(99),char(116),
+char(105),char(111),char(110),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(70),char(108),char(97),char(103),char(115),
+char(0),char(109),char(95),char(105),char(115),char(108),char(97),char(110),char(100),char(84),char(97),char(103),char(49),char(0),char(109),char(95),char(99),char(111),char(109),char(112),
+char(97),char(110),char(105),char(111),char(110),char(73),char(100),char(0),char(109),char(95),char(97),char(99),char(116),char(105),char(118),char(97),char(116),char(105),char(111),char(110),
+char(83),char(116),char(97),char(116),char(101),char(49),char(0),char(109),char(95),char(105),char(110),char(116),char(101),char(114),char(110),char(97),char(108),char(84),char(121),char(112),
+char(101),char(0),char(109),char(95),char(99),char(104),char(101),char(99),char(107),char(67),char(111),char(108),char(108),char(105),char(100),char(101),char(87),char(105),char(116),char(104),
+char(0),char(109),char(95),char(115),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(0),char(109),char(95),char(103),char(114),char(97),char(118),
+char(105),char(116),char(121),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),
+char(116),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(105),char(110),char(118),char(73),char(110),char(101),char(114),char(116),char(105),char(97),char(84),char(101),
+char(110),char(115),char(111),char(114),char(87),char(111),char(114),char(108),char(100),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(86),char(101),
+char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(86),char(101),char(108),char(111),
+char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(70),char(97),char(99),char(116),char(111),char(114),
+char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(103),char(114),
+char(97),char(118),char(105),char(116),char(121),char(95),char(97),char(99),char(99),char(101),char(108),char(101),char(114),char(97),char(116),char(105),char(111),char(110),char(0),char(109),
+char(95),char(105),char(110),char(118),char(73),char(110),char(101),char(114),char(116),char(105),char(97),char(76),char(111),char(99),char(97),char(108),char(0),char(109),char(95),char(116),
+char(111),char(116),char(97),char(108),char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(116),char(111),char(116),char(97),char(108),char(84),char(111),char(114),
+char(113),char(117),char(101),char(0),char(109),char(95),char(105),char(110),char(118),char(101),char(114),char(115),char(101),char(77),char(97),char(115),char(115),char(0),char(109),char(95),
+char(108),char(105),char(110),char(101),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(110),char(103),char(117),
+char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(100),char(100),char(105),char(116),char(105),char(111),
+char(110),char(97),char(108),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(97),
+char(100),char(100),char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(76),char(105),char(110),char(101),char(97),char(114),char(68),char(97),char(109),char(112),char(105),
+char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(83),char(113),char(114),char(0),char(109),char(95),char(97),char(100),char(100),
+char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),
+char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(83),char(113),char(114),char(0),char(109),char(95),char(97),char(100),char(100),char(105),
+char(116),char(105),char(111),char(110),char(97),char(108),char(65),char(110),char(103),char(117),char(108),char(97),char(114),char(68),char(97),char(109),char(112),char(105),char(110),char(103),
+char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(108),char(101),char(101),char(112),
+char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),
+char(97),char(114),char(83),char(108),char(101),char(101),char(112),char(105),char(110),char(103),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),
+char(109),char(95),char(97),char(100),char(100),char(105),char(116),char(105),char(111),char(110),char(97),char(108),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(0),
+char(109),char(95),char(110),char(117),char(109),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(82),char(111),char(119),char(115),char(0),
+char(110),char(117),char(98),char(0),char(42),char(109),char(95),char(114),char(98),char(65),char(0),char(42),char(109),char(95),char(114),char(98),char(66),char(0),char(109),char(95),
+char(111),char(98),char(106),char(101),char(99),char(116),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(114),char(67),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(84),char(121),char(112),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(114),char(67),char(111),
+char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(73),char(100),char(0),char(109),char(95),char(110),char(101),char(101),char(100),char(115),char(70),char(101),
+char(101),char(100),char(98),char(97),char(99),char(107),char(0),char(109),char(95),char(97),char(112),char(112),char(108),char(105),char(101),char(100),char(73),char(109),char(112),char(117),
+char(108),char(115),char(101),char(0),char(109),char(95),char(100),char(98),char(103),char(68),char(114),char(97),char(119),char(83),char(105),char(122),char(101),char(0),char(109),char(95),
+char(100),char(105),char(115),char(97),char(98),char(108),char(101),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(115),char(66),char(101),char(116),
+char(119),char(101),char(101),char(110),char(76),char(105),char(110),char(107),char(101),char(100),char(66),char(111),char(100),char(105),char(101),char(115),char(0),char(109),char(95),char(111),
+char(118),char(101),char(114),char(114),char(105),char(100),char(101),char(78),char(117),char(109),char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(116),char(101),char(114),
+char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(98),char(114),char(101),char(97),char(107),char(105),char(110),char(103),char(73),char(109),char(112),
+char(117),char(108),char(115),char(101),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(105),char(115),char(69),char(110),
+char(97),char(98),char(108),char(101),char(100),char(0),char(109),char(95),char(116),char(121),char(112),char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(112),char(105),char(118),char(111),char(116),char(73),char(110),char(65),char(0),char(109),char(95),
+char(112),char(105),char(118),char(111),char(116),char(73),char(110),char(66),char(0),char(109),char(95),char(114),char(98),char(65),char(70),char(114),char(97),char(109),char(101),char(0),
+char(109),char(95),char(114),char(98),char(66),char(70),char(114),char(97),char(109),char(101),char(0),char(109),char(95),char(117),char(115),char(101),char(82),char(101),char(102),char(101),
+char(114),char(101),char(110),char(99),char(101),char(70),char(114),char(97),char(109),char(101),char(65),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),
+char(114),char(79),char(110),char(108),char(121),char(0),char(109),char(95),char(101),char(110),char(97),char(98),char(108),char(101),char(65),char(110),char(103),char(117),char(108),char(97),
+char(114),char(77),char(111),char(116),char(111),char(114),char(0),char(109),char(95),char(109),char(111),char(116),char(111),char(114),char(84),char(97),char(114),char(103),char(101),char(116),
+char(86),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(109),char(97),char(120),char(77),char(111),char(116),char(111),char(114),char(73),
+char(109),char(112),char(117),char(108),char(115),char(101),char(0),char(109),char(95),char(108),char(111),char(119),char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),
+char(109),char(95),char(117),char(112),char(112),char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(108),char(105),char(109),char(105),char(116),
+char(83),char(111),char(102),char(116),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(98),char(105),char(97),char(115),char(70),char(97),char(99),char(116),char(111),
+char(114),char(0),char(109),char(95),char(114),char(101),char(108),char(97),char(120),char(97),char(116),char(105),char(111),char(110),char(70),char(97),char(99),char(116),char(111),char(114),
+char(0),char(109),char(95),char(115),char(119),char(105),char(110),char(103),char(83),char(112),char(97),char(110),char(49),char(0),char(109),char(95),char(115),char(119),char(105),char(110),
+char(103),char(83),char(112),char(97),char(110),char(50),char(0),char(109),char(95),char(116),char(119),char(105),char(115),char(116),char(83),char(112),char(97),char(110),char(0),char(109),
+char(95),char(100),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(85),char(112),char(112),
+char(101),char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(76),char(111),char(119),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(85),char(112),char(112),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(97),char(110),char(103),char(117),char(108),char(97),char(114),char(76),char(111),char(119),char(101),
+char(114),char(76),char(105),char(109),char(105),char(116),char(0),char(109),char(95),char(117),char(115),char(101),char(76),char(105),char(110),char(101),char(97),char(114),char(82),char(101),
+char(102),char(101),char(114),char(101),char(110),char(99),char(101),char(70),char(114),char(97),char(109),char(101),char(65),char(0),char(109),char(95),char(117),char(115),char(101),char(79),
+char(102),char(102),char(115),char(101),char(116),char(70),char(111),char(114),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(70),char(114),
+char(97),char(109),char(101),char(0),char(109),char(95),char(54),char(100),char(111),char(102),char(68),char(97),char(116),char(97),char(0),char(109),char(95),char(115),char(112),char(114),
+char(105),char(110),char(103),char(69),char(110),char(97),char(98),char(108),char(101),char(100),char(91),char(54),char(93),char(0),char(109),char(95),char(101),char(113),char(117),char(105),
+char(108),char(105),char(98),char(114),char(105),char(117),char(109),char(80),char(111),char(105),char(110),char(116),char(91),char(54),char(93),char(0),char(109),char(95),char(115),char(112),
+char(114),char(105),char(110),char(103),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(91),char(54),char(93),char(0),char(109),char(95),char(115),
+char(112),char(114),char(105),char(110),char(103),char(68),char(97),char(109),char(112),char(105),char(110),char(103),char(91),char(54),char(93),char(0),char(109),char(95),char(116),char(97),
+char(117),char(0),char(109),char(95),char(116),char(105),char(109),char(101),char(83),char(116),char(101),char(112),char(0),char(109),char(95),char(109),char(97),char(120),char(69),char(114),
+char(114),char(111),char(114),char(82),char(101),char(100),char(117),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(115),char(111),char(114),char(0),char(109),
+char(95),char(101),char(114),char(112),char(0),char(109),char(95),char(101),char(114),char(112),char(50),char(0),char(109),char(95),char(103),char(108),char(111),char(98),char(97),char(108),
+char(67),char(102),char(109),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(80),char(101),
+char(110),char(101),char(116),char(114),char(97),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(84),char(117),char(114),char(110),char(69),char(114),char(112),
+char(0),char(109),char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(108),char(111),char(112),char(0),char(109),char(95),char(119),char(97),char(114),char(109),
+char(115),char(116),char(97),char(114),char(116),char(105),char(110),char(103),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(109),char(97),char(120),
+char(71),char(121),char(114),char(111),char(115),char(99),char(111),char(112),char(105),char(99),char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(115),char(105),
+char(110),char(103),char(108),char(101),char(65),char(120),char(105),char(115),char(82),char(111),char(108),char(108),char(105),char(110),char(103),char(70),char(114),char(105),char(99),char(116),
+char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),char(95),char(110),char(117),char(109),char(73),char(116),
+char(101),char(114),char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(115),char(111),char(108),char(118),char(101),char(114),char(77),char(111),char(100),
+char(101),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(105),char(110),char(103),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(82),char(101),
+char(115),char(116),char(105),char(116),char(117),char(116),char(105),char(111),char(110),char(84),char(104),char(114),char(101),char(115),char(104),char(111),char(108),char(100),char(0),char(109),
+char(95),char(109),char(105),char(110),char(105),char(109),char(117),char(109),char(83),char(111),char(108),char(118),char(101),char(114),char(66),char(97),char(116),char(99),char(104),char(83),
+char(105),char(122),char(101),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(0),char(109),
+char(95),char(108),char(105),char(110),char(101),char(97),char(114),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(97),
+char(110),char(103),char(117),char(108),char(97),char(114),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(118),char(111),
+char(108),char(117),char(109),char(101),char(83),char(116),char(105),char(102),char(102),char(110),char(101),char(115),char(115),char(0),char(42),char(109),char(95),char(109),char(97),char(116),
+char(101),char(114),char(105),char(97),char(108),char(0),char(109),char(95),char(112),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(112),
+char(114),char(101),char(118),char(105),char(111),char(117),char(115),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(118),char(101),
+char(108),char(111),char(99),char(105),char(116),char(121),char(0),char(109),char(95),char(97),char(99),char(99),char(117),char(109),char(117),char(108),char(97),char(116),char(101),char(100),
+char(70),char(111),char(114),char(99),char(101),char(0),char(109),char(95),char(110),char(111),char(114),char(109),char(97),char(108),char(0),char(109),char(95),char(97),char(114),char(101),
+char(97),char(0),char(109),char(95),char(97),char(116),char(116),char(97),char(99),char(104),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),
+char(105),char(99),char(101),char(115),char(91),char(50),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(76),char(101),char(110),char(103),char(116),char(104),
+char(0),char(109),char(95),char(98),char(98),char(101),char(110),char(100),char(105),char(110),char(103),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),
+char(100),char(105),char(99),char(101),char(115),char(91),char(51),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(65),char(114),char(101),char(97),char(0),
+char(109),char(95),char(99),char(48),char(91),char(52),char(93),char(0),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(105),char(99),char(101),
+char(115),char(91),char(52),char(93),char(0),char(109),char(95),char(114),char(101),char(115),char(116),char(86),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),
+char(99),char(49),char(0),char(109),char(95),char(99),char(50),char(0),char(109),char(95),char(99),char(48),char(0),char(109),char(95),char(108),char(111),char(99),char(97),char(108),
+char(70),char(114),char(97),char(109),char(101),char(0),char(42),char(109),char(95),char(114),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(0),char(109),
+char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(101),char(120),char(0),char(109),char(95),char(97),char(101),char(114),char(111),char(77),char(111),char(100),
+char(101),char(108),char(0),char(109),char(95),char(98),char(97),char(117),char(109),char(103),char(97),char(114),char(116),char(101),char(0),char(109),char(95),char(100),char(114),char(97),
+char(103),char(0),char(109),char(95),char(108),char(105),char(102),char(116),char(0),char(109),char(95),char(112),char(114),char(101),char(115),char(115),char(117),char(114),char(101),char(0),
+char(109),char(95),char(118),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(100),char(121),char(110),char(97),char(109),char(105),char(99),char(70),char(114),
+char(105),char(99),char(116),char(105),char(111),char(110),char(0),char(109),char(95),char(112),char(111),char(115),char(101),char(77),char(97),char(116),char(99),char(104),char(0),char(109),
+char(95),char(114),char(105),char(103),char(105),char(100),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(72),char(97),char(114),char(100),char(110),char(101),char(115),
+char(115),char(0),char(109),char(95),char(107),char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(111),char(110),char(116),char(97),char(99),char(116),char(72),char(97),
+char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(67),char(111),char(110),char(116),char(97),char(99),char(116),
+char(72),char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(97),char(110),char(99),char(104),char(111),char(114),char(72),char(97),char(114),
+char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(67),char(108),char(117),
+char(115),char(116),char(101),char(114),char(72),char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(75),
+char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(72),char(97),char(114),char(100),char(110),char(101),char(115),
+char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(83),char(111),char(102),char(116),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(72),
+char(97),char(114),char(100),char(110),char(101),char(115),char(115),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(67),
+char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),char(116),char(0),char(109),
+char(95),char(115),char(111),char(102),char(116),char(75),char(105),char(110),char(101),char(116),char(105),char(99),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(73),
+char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),char(116),char(0),char(109),char(95),char(115),char(111),char(102),char(116),char(83),char(111),
+char(102),char(116),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(83),char(112),char(108),char(105),
+char(116),char(0),char(109),char(95),char(109),char(97),char(120),char(86),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(116),char(105),char(109),char(101),
+char(83),char(99),char(97),char(108),char(101),char(0),char(109),char(95),char(118),char(101),char(108),char(111),char(99),char(105),char(116),char(121),char(73),char(116),char(101),char(114),
+char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(112),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(73),char(116),char(101),
+char(114),char(97),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(100),char(114),char(105),char(102),char(116),char(73),char(116),char(101),char(114),char(97),
+char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(99),char(108),char(117),char(115),char(116),char(101),char(114),char(73),char(116),char(101),char(114),char(97),
+char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(114),char(111),char(116),char(0),char(109),char(95),char(115),char(99),char(97),char(108),char(101),char(0),
+char(109),char(95),char(97),char(113),char(113),char(0),char(109),char(95),char(99),char(111),char(109),char(0),char(42),char(109),char(95),char(112),char(111),char(115),char(105),char(116),
+char(105),char(111),char(110),char(115),char(0),char(42),char(109),char(95),char(119),char(101),char(105),char(103),char(104),char(116),char(115),char(0),char(109),char(95),char(110),char(117),
+char(109),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(87),char(101),char(105),char(103),
+char(116),char(115),char(0),char(109),char(95),char(98),char(118),char(111),char(108),char(117),char(109),char(101),char(0),char(109),char(95),char(98),char(102),char(114),char(97),char(109),
+char(101),char(0),char(109),char(95),char(102),char(114),char(97),char(109),char(101),char(120),char(102),char(111),char(114),char(109),char(0),char(109),char(95),char(108),char(111),char(99),
+char(105),char(105),char(0),char(109),char(95),char(105),char(110),char(118),char(119),char(105),char(0),char(109),char(95),char(118),char(105),char(109),char(112),char(117),char(108),char(115),
+char(101),char(115),char(91),char(50),char(93),char(0),char(109),char(95),char(100),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(91),char(50),char(93),
+char(0),char(109),char(95),char(108),char(118),char(0),char(109),char(95),char(97),char(118),char(0),char(42),char(109),char(95),char(102),char(114),char(97),char(109),char(101),char(114),
+char(101),char(102),char(115),char(0),char(42),char(109),char(95),char(110),char(111),char(100),char(101),char(73),char(110),char(100),char(105),char(99),char(101),char(115),char(0),char(42),
+char(109),char(95),char(109),char(97),char(115),char(115),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(70),char(114),char(97),char(109),char(101),char(82),
+char(101),char(102),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(78),char(111),char(100),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),
+char(77),char(97),char(115),char(115),char(101),char(115),char(0),char(109),char(95),char(105),char(100),char(109),char(97),char(115),char(115),char(0),char(109),char(95),char(105),char(109),
+char(97),char(115),char(115),char(0),char(109),char(95),char(110),char(118),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(0),char(109),char(95),char(110),
+char(100),char(105),char(109),char(112),char(117),char(108),char(115),char(101),char(115),char(0),char(109),char(95),char(110),char(100),char(97),char(109),char(112),char(105),char(110),char(103),
+char(0),char(109),char(95),char(108),char(100),char(97),char(109),char(112),char(105),char(110),char(103),char(0),char(109),char(95),char(97),char(100),char(97),char(109),char(112),char(105),
+char(110),char(103),char(0),char(109),char(95),char(109),char(97),char(116),char(99),char(104),char(105),char(110),char(103),char(0),char(109),char(95),char(109),char(97),char(120),char(83),
+char(101),char(108),char(102),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(73),char(109),char(112),char(117),char(108),char(115),char(101),char(0),
+char(109),char(95),char(115),char(101),char(108),char(102),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(73),char(109),char(112),char(117),char(108),
+char(115),char(101),char(70),char(97),char(99),char(116),char(111),char(114),char(0),char(109),char(95),char(99),char(111),char(110),char(116),char(97),char(105),char(110),char(115),char(65),
+char(110),char(99),char(104),char(111),char(114),char(0),char(109),char(95),char(99),char(111),char(108),char(108),char(105),char(100),char(101),char(0),char(109),char(95),char(99),char(108),
+char(117),char(115),char(116),char(101),char(114),char(73),char(110),char(100),char(101),char(120),char(0),char(42),char(109),char(95),char(98),char(111),char(100),char(121),char(65),char(0),
+char(42),char(109),char(95),char(98),char(111),char(100),char(121),char(66),char(0),char(109),char(95),char(114),char(101),char(102),char(115),char(91),char(50),char(93),char(0),char(109),
+char(95),char(99),char(102),char(109),char(0),char(109),char(95),char(115),char(112),char(108),char(105),char(116),char(0),char(109),char(95),char(100),char(101),char(108),char(101),char(116),
+char(101),char(0),char(109),char(95),char(114),char(101),char(108),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(91),char(50),char(93),char(0),char(109),
+char(95),char(98),char(111),char(100),char(121),char(65),char(116),char(121),char(112),char(101),char(0),char(109),char(95),char(98),char(111),char(100),char(121),char(66),char(116),char(121),
+char(112),char(101),char(0),char(109),char(95),char(106),char(111),char(105),char(110),char(116),char(84),char(121),char(112),char(101),char(0),char(42),char(109),char(95),char(112),char(111),
+char(115),char(101),char(0),char(42),char(42),char(109),char(95),char(109),char(97),char(116),char(101),char(114),char(105),char(97),char(108),char(115),char(0),char(42),char(109),char(95),
+char(110),char(111),char(100),char(101),char(115),char(0),char(42),char(109),char(95),char(108),char(105),char(110),char(107),char(115),char(0),char(42),char(109),char(95),char(102),char(97),
+char(99),char(101),char(115),char(0),char(42),char(109),char(95),char(116),char(101),char(116),char(114),char(97),char(104),char(101),char(100),char(114),char(97),char(0),char(42),char(109),
+char(95),char(97),char(110),char(99),char(104),char(111),char(114),char(115),char(0),char(42),char(109),char(95),char(99),char(108),char(117),char(115),char(116),char(101),char(114),char(115),
+char(0),char(42),char(109),char(95),char(106),char(111),char(105),char(110),char(116),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(77),char(97),char(116),char(101),
+char(114),char(105),char(97),char(108),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(76),char(105),char(110),char(107),char(115),char(0),char(109),char(95),char(110),
+char(117),char(109),char(70),char(97),char(99),char(101),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(84),char(101),char(116),char(114),char(97),char(104),char(101),
+char(100),char(114),char(97),char(0),char(109),char(95),char(110),char(117),char(109),char(65),char(110),char(99),char(104),char(111),char(114),char(115),char(0),char(109),char(95),char(110),
+char(117),char(109),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(115),char(0),char(109),char(95),char(110),char(117),char(109),char(74),char(111),char(105),char(110),
+char(116),char(115),char(0),char(109),char(95),char(99),char(111),char(110),char(102),char(105),char(103),char(0),char(84),char(89),char(80),char(69),char(76),char(0),char(0),char(0),
+char(99),char(104),char(97),char(114),char(0),char(117),char(99),char(104),char(97),char(114),char(0),char(115),char(104),char(111),char(114),char(116),char(0),char(117),char(115),char(104),
+char(111),char(114),char(116),char(0),char(105),char(110),char(116),char(0),char(108),char(111),char(110),char(103),char(0),char(117),char(108),char(111),char(110),char(103),char(0),char(102),
+char(108),char(111),char(97),char(116),char(0),char(100),char(111),char(117),char(98),char(108),char(101),char(0),char(118),char(111),char(105),char(100),char(0),char(80),char(111),char(105),
+char(110),char(116),char(101),char(114),char(65),char(114),char(114),char(97),char(121),char(0),char(98),char(116),char(80),char(104),char(121),char(115),char(105),char(99),char(115),char(83),
+char(121),char(115),char(116),char(101),char(109),char(0),char(76),char(105),char(115),char(116),char(66),char(97),char(115),char(101),char(0),char(98),char(116),char(86),char(101),char(99),
+char(116),char(111),char(114),char(51),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(86),char(101),char(99),char(116),
+char(111),char(114),char(51),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(97),char(116),char(114),
+char(105),char(120),char(51),char(120),char(51),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(97),char(116),
+char(114),char(105),char(120),char(51),char(120),char(51),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),
+char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(84),char(114),char(97),char(110),char(115),char(102),char(111),char(114),char(109),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(66),char(118),char(104),char(83),char(117),char(98),char(116),char(114),char(101),char(101),char(73),char(110),char(102),char(111),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(79),char(112),char(116),char(105),char(109),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(70),
+char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(79),char(112),char(116),char(105),char(109),char(105),char(122),char(101),char(100),
+char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(78),char(111),char(100),char(101),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(70),char(108),char(111),char(97),char(116),
+char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(81),char(117),char(97),char(110),char(116),char(105),char(122),char(101),char(100),char(66),char(118),char(104),char(68),
+char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),
+char(110),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),char(116),char(97),char(116),char(105),char(99),char(80),
+char(108),char(97),char(110),char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(118),
+char(101),char(120),char(73),char(110),char(116),char(101),char(114),char(110),char(97),char(108),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(80),char(111),char(115),char(105),char(116),char(105),char(111),char(110),char(65),char(110),char(100),char(82),char(97),char(100),char(105),char(117),char(115),char(0),
+char(98),char(116),char(77),char(117),char(108),char(116),char(105),char(83),char(112),char(104),char(101),char(114),char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(104),char(111),char(114),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(104),char(111),char(114),char(116),char(73),char(110),char(116),char(73),char(110),char(100),char(101),char(120),char(84),char(114),char(105),char(112),char(108),char(101),char(116),
+char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(104),char(97),char(114),char(73),char(110),char(100),char(101),char(120),char(84),char(114),char(105),char(112),
+char(108),char(101),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(77),char(101),char(115),char(104),char(80),char(97),char(114),char(116),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(83),char(116),char(114),char(105),char(100),char(105),char(110),char(103),char(77),char(101),char(115),char(104),char(73),char(110),char(116),
+char(101),char(114),char(102),char(97),char(99),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),char(97),char(110),char(103),char(108),
+char(101),char(77),char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),
+char(97),char(110),char(103),char(108),char(101),char(73),char(110),char(102),char(111),char(77),char(97),char(112),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),
+char(99),char(97),char(108),char(101),char(100),char(84),char(114),char(105),char(97),char(110),char(103),char(108),char(101),char(77),char(101),char(115),char(104),char(83),char(104),char(97),
+char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(109),char(112),char(111),char(117),char(110),char(100),char(83),char(104),char(97),
+char(112),char(101),char(67),char(104),char(105),char(108),char(100),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(109),char(112),char(111),char(117),
+char(110),char(100),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(121),char(108),char(105),char(110),char(100),
+char(101),char(114),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(97),char(112),char(115),char(117),char(108),
+char(101),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(84),char(114),char(105),char(97),char(110),char(103),char(108),
+char(101),char(73),char(110),char(102),char(111),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(73),char(109),char(112),char(97),char(99),char(116),char(77),
+char(101),char(115),char(104),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(118),char(101),
+char(120),char(72),char(117),char(108),char(108),char(83),char(104),char(97),char(112),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),
+char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(108),char(108),char(105),char(115),char(105),char(111),char(110),char(79),char(98),char(106),char(101),char(99),char(116),
+char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(68),char(121),char(110),char(97),char(109),char(105),char(99),char(115),
+char(87),char(111),char(114),char(108),char(100),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),
+char(110),char(116),char(97),char(99),char(116),char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(68),char(111),char(117),char(98),char(108),
+char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(68),char(121),char(110),char(97),char(109),char(105),char(99),char(115),char(87),char(111),char(114),char(108),
+char(100),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(116),char(97),char(99),char(116),
+char(83),char(111),char(108),char(118),char(101),char(114),char(73),char(110),char(102),char(111),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),
+char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(73),char(110),char(102),char(111),char(49),
+char(0),char(98),char(116),char(84),char(121),char(112),char(101),char(100),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),
+char(116),char(97),char(0),char(98),char(116),char(82),char(105),char(103),char(105),char(100),char(66),char(111),char(100),char(121),char(68),char(97),char(116),char(97),char(0),char(98),
+char(116),char(80),char(111),char(105),char(110),char(116),char(50),char(80),char(111),char(105),char(110),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(80),char(111),char(105),char(110),char(116),char(50),
+char(80),char(111),char(105),char(110),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(111),char(117),char(98),char(108),
+char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(72),char(105),char(110),char(103),char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),
+char(105),char(110),char(116),char(68),char(111),char(117),char(98),char(108),char(101),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(72),char(105),char(110),char(103),
+char(101),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),
+char(0),char(98),char(116),char(67),char(111),char(110),char(101),char(84),char(119),char(105),char(115),char(116),char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),
+char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(101),char(110),char(101),char(114),char(105),char(99),char(54),char(68),char(111),char(102),
+char(67),char(111),char(110),char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(71),char(101),char(110),
+char(101),char(114),char(105),char(99),char(54),char(68),char(111),char(102),char(83),char(112),char(114),char(105),char(110),char(103),char(67),char(111),char(110),char(115),char(116),char(114),
+char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),char(83),char(108),char(105),char(100),char(101),char(114),char(67),char(111),char(110),
+char(115),char(116),char(114),char(97),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),
+char(77),char(97),char(116),char(101),char(114),char(105),char(97),char(108),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),
+char(121),char(78),char(111),char(100),char(101),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(76),char(105),
+char(110),char(107),char(68),char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(70),char(97),char(99),char(101),char(68),
+char(97),char(116),char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(84),char(101),char(116),char(114),char(97),char(68),char(97),char(116),
+char(97),char(0),char(83),char(111),char(102),char(116),char(82),char(105),char(103),char(105),char(100),char(65),char(110),char(99),char(104),char(111),char(114),char(68),char(97),char(116),
+char(97),char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(67),char(111),char(110),char(102),char(105),char(103),char(68),char(97),char(116),char(97),
+char(0),char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(80),char(111),char(115),char(101),char(68),char(97),char(116),char(97),char(0),char(83),char(111),
+char(102),char(116),char(66),char(111),char(100),char(121),char(67),char(108),char(117),char(115),char(116),char(101),char(114),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(74),char(111),char(105),char(110),char(116),char(68),char(97),char(116),char(97),char(0),char(98),char(116),
+char(83),char(111),char(102),char(116),char(66),char(111),char(100),char(121),char(70),char(108),char(111),char(97),char(116),char(68),char(97),char(116),char(97),char(0),char(0),char(0),
+char(84),char(76),char(69),char(78),char(1),char(0),char(1),char(0),char(2),char(0),char(2),char(0),char(4),char(0),char(4),char(0),char(4),char(0),char(4),char(0),
+char(8),char(0),char(0),char(0),char(16),char(0),char(48),char(0),char(16),char(0),char(16),char(0),char(32),char(0),char(48),char(0),char(96),char(0),char(64),char(0),
+char(-128),char(0),char(20),char(0),char(48),char(0),char(80),char(0),char(16),char(0),char(96),char(0),char(-112),char(0),char(16),char(0),char(56),char(0),char(56),char(0),
+char(20),char(0),char(72),char(0),char(4),char(0),char(4),char(0),char(8),char(0),char(4),char(0),char(56),char(0),char(32),char(0),char(80),char(0),char(72),char(0),
+char(96),char(0),char(80),char(0),char(32),char(0),char(64),char(0),char(64),char(0),char(16),char(0),char(72),char(0),char(80),char(0),char(-32),char(1),char(16),char(1),
+char(-72),char(0),char(-104),char(0),char(104),char(0),char(88),char(0),char(-8),char(1),char(-80),char(3),char(8),char(0),char(64),char(0),char(0),char(0),char(96),char(0),
+char(-128),char(0),char(104),char(1),char(-24),char(0),char(-32),char(0),char(8),char(1),char(104),char(1),char(-40),char(0),char(16),char(0),char(104),char(0),char(24),char(0),
+char(40),char(0),char(104),char(0),char(96),char(0),char(104),char(0),char(-56),char(0),char(104),char(1),char(112),char(0),char(-32),char(1),char(83),char(84),char(82),char(67),
+char(65),char(0),char(0),char(0),char(10),char(0),char(3),char(0),char(4),char(0),char(0),char(0),char(4),char(0),char(1),char(0),char(9),char(0),char(2),char(0),
+char(11),char(0),char(3),char(0),char(10),char(0),char(3),char(0),char(10),char(0),char(4),char(0),char(10),char(0),char(5),char(0),char(12),char(0),char(2),char(0),
+char(9),char(0),char(6),char(0),char(9),char(0),char(7),char(0),char(13),char(0),char(1),char(0),char(7),char(0),char(8),char(0),char(14),char(0),char(1),char(0),
+char(8),char(0),char(8),char(0),char(15),char(0),char(1),char(0),char(13),char(0),char(9),char(0),char(16),char(0),char(1),char(0),char(14),char(0),char(9),char(0),
+char(17),char(0),char(2),char(0),char(15),char(0),char(10),char(0),char(13),char(0),char(11),char(0),char(18),char(0),char(2),char(0),char(16),char(0),char(10),char(0),
+char(14),char(0),char(11),char(0),char(19),char(0),char(4),char(0),char(4),char(0),char(12),char(0),char(4),char(0),char(13),char(0),char(2),char(0),char(14),char(0),
+char(2),char(0),char(15),char(0),char(20),char(0),char(6),char(0),char(13),char(0),char(16),char(0),char(13),char(0),char(17),char(0),char(4),char(0),char(18),char(0),
+char(4),char(0),char(19),char(0),char(4),char(0),char(20),char(0),char(0),char(0),char(21),char(0),char(21),char(0),char(6),char(0),char(14),char(0),char(16),char(0),
+char(14),char(0),char(17),char(0),char(4),char(0),char(18),char(0),char(4),char(0),char(19),char(0),char(4),char(0),char(20),char(0),char(0),char(0),char(21),char(0),
+char(22),char(0),char(3),char(0),char(2),char(0),char(14),char(0),char(2),char(0),char(15),char(0),char(4),char(0),char(22),char(0),char(23),char(0),char(12),char(0),
+char(13),char(0),char(23),char(0),char(13),char(0),char(24),char(0),char(13),char(0),char(25),char(0),char(4),char(0),char(26),char(0),char(4),char(0),char(27),char(0),
+char(4),char(0),char(28),char(0),char(4),char(0),char(29),char(0),char(20),char(0),char(30),char(0),char(22),char(0),char(31),char(0),char(19),char(0),char(32),char(0),
+char(4),char(0),char(33),char(0),char(4),char(0),char(34),char(0),char(24),char(0),char(12),char(0),char(14),char(0),char(23),char(0),char(14),char(0),char(24),char(0),
+char(14),char(0),char(25),char(0),char(4),char(0),char(26),char(0),char(4),char(0),char(27),char(0),char(4),char(0),char(28),char(0),char(4),char(0),char(29),char(0),
+char(21),char(0),char(30),char(0),char(22),char(0),char(31),char(0),char(4),char(0),char(33),char(0),char(4),char(0),char(34),char(0),char(19),char(0),char(32),char(0),
+char(25),char(0),char(3),char(0),char(0),char(0),char(35),char(0),char(4),char(0),char(36),char(0),char(0),char(0),char(37),char(0),char(26),char(0),char(5),char(0),
+char(25),char(0),char(38),char(0),char(13),char(0),char(39),char(0),char(13),char(0),char(40),char(0),char(7),char(0),char(41),char(0),char(0),char(0),char(21),char(0),
+char(27),char(0),char(5),char(0),char(25),char(0),char(38),char(0),char(13),char(0),char(39),char(0),char(13),char(0),char(42),char(0),char(7),char(0),char(43),char(0),
+char(4),char(0),char(44),char(0),char(28),char(0),char(2),char(0),char(13),char(0),char(45),char(0),char(7),char(0),char(46),char(0),char(29),char(0),char(4),char(0),
+char(27),char(0),char(47),char(0),char(28),char(0),char(48),char(0),char(4),char(0),char(49),char(0),char(0),char(0),char(37),char(0),char(30),char(0),char(1),char(0),
+char(4),char(0),char(50),char(0),char(31),char(0),char(2),char(0),char(2),char(0),char(50),char(0),char(0),char(0),char(51),char(0),char(32),char(0),char(2),char(0),
+char(2),char(0),char(52),char(0),char(0),char(0),char(51),char(0),char(33),char(0),char(2),char(0),char(0),char(0),char(52),char(0),char(0),char(0),char(53),char(0),
+char(34),char(0),char(8),char(0),char(13),char(0),char(54),char(0),char(14),char(0),char(55),char(0),char(30),char(0),char(56),char(0),char(32),char(0),char(57),char(0),
+char(33),char(0),char(58),char(0),char(31),char(0),char(59),char(0),char(4),char(0),char(60),char(0),char(4),char(0),char(61),char(0),char(35),char(0),char(4),char(0),
+char(34),char(0),char(62),char(0),char(13),char(0),char(63),char(0),char(4),char(0),char(64),char(0),char(0),char(0),char(37),char(0),char(36),char(0),char(7),char(0),
+char(25),char(0),char(38),char(0),char(35),char(0),char(65),char(0),char(23),char(0),char(66),char(0),char(24),char(0),char(67),char(0),char(37),char(0),char(68),char(0),
+char(7),char(0),char(43),char(0),char(0),char(0),char(69),char(0),char(38),char(0),char(2),char(0),char(36),char(0),char(70),char(0),char(13),char(0),char(39),char(0),
+char(39),char(0),char(4),char(0),char(17),char(0),char(71),char(0),char(25),char(0),char(72),char(0),char(4),char(0),char(73),char(0),char(7),char(0),char(74),char(0),
+char(40),char(0),char(4),char(0),char(25),char(0),char(38),char(0),char(39),char(0),char(75),char(0),char(4),char(0),char(76),char(0),char(7),char(0),char(43),char(0),
+char(41),char(0),char(3),char(0),char(27),char(0),char(47),char(0),char(4),char(0),char(77),char(0),char(0),char(0),char(37),char(0),char(42),char(0),char(3),char(0),
+char(27),char(0),char(47),char(0),char(4),char(0),char(77),char(0),char(0),char(0),char(37),char(0),char(43),char(0),char(4),char(0),char(4),char(0),char(78),char(0),
+char(7),char(0),char(79),char(0),char(7),char(0),char(80),char(0),char(7),char(0),char(81),char(0),char(37),char(0),char(14),char(0),char(4),char(0),char(82),char(0),
+char(4),char(0),char(83),char(0),char(43),char(0),char(84),char(0),char(4),char(0),char(85),char(0),char(7),char(0),char(86),char(0),char(7),char(0),char(87),char(0),
+char(7),char(0),char(88),char(0),char(7),char(0),char(89),char(0),char(7),char(0),char(90),char(0),char(4),char(0),char(91),char(0),char(4),char(0),char(92),char(0),
+char(4),char(0),char(93),char(0),char(4),char(0),char(94),char(0),char(0),char(0),char(37),char(0),char(44),char(0),char(5),char(0),char(25),char(0),char(38),char(0),
+char(35),char(0),char(65),char(0),char(13),char(0),char(39),char(0),char(7),char(0),char(43),char(0),char(4),char(0),char(95),char(0),char(45),char(0),char(5),char(0),
+char(27),char(0),char(47),char(0),char(13),char(0),char(96),char(0),char(14),char(0),char(97),char(0),char(4),char(0),char(98),char(0),char(0),char(0),char(99),char(0),
+char(46),char(0),char(25),char(0),char(9),char(0),char(100),char(0),char(9),char(0),char(101),char(0),char(25),char(0),char(102),char(0),char(0),char(0),char(35),char(0),
+char(18),char(0),char(103),char(0),char(18),char(0),char(104),char(0),char(14),char(0),char(105),char(0),char(14),char(0),char(106),char(0),char(14),char(0),char(107),char(0),
+char(8),char(0),char(108),char(0),char(8),char(0),char(109),char(0),char(8),char(0),char(110),char(0),char(8),char(0),char(111),char(0),char(8),char(0),char(112),char(0),
+char(8),char(0),char(113),char(0),char(8),char(0),char(114),char(0),char(8),char(0),char(115),char(0),char(4),char(0),char(116),char(0),char(4),char(0),char(117),char(0),
+char(4),char(0),char(118),char(0),char(4),char(0),char(119),char(0),char(4),char(0),char(120),char(0),char(4),char(0),char(121),char(0),char(4),char(0),char(122),char(0),
+char(0),char(0),char(37),char(0),char(47),char(0),char(25),char(0),char(9),char(0),char(100),char(0),char(9),char(0),char(101),char(0),char(25),char(0),char(102),char(0),
+char(0),char(0),char(35),char(0),char(17),char(0),char(103),char(0),char(17),char(0),char(104),char(0),char(13),char(0),char(105),char(0),char(13),char(0),char(106),char(0),
+char(13),char(0),char(107),char(0),char(7),char(0),char(108),char(0),char(7),char(0),char(109),char(0),char(7),char(0),char(110),char(0),char(7),char(0),char(111),char(0),
+char(7),char(0),char(112),char(0),char(7),char(0),char(113),char(0),char(7),char(0),char(114),char(0),char(7),char(0),char(115),char(0),char(4),char(0),char(116),char(0),
+char(4),char(0),char(117),char(0),char(4),char(0),char(118),char(0),char(4),char(0),char(119),char(0),char(4),char(0),char(120),char(0),char(4),char(0),char(121),char(0),
+char(4),char(0),char(122),char(0),char(0),char(0),char(37),char(0),char(48),char(0),char(2),char(0),char(49),char(0),char(123),char(0),char(14),char(0),char(124),char(0),
+char(50),char(0),char(2),char(0),char(51),char(0),char(123),char(0),char(13),char(0),char(124),char(0),char(52),char(0),char(21),char(0),char(47),char(0),char(125),char(0),
+char(15),char(0),char(126),char(0),char(13),char(0),char(127),char(0),char(13),char(0),char(-128),char(0),char(13),char(0),char(-127),char(0),char(13),char(0),char(-126),char(0),
+char(13),char(0),char(124),char(0),char(13),char(0),char(-125),char(0),char(13),char(0),char(-124),char(0),char(13),char(0),char(-123),char(0),char(13),char(0),char(-122),char(0),
+char(7),char(0),char(-121),char(0),char(7),char(0),char(-120),char(0),char(7),char(0),char(-119),char(0),char(7),char(0),char(-118),char(0),char(7),char(0),char(-117),char(0),
+char(7),char(0),char(-116),char(0),char(7),char(0),char(-115),char(0),char(7),char(0),char(-114),char(0),char(7),char(0),char(-113),char(0),char(4),char(0),char(-112),char(0),
+char(53),char(0),char(22),char(0),char(46),char(0),char(125),char(0),char(16),char(0),char(126),char(0),char(14),char(0),char(127),char(0),char(14),char(0),char(-128),char(0),
+char(14),char(0),char(-127),char(0),char(14),char(0),char(-126),char(0),char(14),char(0),char(124),char(0),char(14),char(0),char(-125),char(0),char(14),char(0),char(-124),char(0),
+char(14),char(0),char(-123),char(0),char(14),char(0),char(-122),char(0),char(8),char(0),char(-121),char(0),char(8),char(0),char(-120),char(0),char(8),char(0),char(-119),char(0),
+char(8),char(0),char(-118),char(0),char(8),char(0),char(-117),char(0),char(8),char(0),char(-116),char(0),char(8),char(0),char(-115),char(0),char(8),char(0),char(-114),char(0),
+char(8),char(0),char(-113),char(0),char(4),char(0),char(-112),char(0),char(0),char(0),char(37),char(0),char(54),char(0),char(2),char(0),char(4),char(0),char(-111),char(0),
+char(4),char(0),char(-110),char(0),char(55),char(0),char(13),char(0),char(56),char(0),char(-109),char(0),char(56),char(0),char(-108),char(0),char(0),char(0),char(35),char(0),
+char(4),char(0),char(-107),char(0),char(4),char(0),char(-106),char(0),char(4),char(0),char(-105),char(0),char(4),char(0),char(-104),char(0),char(7),char(0),char(-103),char(0),
+char(7),char(0),char(-102),char(0),char(4),char(0),char(-101),char(0),char(4),char(0),char(-100),char(0),char(7),char(0),char(-99),char(0),char(4),char(0),char(-98),char(0),
+char(57),char(0),char(3),char(0),char(55),char(0),char(-97),char(0),char(13),char(0),char(-96),char(0),char(13),char(0),char(-95),char(0),char(58),char(0),char(3),char(0),
+char(55),char(0),char(-97),char(0),char(14),char(0),char(-96),char(0),char(14),char(0),char(-95),char(0),char(59),char(0),char(13),char(0),char(55),char(0),char(-97),char(0),
+char(18),char(0),char(-94),char(0),char(18),char(0),char(-93),char(0),char(4),char(0),char(-92),char(0),char(4),char(0),char(-91),char(0),char(4),char(0),char(-90),char(0),
+char(7),char(0),char(-89),char(0),char(7),char(0),char(-88),char(0),char(7),char(0),char(-87),char(0),char(7),char(0),char(-86),char(0),char(7),char(0),char(-85),char(0),
+char(7),char(0),char(-84),char(0),char(7),char(0),char(-83),char(0),char(60),char(0),char(13),char(0),char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),
+char(17),char(0),char(-93),char(0),char(4),char(0),char(-92),char(0),char(4),char(0),char(-91),char(0),char(4),char(0),char(-90),char(0),char(7),char(0),char(-89),char(0),
+char(7),char(0),char(-88),char(0),char(7),char(0),char(-87),char(0),char(7),char(0),char(-86),char(0),char(7),char(0),char(-85),char(0),char(7),char(0),char(-84),char(0),
+char(7),char(0),char(-83),char(0),char(61),char(0),char(11),char(0),char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),
+char(7),char(0),char(-82),char(0),char(7),char(0),char(-81),char(0),char(7),char(0),char(-80),char(0),char(7),char(0),char(-85),char(0),char(7),char(0),char(-84),char(0),
+char(7),char(0),char(-83),char(0),char(7),char(0),char(-79),char(0),char(0),char(0),char(21),char(0),char(62),char(0),char(9),char(0),char(55),char(0),char(-97),char(0),
+char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),char(13),char(0),char(-78),char(0),char(13),char(0),char(-77),char(0),char(13),char(0),char(-76),char(0),
+char(13),char(0),char(-75),char(0),char(4),char(0),char(-74),char(0),char(4),char(0),char(-73),char(0),char(63),char(0),char(5),char(0),char(62),char(0),char(-72),char(0),
+char(4),char(0),char(-71),char(0),char(7),char(0),char(-70),char(0),char(7),char(0),char(-69),char(0),char(7),char(0),char(-68),char(0),char(64),char(0),char(9),char(0),
+char(55),char(0),char(-97),char(0),char(17),char(0),char(-94),char(0),char(17),char(0),char(-93),char(0),char(7),char(0),char(-78),char(0),char(7),char(0),char(-77),char(0),
+char(7),char(0),char(-76),char(0),char(7),char(0),char(-75),char(0),char(4),char(0),char(-74),char(0),char(4),char(0),char(-73),char(0),char(49),char(0),char(22),char(0),
+char(8),char(0),char(-67),char(0),char(8),char(0),char(-79),char(0),char(8),char(0),char(110),char(0),char(8),char(0),char(-66),char(0),char(8),char(0),char(112),char(0),
+char(8),char(0),char(-65),char(0),char(8),char(0),char(-64),char(0),char(8),char(0),char(-63),char(0),char(8),char(0),char(-62),char(0),char(8),char(0),char(-61),char(0),
+char(8),char(0),char(-60),char(0),char(8),char(0),char(-59),char(0),char(8),char(0),char(-58),char(0),char(8),char(0),char(-57),char(0),char(8),char(0),char(-56),char(0),
+char(8),char(0),char(-55),char(0),char(4),char(0),char(-54),char(0),char(4),char(0),char(-53),char(0),char(4),char(0),char(-52),char(0),char(4),char(0),char(-51),char(0),
+char(4),char(0),char(-50),char(0),char(0),char(0),char(37),char(0),char(51),char(0),char(22),char(0),char(7),char(0),char(-67),char(0),char(7),char(0),char(-79),char(0),
+char(7),char(0),char(110),char(0),char(7),char(0),char(-66),char(0),char(7),char(0),char(112),char(0),char(7),char(0),char(-65),char(0),char(7),char(0),char(-64),char(0),
+char(7),char(0),char(-63),char(0),char(7),char(0),char(-62),char(0),char(7),char(0),char(-61),char(0),char(7),char(0),char(-60),char(0),char(7),char(0),char(-59),char(0),
+char(7),char(0),char(-58),char(0),char(7),char(0),char(-57),char(0),char(7),char(0),char(-56),char(0),char(7),char(0),char(-55),char(0),char(4),char(0),char(-54),char(0),
+char(4),char(0),char(-53),char(0),char(4),char(0),char(-52),char(0),char(4),char(0),char(-51),char(0),char(4),char(0),char(-50),char(0),char(0),char(0),char(37),char(0),
+char(65),char(0),char(4),char(0),char(7),char(0),char(-49),char(0),char(7),char(0),char(-48),char(0),char(7),char(0),char(-47),char(0),char(4),char(0),char(78),char(0),
+char(66),char(0),char(10),char(0),char(65),char(0),char(-46),char(0),char(13),char(0),char(-45),char(0),char(13),char(0),char(-44),char(0),char(13),char(0),char(-43),char(0),
+char(13),char(0),char(-42),char(0),char(13),char(0),char(-41),char(0),char(7),char(0),char(-121),char(0),char(7),char(0),char(-40),char(0),char(4),char(0),char(-39),char(0),
+char(4),char(0),char(53),char(0),char(67),char(0),char(4),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-38),char(0),char(7),char(0),char(-37),char(0),
+char(4),char(0),char(-36),char(0),char(68),char(0),char(4),char(0),char(13),char(0),char(-41),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-35),char(0),
+char(7),char(0),char(-34),char(0),char(69),char(0),char(7),char(0),char(13),char(0),char(-33),char(0),char(65),char(0),char(-46),char(0),char(4),char(0),char(-32),char(0),
+char(7),char(0),char(-31),char(0),char(7),char(0),char(-30),char(0),char(7),char(0),char(-29),char(0),char(4),char(0),char(53),char(0),char(70),char(0),char(6),char(0),
+char(15),char(0),char(-28),char(0),char(13),char(0),char(-30),char(0),char(13),char(0),char(-27),char(0),char(56),char(0),char(-26),char(0),char(4),char(0),char(-25),char(0),
+char(7),char(0),char(-29),char(0),char(71),char(0),char(26),char(0),char(4),char(0),char(-24),char(0),char(7),char(0),char(-23),char(0),char(7),char(0),char(-79),char(0),
+char(7),char(0),char(-22),char(0),char(7),char(0),char(-21),char(0),char(7),char(0),char(-20),char(0),char(7),char(0),char(-19),char(0),char(7),char(0),char(-18),char(0),
+char(7),char(0),char(-17),char(0),char(7),char(0),char(-16),char(0),char(7),char(0),char(-15),char(0),char(7),char(0),char(-14),char(0),char(7),char(0),char(-13),char(0),
+char(7),char(0),char(-12),char(0),char(7),char(0),char(-11),char(0),char(7),char(0),char(-10),char(0),char(7),char(0),char(-9),char(0),char(7),char(0),char(-8),char(0),
+char(7),char(0),char(-7),char(0),char(7),char(0),char(-6),char(0),char(7),char(0),char(-5),char(0),char(4),char(0),char(-4),char(0),char(4),char(0),char(-3),char(0),
+char(4),char(0),char(-2),char(0),char(4),char(0),char(-1),char(0),char(4),char(0),char(117),char(0),char(72),char(0),char(12),char(0),char(15),char(0),char(0),char(1),
+char(15),char(0),char(1),char(1),char(15),char(0),char(2),char(1),char(13),char(0),char(3),char(1),char(13),char(0),char(4),char(1),char(7),char(0),char(5),char(1),
+char(4),char(0),char(6),char(1),char(4),char(0),char(7),char(1),char(4),char(0),char(8),char(1),char(4),char(0),char(9),char(1),char(7),char(0),char(-31),char(0),
+char(4),char(0),char(53),char(0),char(73),char(0),char(27),char(0),char(17),char(0),char(10),char(1),char(15),char(0),char(11),char(1),char(15),char(0),char(12),char(1),
+char(13),char(0),char(3),char(1),char(13),char(0),char(13),char(1),char(13),char(0),char(14),char(1),char(13),char(0),char(15),char(1),char(13),char(0),char(16),char(1),
+char(13),char(0),char(17),char(1),char(4),char(0),char(18),char(1),char(7),char(0),char(19),char(1),char(4),char(0),char(20),char(1),char(4),char(0),char(21),char(1),
+char(4),char(0),char(22),char(1),char(7),char(0),char(23),char(1),char(7),char(0),char(24),char(1),char(4),char(0),char(25),char(1),char(4),char(0),char(26),char(1),
+char(7),char(0),char(27),char(1),char(7),char(0),char(28),char(1),char(7),char(0),char(29),char(1),char(7),char(0),char(30),char(1),char(7),char(0),char(31),char(1),
+char(7),char(0),char(32),char(1),char(4),char(0),char(33),char(1),char(4),char(0),char(34),char(1),char(4),char(0),char(35),char(1),char(74),char(0),char(12),char(0),
+char(9),char(0),char(36),char(1),char(9),char(0),char(37),char(1),char(13),char(0),char(38),char(1),char(7),char(0),char(39),char(1),char(7),char(0),char(-63),char(0),
+char(7),char(0),char(40),char(1),char(4),char(0),char(41),char(1),char(13),char(0),char(42),char(1),char(4),char(0),char(43),char(1),char(4),char(0),char(44),char(1),
+char(4),char(0),char(45),char(1),char(4),char(0),char(53),char(0),char(75),char(0),char(19),char(0),char(47),char(0),char(125),char(0),char(72),char(0),char(46),char(1),
+char(65),char(0),char(47),char(1),char(66),char(0),char(48),char(1),char(67),char(0),char(49),char(1),char(68),char(0),char(50),char(1),char(69),char(0),char(51),char(1),
+char(70),char(0),char(52),char(1),char(73),char(0),char(53),char(1),char(74),char(0),char(54),char(1),char(4),char(0),char(55),char(1),char(4),char(0),char(21),char(1),
+char(4),char(0),char(56),char(1),char(4),char(0),char(57),char(1),char(4),char(0),char(58),char(1),char(4),char(0),char(59),char(1),char(4),char(0),char(60),char(1),
+char(4),char(0),char(61),char(1),char(71),char(0),char(62),char(1),};
int sBulletDNAlen64= sizeof(sBulletDNAstr64);
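The sBulletDNAstr64/sBulletDNAlen64 pair above is machine-generated "DNA": a self-describing table of struct names, field names and type sizes that the .bullet file format carries so older and newer readers can interpret each other's chunks. A minimal sketch of how such a table is typically selected at runtime (illustrative only; the real call site is in btSerializer.h, and the 32-bit counterparts sBulletDNAstr/sBulletDNAlen are assumed from the naming):

    // pick the DNA blob matching the pointer size of this build (sketch)
    const bool  is64Bit = (sizeof(void*) == 8);
    const char* dna     = is64Bit ? sBulletDNAstr64 : sBulletDNAstr;
    const int   dnaLen  = is64Bit ? sBulletDNAlen64 : sBulletDNAlen;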
diff --git a/extern/bullet2/src/LinearMath/btSerializer.h b/extern/bullet2/src/LinearMath/btSerializer.h
index 76f3cf32f8e..c5bc96b7839 100644
--- a/extern/bullet2/src/LinearMath/btSerializer.h
+++ b/extern/bullet2/src/LinearMath/btSerializer.h
@@ -122,6 +122,7 @@ public:
#define BT_ARRAY_CODE BT_MAKE_ID('A','R','A','Y')
#define BT_SBMATERIAL_CODE BT_MAKE_ID('S','B','M','T')
#define BT_SBNODE_CODE BT_MAKE_ID('S','B','N','D')
+#define BT_DYNAMICSWORLD_CODE BT_MAKE_ID('D','W','L','D')
#define BT_DNA_CODE BT_MAKE_ID('D','N','A','1')
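The new BT_DYNAMICSWORLD_CODE chunk id follows the same BT_MAKE_ID pattern as its neighbours: four ASCII characters packed into a single 32-bit tag so chunk headers can be compared as plain integers. A sketch of the usual shape of such a macro (the exact definition in btSerializer.h also provides a big-endian variant):

    // little-endian flavour (sketch): 'D','W','L','D' becomes the integer tag "DWLD"
    #define MAKE_ID_SKETCH(a,b,c,d) ( (int)(d)<<24 | (int)(c)<<16 | (b)<<8 | (a) )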
@@ -256,7 +257,7 @@ protected:
*/
intPtr = (int*)cp;
- assert(strncmp(cp, "TYPE", 4)==0); intPtr++;
+ btAssert(strncmp(cp, "TYPE", 4)==0); intPtr++;
if (!littleEndian)
*intPtr = btSwapEndian(*intPtr);
@@ -284,7 +285,7 @@ protected:
// Parse type lens
intPtr = (int*)cp;
- assert(strncmp(cp, "TLEN", 4)==0); intPtr++;
+ btAssert(strncmp(cp, "TLEN", 4)==0); intPtr++;
dataLen = (int)mTypes.size();
@@ -311,7 +312,7 @@ protected:
intPtr = (int*)shtPtr;
cp = (char*)intPtr;
- assert(strncmp(cp, "STRC", 4)==0); intPtr++;
+ btAssert(strncmp(cp, "STRC", 4)==0); intPtr++;
if (!littleEndian)
*intPtr = btSwapEndian(*intPtr);
@@ -438,7 +439,7 @@ public:
buffer[9] = '2';
buffer[10] = '8';
- buffer[11] = '0';
+ buffer[11] = '1';
}
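Two things change in btSerializer.h here: the raw assert() calls on the DNA sanity checks ("TYPE", "TLEN", "STRC") become btAssert() so they follow Bullet's own debug configuration, and the ASCII version digits written into the file header move from "280" to "281" to match the updated sources. A sketch of the usual pattern behind btAssert (the real definition lives in btScalar.h and may differ in detail):

    // sketch only: expands to a real assert in debug builds, to nothing otherwise
    #if defined(BT_DEBUG) || defined(_DEBUG)
    #define btAssert(x) assert(x)
    #else
    #define btAssert(x)
    #endif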
diff --git a/extern/bullet2/src/LinearMath/btTransform.h b/extern/bullet2/src/LinearMath/btTransform.h
index 5e52d183acb..907627379bf 100644
--- a/extern/bullet2/src/LinearMath/btTransform.h
+++ b/extern/bullet2/src/LinearMath/btTransform.h
@@ -31,7 +31,7 @@ subject to the following restrictions:
/**@brief The btTransform class supports rigid transforms with only translation and rotation and no scaling/shear.
*It can be used in combination with btVector3, btQuaternion and btMatrix3x3 linear algebra classes. */
-class btTransform {
+ATTRIBUTE_ALIGNED16(class) btTransform {
///Storage for the rotation
btMatrix3x3 m_basis;
@@ -93,9 +93,7 @@ public:
/**@brief Return the transform of the vector */
SIMD_FORCE_INLINE btVector3 operator()(const btVector3& x) const
{
- return btVector3(m_basis[0].dot(x) + m_origin.x(),
- m_basis[1].dot(x) + m_origin.y(),
- m_basis[2].dot(x) + m_origin.z());
+ return x.dot3(m_basis[0], m_basis[1], m_basis[2]) + m_origin;
}
/**@brief Return the transform of the vector */
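The operator() change above replaces three separate row dot products with a single dot3() call, which lets the SIMD backends compute all three components at once. A scalar sketch of the assumed dot3 semantics (not the Bullet implementation), showing why the two forms are equivalent:

    // scalar sketch of the assumed dot3 semantics
    static inline btVector3 dot3_sketch(const btVector3& v,
                                        const btVector3& a,
                                        const btVector3& b,
                                        const btVector3& c)
    {
        return btVector3(v.dot(a), v.dot(b), v.dot(c));
    }
    // x.dot3(m_basis[0], m_basis[1], m_basis[2]) + m_origin  ==  the old
    // component-by-component construction removed in this hunk.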
diff --git a/extern/bullet2/src/LinearMath/btVector3.cpp b/extern/bullet2/src/LinearMath/btVector3.cpp
new file mode 100644
index 00000000000..1c26e523d80
--- /dev/null
+++ b/extern/bullet2/src/LinearMath/btVector3.cpp
@@ -0,0 +1,1639 @@
+/*
+ Copyright (c) 2011 Apple Inc.
+ http://continuousphysics.com/Bullet/
+
+ This software is provided 'as-is', without any express or implied warranty.
+ In no event will the authors be held liable for any damages arising from the use of this software.
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it freely,
+ subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ This source version has been altered.
+ */
+
+#if defined (_WIN32) || defined (__i386__)
+#define BT_USE_SSE_IN_API
+#endif
+
+
+#include "btVector3.h"
+
+
+
+#if defined BT_USE_SIMD_VECTOR3
+
+#if DEBUG
+#include <string.h>//for memset
+#endif
+
+
+#ifdef __APPLE__
+#include <stdint.h>
+typedef float float4 __attribute__ ((vector_size(16)));
+#else
+#define float4 __m128
+#endif
+//typedef uint32_t uint4 __attribute__ ((vector_size(16)));
+
+
+#if defined BT_USE_SSE || defined _WIN32
+
+#define LOG2_ARRAY_SIZE 6
+#define STACK_ARRAY_COUNT (1UL << LOG2_ARRAY_SIZE)
+
+#include <emmintrin.h>
+
+long _maxdot_large( const float *vv, const float *vec, unsigned long count, float *dotResult );
+long _maxdot_large( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ const float4 *vertices = (const float4*) vv;
+ static const unsigned char indexTable[16] = {-1, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0 };
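+    // note: indexTable maps a 4-bit _mm_movemask_ps result (which lanes matched
+    // the current max) to the lane number of its lowest set bit, i.e. the first
+    // matching vertex within a group of four; entry 0 is never used.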
+ float4 dotMax = btAssign128( -BT_INFINITY, -BT_INFINITY, -BT_INFINITY, -BT_INFINITY );
+ float4 vvec = _mm_loadu_ps( vec );
+ float4 vHi = btCastiTo128f(_mm_shuffle_epi32( btCastfTo128i( vvec), 0xaa )); /// zzzz
+ float4 vLo = _mm_movelh_ps( vvec, vvec ); /// xyxy
+
+ long maxIndex = -1L;
+
+ size_t segment = 0;
+ float4 stack_array[ STACK_ARRAY_COUNT ];
+
+#if DEBUG
+ memset( stack_array, -1, STACK_ARRAY_COUNT * sizeof(stack_array[0]) );
+#endif
+
+ size_t index;
+ float4 max;
+ // Faster loop without cleanup code for full tiles
+ for ( segment = 0; segment + STACK_ARRAY_COUNT*4 <= count; segment += STACK_ARRAY_COUNT*4 )
+ {
+ max = dotMax;
+
+ for( index = 0; index < STACK_ARRAY_COUNT; index+= 4 )
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3]; vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+1] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+2] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+3] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ // It is too costly to keep the index of the max here. We will look for it again later. We save a lot of work this way.
+ }
+
+ // If we found a new max
+ if( 0xf != _mm_movemask_ps( (float4) _mm_cmpeq_ps(max, dotMax)))
+ {
+ // copy the new max across all lanes of our max accumulator
+ max = _mm_max_ps(max, (float4) _mm_shuffle_ps( max, max, 0x4e));
+ max = _mm_max_ps(max, (float4) _mm_shuffle_ps( max, max, 0xb1));
+
+ dotMax = max;
+
+ // find first occurrence of that max
+ size_t test;
+ for( index = 0; 0 == (test=_mm_movemask_ps( _mm_cmpeq_ps( stack_array[index], max))); index++ ) // local_count must be a multiple of 4
+ {}
+ // record where it is.
+ maxIndex = 4*index + segment + indexTable[test];
+ }
+ }
+
+ // account for work we've already done
+ count -= segment;
+
+ // Deal with the last < STACK_ARRAY_COUNT vectors
+ max = dotMax;
+ index = 0;
+
+
+ if( btUnlikely( count > 16) )
+ {
+ for( ; index + 4 <= count / 4; index+=4 )
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3]; vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+1] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+2] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+3] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+
+ // It is too costly to keep the index of the max here. We will look for it again later. We save a lot of work this way.
+ }
+ }
+
+ size_t localCount = (count & -4L) - 4*index;
+ if( localCount )
+ {
+#ifdef __APPLE__
+ float4 t0, t1, t2, t3, t4;
+ float4 * sap = &stack_array[index + localCount / 4];
+ vertices += localCount; // counter the offset
+ size_t byteIndex = -(localCount) * sizeof(float);
+ //AT&T Code style assembly
+ asm volatile
+ ( ".align 4 \n\
+ 0: movaps %[max], %[t2] // move max out of the way to avoid propagating NaNs in max \n\
+ movaps (%[vertices], %[byteIndex], 4), %[t0] // vertices[0] \n\
+ movaps 16(%[vertices], %[byteIndex], 4), %[t1] // vertices[1] \n\
+ movaps %[t0], %[max] // vertices[0] \n\
+ movlhps %[t1], %[max] // x0y0x1y1 \n\
+ movaps 32(%[vertices], %[byteIndex], 4), %[t3] // vertices[2] \n\
+ movaps 48(%[vertices], %[byteIndex], 4), %[t4] // vertices[3] \n\
+ mulps %[vLo], %[max] // x0y0x1y1 * vLo \n\
+ movhlps %[t0], %[t1] // z0w0z1w1 \n\
+ movaps %[t3], %[t0] // vertices[2] \n\
+ movlhps %[t4], %[t0] // x2y2x3y3 \n\
+ mulps %[vLo], %[t0] // x2y2x3y3 * vLo \n\
+ movhlps %[t3], %[t4] // z2w2z3w3 \n\
+ shufps $0x88, %[t4], %[t1] // z0z1z2z3 \n\
+ mulps %[vHi], %[t1] // z0z1z2z3 * vHi \n\
+ movaps %[max], %[t3] // x0y0x1y1 * vLo \n\
+ shufps $0x88, %[t0], %[max] // x0x1x2x3 * vLo.x \n\
+ shufps $0xdd, %[t0], %[t3] // y0y1y2y3 * vLo.y \n\
+ addps %[t3], %[max] // x + y \n\
+ addps %[t1], %[max] // x + y + z \n\
+ movaps %[max], (%[sap], %[byteIndex]) // record result for later scrutiny \n\
+ maxps %[t2], %[max] // record max, restore max \n\
+ add $16, %[byteIndex] // advance loop counter\n\
+ jnz 0b \n\
+ "
+ : [max] "+x" (max), [t0] "=&x" (t0), [t1] "=&x" (t1), [t2] "=&x" (t2), [t3] "=&x" (t3), [t4] "=&x" (t4), [byteIndex] "+r" (byteIndex)
+ : [vLo] "x" (vLo), [vHi] "x" (vHi), [vertices] "r" (vertices), [sap] "r" (sap)
+ : "memory", "cc"
+ );
+ index += localCount/4;
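+    // The AT&T-syntax asm block above is the hand-scheduled Apple-only equivalent
+    // of the intrinsics loop in the #else branch below: the same four-dot-products
+    // transpose per iteration, writing each result into stack_array for the max
+    // scan that follows.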
+#else
+ {
+ for( unsigned int i=0; i<localCount/4; i++,index++)
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3];
+ vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+ }
+ }
+#endif //__APPLE__
+ }
+
+ // process the last few points
+ if( count & 3 )
+ {
+ float4 v0, v1, v2, x, y, z;
+ switch( count & 3 )
+ {
+ case 3:
+ {
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+
+ // Calculate 3 dot products, transpose, duplicate v2
+ float4 lo0 = _mm_movelh_ps( v0, v1); // xyxy.lo
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z?z?.lo
+ lo0 = lo0*vLo;
+ z = _mm_shuffle_ps(hi0, v2, 0xa8 ); // z0z1z2z2
+ z = z*vHi;
+ float4 lo1 = _mm_movelh_ps(v2, v2); // xyxy
+ lo1 = lo1*vLo;
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ }
+ break;
+ case 2:
+ {
+ v0 = vertices[0];
+ v1 = vertices[1];
+ float4 xy = _mm_movelh_ps(v0, v1);
+ z = _mm_movehl_ps(v1, v0);
+ xy = xy*vLo;
+ z = _mm_shuffle_ps( z, z, 0xa8);
+ x = _mm_shuffle_ps( xy, xy, 0xa8);
+ y = _mm_shuffle_ps( xy, xy, 0xfd);
+ z = z*vHi;
+ }
+ break;
+ case 1:
+ {
+ float4 xy = vertices[0];
+ z = _mm_shuffle_ps( xy, xy, 0xaa);
+ xy = xy*vLo;
+ z = z*vHi;
+ x = _mm_shuffle_ps(xy, xy, 0);
+ y = _mm_shuffle_ps(xy, xy, 0x55);
+ }
+ break;
+ }
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ max = _mm_max_ps( x, max ); // control the order here so that max is never NaN even if x is nan
+ index++;
+ }
+
+ // if we found a new max.
+ if( 0 == segment || 0xf != _mm_movemask_ps( (float4) _mm_cmpeq_ps(max, dotMax)))
+ { // we found a new max. Search for it
+ // find max across the max vector, place in all elements of max -- big latency hit here
+ max = _mm_max_ps(max, (float4) _mm_shuffle_ps( max, max, 0x4e));
+ max = _mm_max_ps(max, (float4) _mm_shuffle_ps( max, max, 0xb1));
+
+ // It is slightly faster to do this part in scalar code when count < 8. However, the common case for
+ // this where it actually makes a difference is handled in the early out at the top of the function,
+ // so it is less than a 1% difference here. I opted for improved code size, fewer branches and reduced
+ // complexity, and removed it.
+
+ dotMax = max;
+
+        // scan for the first occurrence of max in the array
+ size_t test;
+ for( index = 0; 0 == (test=_mm_movemask_ps( _mm_cmpeq_ps( stack_array[index], max))); index++ ) // local_count must be a multiple of 4
+ {}
+ maxIndex = 4*index + segment + indexTable[test];
+ }
+
+ _mm_store_ss( dotResult, dotMax);
+ return maxIndex;
+}
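+
+/* A scalar sketch of what _maxdot_large computes (illustrative only, not part
+   of Bullet's API): the largest dot product of vec against count xyz vertices
+   stored with a 4-float stride, plus the index of its first occurrence. The
+   SIMD code above is an unrolled, transposed version of this loop. */
+static long maxdot_scalar_sketch( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+    long  maxIndex = -1L;
+    float maxDot   = -BT_INFINITY;
+    for( unsigned long i = 0; i < count; i++ )
+    {
+        const float *v = vv + 4*i;                               // x,y,z,(w) per vertex
+        float dot = v[0]*vec[0] + v[1]*vec[1] + v[2]*vec[2];     // w is ignored
+        if( dot > maxDot )                                       // keep first occurrence on ties
+        {
+            maxDot   = dot;
+            maxIndex = (long)i;
+        }
+    }
+    *dotResult = maxDot;
+    return maxIndex;
+}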
+
+long _mindot_large( const float *vv, const float *vec, unsigned long count, float *dotResult );
+
+long _mindot_large( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ const float4 *vertices = (const float4*) vv;
+ static const unsigned char indexTable[16] = {-1, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0 };
+ float4 dotmin = btAssign128( BT_INFINITY, BT_INFINITY, BT_INFINITY, BT_INFINITY );
+ float4 vvec = _mm_loadu_ps( vec );
+ float4 vHi = btCastiTo128f(_mm_shuffle_epi32( btCastfTo128i( vvec), 0xaa )); /// zzzz
+ float4 vLo = _mm_movelh_ps( vvec, vvec ); /// xyxy
+
+ long minIndex = -1L;
+
+ size_t segment = 0;
+ float4 stack_array[ STACK_ARRAY_COUNT ];
+
+#if DEBUG
+ memset( stack_array, -1, STACK_ARRAY_COUNT * sizeof(stack_array[0]) );
+#endif
+
+ size_t index;
+ float4 min;
+ // Faster loop without cleanup code for full tiles
+ for ( segment = 0; segment + STACK_ARRAY_COUNT*4 <= count; segment += STACK_ARRAY_COUNT*4 )
+ {
+ min = dotmin;
+
+ for( index = 0; index < STACK_ARRAY_COUNT; index+= 4 )
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3]; vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+1] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+2] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+3] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ // It is too costly to keep the index of the min here. We will look for it again later. We save a lot of work this way.
+ }
+
+ // If we found a new min
+ if( 0xf != _mm_movemask_ps( (float4) _mm_cmpeq_ps(min, dotmin)))
+ {
+ // copy the new min across all lanes of our min accumulator
+ min = _mm_min_ps(min, (float4) _mm_shuffle_ps( min, min, 0x4e));
+ min = _mm_min_ps(min, (float4) _mm_shuffle_ps( min, min, 0xb1));
+
+ dotmin = min;
+
+ // find first occurrence of that min
+ size_t test;
+ for( index = 0; 0 == (test=_mm_movemask_ps( _mm_cmpeq_ps( stack_array[index], min))); index++ ) // local_count must be a multiple of 4
+ {}
+ // record where it is.
+ minIndex = 4*index + segment + indexTable[test];
+ }
+ }
+
+ // account for work we've already done
+ count -= segment;
+
+ // Deal with the last < STACK_ARRAY_COUNT vectors
+ min = dotmin;
+ index = 0;
+
+
+ if(btUnlikely( count > 16) )
+ {
+ for( ; index + 4 <= count / 4; index+=4 )
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3]; vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+1] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+2] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+ v3 = vertices[3]; vertices += 4;
+
+ lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index+3] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+
+ // It is too costly to keep the index of the min here. We will look for it again later. We save a lot of work this way.
+ }
+ }
+
+ size_t localCount = (count & -4L) - 4*index;
+ if( localCount )
+ {
+
+
+#ifdef __APPLE__
+ vertices += localCount; // counter the offset
+ float4 t0, t1, t2, t3, t4;
+ size_t byteIndex = -(localCount) * sizeof(float);
+ float4 * sap = &stack_array[index + localCount / 4];
+
+ asm volatile
+ ( ".align 4 \n\
+ 0: movaps %[min], %[t2] // move min out of the way to avoid propagating NaNs in min \n\
+ movaps (%[vertices], %[byteIndex], 4), %[t0] // vertices[0] \n\
+ movaps 16(%[vertices], %[byteIndex], 4), %[t1] // vertices[1] \n\
+ movaps %[t0], %[min] // vertices[0] \n\
+ movlhps %[t1], %[min] // x0y0x1y1 \n\
+ movaps 32(%[vertices], %[byteIndex], 4), %[t3] // vertices[2] \n\
+ movaps 48(%[vertices], %[byteIndex], 4), %[t4] // vertices[3] \n\
+ mulps %[vLo], %[min] // x0y0x1y1 * vLo \n\
+ movhlps %[t0], %[t1] // z0w0z1w1 \n\
+ movaps %[t3], %[t0] // vertices[2] \n\
+ movlhps %[t4], %[t0] // x2y2x3y3 \n\
+ movhlps %[t3], %[t4] // z2w2z3w3 \n\
+ mulps %[vLo], %[t0] // x2y2x3y3 * vLo \n\
+ shufps $0x88, %[t4], %[t1] // z0z1z2z3 \n\
+ mulps %[vHi], %[t1] // z0z1z2z3 * vHi \n\
+ movaps %[min], %[t3] // x0y0x1y1 * vLo \n\
+ shufps $0x88, %[t0], %[min] // x0x1x2x3 * vLo.x \n\
+ shufps $0xdd, %[t0], %[t3] // y0y1y2y3 * vLo.y \n\
+ addps %[t3], %[min] // x + y \n\
+ addps %[t1], %[min] // x + y + z \n\
+ movaps %[min], (%[sap], %[byteIndex]) // record result for later scrutiny \n\
+ minps %[t2], %[min] // record min, restore min \n\
+ add $16, %[byteIndex] // advance loop counter\n\
+ jnz 0b \n\
+ "
+ : [min] "+x" (min), [t0] "=&x" (t0), [t1] "=&x" (t1), [t2] "=&x" (t2), [t3] "=&x" (t3), [t4] "=&x" (t4), [byteIndex] "+r" (byteIndex)
+ : [vLo] "x" (vLo), [vHi] "x" (vHi), [vertices] "r" (vertices), [sap] "r" (sap)
+ : "memory", "cc"
+ );
+ index += localCount/4;
+#else
+ {
+ for( unsigned int i=0; i<localCount/4; i++,index++)
+ { // do four dot products at a time. Carefully avoid touching the w element.
+ float4 v0 = vertices[0];
+ float4 v1 = vertices[1];
+ float4 v2 = vertices[2];
+ float4 v3 = vertices[3];
+ vertices += 4;
+
+ float4 lo0 = _mm_movelh_ps( v0, v1); // x0y0x1y1
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z0?0z1?1
+ float4 lo1 = _mm_movelh_ps( v2, v3); // x2y2x3y3
+ float4 hi1 = _mm_movehl_ps( v3, v2); // z2?2z3?3
+
+ lo0 = lo0*vLo;
+ lo1 = lo1*vLo;
+ float4 z = _mm_shuffle_ps(hi0, hi1, 0x88);
+ float4 x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ float4 y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ z = z*vHi;
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+                min = _mm_min_ps( x, min );  // control the order here so that min is never NaN even if x is nan
+ }
+ }
+
+#endif
+ }
+
+ // process the last few points
+ if( count & 3 )
+ {
+ float4 v0, v1, v2, x, y, z;
+ switch( count & 3 )
+ {
+ case 3:
+ {
+ v0 = vertices[0];
+ v1 = vertices[1];
+ v2 = vertices[2];
+
+ // Calculate 3 dot products, transpose, duplicate v2
+ float4 lo0 = _mm_movelh_ps( v0, v1); // xyxy.lo
+ float4 hi0 = _mm_movehl_ps( v1, v0); // z?z?.lo
+ lo0 = lo0*vLo;
+ z = _mm_shuffle_ps(hi0, v2, 0xa8 ); // z0z1z2z2
+ z = z*vHi;
+ float4 lo1 = _mm_movelh_ps(v2, v2); // xyxy
+ lo1 = lo1*vLo;
+ x = _mm_shuffle_ps(lo0, lo1, 0x88);
+ y = _mm_shuffle_ps(lo0, lo1, 0xdd);
+ }
+ break;
+ case 2:
+ {
+ v0 = vertices[0];
+ v1 = vertices[1];
+ float4 xy = _mm_movelh_ps(v0, v1);
+ z = _mm_movehl_ps(v1, v0);
+ xy = xy*vLo;
+ z = _mm_shuffle_ps( z, z, 0xa8);
+ x = _mm_shuffle_ps( xy, xy, 0xa8);
+ y = _mm_shuffle_ps( xy, xy, 0xfd);
+ z = z*vHi;
+ }
+ break;
+ case 1:
+ {
+ float4 xy = vertices[0];
+ z = _mm_shuffle_ps( xy, xy, 0xaa);
+ xy = xy*vLo;
+ z = z*vHi;
+ x = _mm_shuffle_ps(xy, xy, 0);
+ y = _mm_shuffle_ps(xy, xy, 0x55);
+ }
+ break;
+ }
+ x = x+y;
+ x = x+z;
+ stack_array[index] = x;
+ min = _mm_min_ps( x, min ); // control the order here so that min is never NaN even if x is nan
+ index++;
+ }
+
+ // if we found a new min.
+ if( 0 == segment || 0xf != _mm_movemask_ps( (float4) _mm_cmpeq_ps(min, dotmin)))
+ { // we found a new min. Search for it
+ // find min across the min vector, place in all elements of min -- big latency hit here
+ min = _mm_min_ps(min, (float4) _mm_shuffle_ps( min, min, 0x4e));
+ min = _mm_min_ps(min, (float4) _mm_shuffle_ps( min, min, 0xb1));
+
+ // It is slightly faster to do this part in scalar code when count < 8. However, the common case for
+ // this where it actually makes a difference is handled in the early out at the top of the function,
+ // so it is less than a 1% difference here. I opted for improved code size, fewer branches and reduced
+ // complexity, and removed it.
+
+ dotmin = min;
+
+        // scan for the first occurrence of min in the array
+ size_t test;
+ for( index = 0; 0 == (test=_mm_movemask_ps( _mm_cmpeq_ps( stack_array[index], min))); index++ ) // local_count must be a multiple of 4
+ {}
+ minIndex = 4*index + segment + indexTable[test];
+ }
+
+ _mm_store_ss( dotResult, dotmin);
+ return minIndex;
+}
+
+
+#elif defined BT_USE_NEON
+#define ARM_NEON_GCC_COMPATIBILITY 1
+#include <arm_neon.h>
+
+
+static long _maxdot_large_v0( const float *vv, const float *vec, unsigned long count, float *dotResult );
+static long _maxdot_large_v1( const float *vv, const float *vec, unsigned long count, float *dotResult );
+static long _maxdot_large_sel( const float *vv, const float *vec, unsigned long count, float *dotResult );
+static long _mindot_large_v0( const float *vv, const float *vec, unsigned long count, float *dotResult );
+static long _mindot_large_v1( const float *vv, const float *vec, unsigned long count, float *dotResult );
+static long _mindot_large_sel( const float *vv, const float *vec, unsigned long count, float *dotResult );
+
+long (*_maxdot_large)( const float *vv, const float *vec, unsigned long count, float *dotResult ) = _maxdot_large_sel;
+long (*_mindot_large)( const float *vv, const float *vec, unsigned long count, float *dotResult ) = _mindot_large_sel;
+
+extern "C" {int _get_cpu_capabilities( void );}
+
+static long _maxdot_large_sel( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ if( _get_cpu_capabilities() & 0x2000 )
+ _maxdot_large = _maxdot_large_v1;
+ else
+ _maxdot_large = _maxdot_large_v0;
+
+ return _maxdot_large(vv, vec, count, dotResult);
+}
+
+static long _mindot_large_sel( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ if( _get_cpu_capabilities() & 0x2000 )
+ _mindot_large = _mindot_large_v1;
+ else
+ _mindot_large = _mindot_large_v0;
+
+ return _mindot_large(vv, vec, count, dotResult);
+}
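+
+// The two _sel stubs above implement one-time lazy dispatch: the first call
+// probes _get_cpu_capabilities() (bit 0x2000 is assumed here to indicate the
+// faster NEON variant), rebinds the corresponding function pointer, then
+// forwards the call, so every later call goes straight to the selected
+// implementation.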
+
+
+
+#define vld1q_f32_aligned_postincrement( _ptr ) ({ float32x4_t _r; asm( "vld1.f32 {%0}, [%1, :128]!\n" : "=w" (_r), "+r" (_ptr) ); /*return*/ _r; })
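+// As a sketch, the macro above behaves like the plain intrinsics below, with the
+// added :128 alignment hint and the post-increment folded into a single vld1.f32:
+//     float32x4_t r = vld1q_f32( ptr );
+//     ptr += 4;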
+
+
+long _maxdot_large_v0( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ unsigned long i = 0;
+ float32x4_t vvec = vld1q_f32_aligned_postincrement( vec );
+ float32x2_t vLo = vget_low_f32(vvec);
+ float32x2_t vHi = vdup_lane_f32(vget_high_f32(vvec), 0);
+ float32x2_t dotMaxLo = (float32x2_t) { -BT_INFINITY, -BT_INFINITY };
+ float32x2_t dotMaxHi = (float32x2_t) { -BT_INFINITY, -BT_INFINITY };
+ uint32x2_t indexLo = (uint32x2_t) {0, 1};
+ uint32x2_t indexHi = (uint32x2_t) {2, 3};
+ uint32x2_t iLo = (uint32x2_t) {-1, -1};
+ uint32x2_t iHi = (uint32x2_t) {-1, -1};
+ const uint32x2_t four = (uint32x2_t) {4,4};
+
+ for( ; i+8 <= count; i+= 8 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ float32x2_t xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2x2_t z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( z1.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vcgt_f32( rLo, dotMaxLo );
+ uint32x2_t maskHi = vcgt_f32( rHi, dotMaxHi );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ dotMaxHi = vbsl_f32( maskHi, rHi, dotMaxHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+
+ v0 = vld1q_f32_aligned_postincrement( vv );
+ v1 = vld1q_f32_aligned_postincrement( vv );
+ v2 = vld1q_f32_aligned_postincrement( vv );
+ v3 = vld1q_f32_aligned_postincrement( vv );
+
+ xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ zLo = vmul_f32( z0.val[0], vHi);
+ zHi = vmul_f32( z1.val[0], vHi);
+
+ rLo = vpadd_f32( xy0, xy1);
+ rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ maskLo = vcgt_f32( rLo, dotMaxLo );
+ maskHi = vcgt_f32( rHi, dotMaxHi );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ dotMaxHi = vbsl_f32( maskHi, rHi, dotMaxHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+ }
+
+ for( ; i+4 <= count; i+= 4 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ float32x2_t xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2x2_t z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( z1.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vcgt_f32( rLo, dotMaxLo );
+ uint32x2_t maskHi = vcgt_f32( rHi, dotMaxHi );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ dotMaxHi = vbsl_f32( maskHi, rHi, dotMaxHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+ }
+
+ switch( count & 3 )
+ {
+ case 3:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( vdup_lane_f32(vget_high_f32(v2), 0), vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy2);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vcgt_f32( rLo, dotMaxLo );
+ uint32x2_t maskHi = vcgt_f32( rHi, dotMaxHi );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ dotMaxHi = vbsl_f32( maskHi, rHi, dotMaxHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ }
+ break;
+ case 2:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ rLo = vadd_f32(rLo, zLo);
+
+ uint32x2_t maskLo = vcgt_f32( rLo, dotMaxLo );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ }
+ break;
+ case 1:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t z0 = vdup_lane_f32(vget_high_f32(v0), 0);
+ float32x2_t zLo = vmul_f32( z0, vHi);
+ float32x2_t rLo = vpadd_f32( xy0, xy0);
+ rLo = vadd_f32(rLo, zLo);
+ uint32x2_t maskLo = vcgt_f32( rLo, dotMaxLo );
+ dotMaxLo = vbsl_f32( maskLo, rLo, dotMaxLo);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ // select best answer between hi and lo results
+ uint32x2_t mask = vcgt_f32( dotMaxHi, dotMaxLo );
+ dotMaxLo = vbsl_f32(mask, dotMaxHi, dotMaxLo);
+ iLo = vbsl_u32(mask, iHi, iLo);
+
+ // select best answer between even and odd results
+ dotMaxHi = vdup_lane_f32(dotMaxLo, 1);
+ iHi = vdup_lane_u32(iLo, 1);
+ mask = vcgt_f32( dotMaxHi, dotMaxLo );
+ dotMaxLo = vbsl_f32(mask, dotMaxHi, dotMaxLo);
+ iLo = vbsl_u32(mask, iHi, iLo);
+
+ *dotResult = vget_lane_f32( dotMaxLo, 0);
+ return vget_lane_u32(iLo, 0);
+}
+
+
+long _maxdot_large_v1( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ float32x4_t vvec = vld1q_f32_aligned_postincrement( vec );
+ float32x4_t vLo = vcombine_f32(vget_low_f32(vvec), vget_low_f32(vvec));
+ float32x4_t vHi = vdupq_lane_f32(vget_high_f32(vvec), 0);
+ const uint32x4_t four = (uint32x4_t){ 4, 4, 4, 4 };
+ uint32x4_t local_index = (uint32x4_t) {0, 1, 2, 3};
+ uint32x4_t index = (uint32x4_t) { -1, -1, -1, -1 };
+ float32x4_t maxDot = (float32x4_t) { -BT_INFINITY, -BT_INFINITY, -BT_INFINITY, -BT_INFINITY };
+
+ unsigned long i = 0;
+ for( ; i + 8 <= count; i += 8 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+
+ v0 = vld1q_f32_aligned_postincrement( vv );
+ v1 = vld1q_f32_aligned_postincrement( vv );
+ v2 = vld1q_f32_aligned_postincrement( vv );
+ v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ zb = vuzpq_f32( z0, z1);
+ z = vmulq_f32( zb.val[0], vHi);
+ xy = vuzpq_f32( xy0, xy1);
+ x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+
+ for( ; i + 4 <= count; i += 4 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+
+ switch (count & 3) {
+ case 3:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v2));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v2));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ case 2:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+
+        // these two vcombine lines should resolve to a single vswp d, d
+        float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+        float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+
+ xy0 = vmulq_f32(xy0, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z0);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy0);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ case 1:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+
+        // duplicate the xy pair of v0 into both halves of a quad register
+        float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v0));
+        // splat the z component of v0 across a quad register
+        float32x4_t z = vdupq_lane_f32(vget_high_f32(v0), 0);
+
+ xy0 = vmulq_f32(xy0, vLo);
+
+ z = vmulq_f32( z, vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy0);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcgtq_f32(x, maxDot);
+ maxDot = vbslq_f32( mask, x, maxDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+
+ // select best answer between hi and lo results
+ uint32x2_t mask = vcgt_f32( vget_high_f32(maxDot), vget_low_f32(maxDot));
+ float32x2_t maxDot2 = vbsl_f32(mask, vget_high_f32(maxDot), vget_low_f32(maxDot));
+ uint32x2_t index2 = vbsl_u32(mask, vget_high_u32(index), vget_low_u32(index));
+
+ // select best answer between even and odd results
+ float32x2_t maxDotO = vdup_lane_f32(maxDot2, 1);
+ uint32x2_t indexHi = vdup_lane_u32(index2, 1);
+ mask = vcgt_f32( maxDotO, maxDot2 );
+ maxDot2 = vbsl_f32(mask, maxDotO, maxDot2);
+ index2 = vbsl_u32(mask, indexHi, index2);
+
+ *dotResult = vget_lane_f32( maxDot2, 0);
+ return vget_lane_u32(index2, 0);
+
+}
+
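+// The _mindot_large variants below mirror the _maxdot_large code above: the running
+// value is seeded with +BT_INFINITY and the compares use vclt/vcltq instead of vcgt/vcgtq.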
+long _mindot_large_v0( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ unsigned long i = 0;
+ float32x4_t vvec = vld1q_f32_aligned_postincrement( vec );
+ float32x2_t vLo = vget_low_f32(vvec);
+ float32x2_t vHi = vdup_lane_f32(vget_high_f32(vvec), 0);
+ float32x2_t dotMinLo = (float32x2_t) { BT_INFINITY, BT_INFINITY };
+ float32x2_t dotMinHi = (float32x2_t) { BT_INFINITY, BT_INFINITY };
+ uint32x2_t indexLo = (uint32x2_t) {0, 1};
+ uint32x2_t indexHi = (uint32x2_t) {2, 3};
+ uint32x2_t iLo = (uint32x2_t) {-1, -1};
+ uint32x2_t iHi = (uint32x2_t) {-1, -1};
+ const uint32x2_t four = (uint32x2_t) {4,4};
+
+ for( ; i+8 <= count; i+= 8 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ float32x2_t xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2x2_t z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( z1.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vclt_f32( rLo, dotMinLo );
+ uint32x2_t maskHi = vclt_f32( rHi, dotMinHi );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ dotMinHi = vbsl_f32( maskHi, rHi, dotMinHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+
+ v0 = vld1q_f32_aligned_postincrement( vv );
+ v1 = vld1q_f32_aligned_postincrement( vv );
+ v2 = vld1q_f32_aligned_postincrement( vv );
+ v3 = vld1q_f32_aligned_postincrement( vv );
+
+ xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ zLo = vmul_f32( z0.val[0], vHi);
+ zHi = vmul_f32( z1.val[0], vHi);
+
+ rLo = vpadd_f32( xy0, xy1);
+ rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ maskLo = vclt_f32( rLo, dotMinLo );
+ maskHi = vclt_f32( rHi, dotMinHi );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ dotMinHi = vbsl_f32( maskHi, rHi, dotMinHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+ }
+
+ for( ; i+4 <= count; i+= 4 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+ float32x2_t xy3 = vmul_f32( vget_low_f32(v3), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2x2_t z1 = vtrn_f32( vget_high_f32(v2), vget_high_f32(v3));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( z1.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy3);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vclt_f32( rLo, dotMinLo );
+ uint32x2_t maskHi = vclt_f32( rHi, dotMinHi );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ dotMinHi = vbsl_f32( maskHi, rHi, dotMinHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ indexLo = vadd_u32(indexLo, four);
+ indexHi = vadd_u32(indexHi, four);
+ }
+ switch( count & 3 )
+ {
+ case 3:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+ float32x2_t xy2 = vmul_f32( vget_low_f32(v2), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+ float32x2_t zHi = vmul_f32( vdup_lane_f32(vget_high_f32(v2), 0), vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ float32x2_t rHi = vpadd_f32( xy2, xy2);
+ rLo = vadd_f32(rLo, zLo);
+ rHi = vadd_f32(rHi, zHi);
+
+ uint32x2_t maskLo = vclt_f32( rLo, dotMinLo );
+ uint32x2_t maskHi = vclt_f32( rHi, dotMinHi );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ dotMinHi = vbsl_f32( maskHi, rHi, dotMinHi);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ iHi = vbsl_u32(maskHi, indexHi, iHi);
+ }
+ break;
+ case 2:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t xy1 = vmul_f32( vget_low_f32(v1), vLo);
+
+ float32x2x2_t z0 = vtrn_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x2_t zLo = vmul_f32( z0.val[0], vHi);
+
+ float32x2_t rLo = vpadd_f32( xy0, xy1);
+ rLo = vadd_f32(rLo, zLo);
+
+ uint32x2_t maskLo = vclt_f32( rLo, dotMinLo );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ }
+ break;
+ case 1:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x2_t xy0 = vmul_f32( vget_low_f32(v0), vLo);
+ float32x2_t z0 = vdup_lane_f32(vget_high_f32(v0), 0);
+ float32x2_t zLo = vmul_f32( z0, vHi);
+ float32x2_t rLo = vpadd_f32( xy0, xy0);
+ rLo = vadd_f32(rLo, zLo);
+ uint32x2_t maskLo = vclt_f32( rLo, dotMinLo );
+ dotMinLo = vbsl_f32( maskLo, rLo, dotMinLo);
+ iLo = vbsl_u32(maskLo, indexLo, iLo);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ // select best answer between hi and lo results
+ uint32x2_t mask = vclt_f32( dotMinHi, dotMinLo );
+ dotMinLo = vbsl_f32(mask, dotMinHi, dotMinLo);
+ iLo = vbsl_u32(mask, iHi, iLo);
+
+ // select best answer between even and odd results
+ dotMinHi = vdup_lane_f32(dotMinLo, 1);
+ iHi = vdup_lane_u32(iLo, 1);
+ mask = vclt_f32( dotMinHi, dotMinLo );
+ dotMinLo = vbsl_f32(mask, dotMinHi, dotMinLo);
+ iLo = vbsl_u32(mask, iHi, iLo);
+
+ *dotResult = vget_lane_f32( dotMinLo, 0);
+ return vget_lane_u32(iLo, 0);
+}
+
+long _mindot_large_v1( const float *vv, const float *vec, unsigned long count, float *dotResult )
+{
+ float32x4_t vvec = vld1q_f32_aligned_postincrement( vec );
+ float32x4_t vLo = vcombine_f32(vget_low_f32(vvec), vget_low_f32(vvec));
+ float32x4_t vHi = vdupq_lane_f32(vget_high_f32(vvec), 0);
+ const uint32x4_t four = (uint32x4_t){ 4, 4, 4, 4 };
+ uint32x4_t local_index = (uint32x4_t) {0, 1, 2, 3};
+ uint32x4_t index = (uint32x4_t) { -1, -1, -1, -1 };
+ float32x4_t minDot = (float32x4_t) { BT_INFINITY, BT_INFINITY, BT_INFINITY, BT_INFINITY };
+
+ unsigned long i = 0;
+ for( ; i + 8 <= count; i += 8 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+
+ v0 = vld1q_f32_aligned_postincrement( vv );
+ v1 = vld1q_f32_aligned_postincrement( vv );
+ v2 = vld1q_f32_aligned_postincrement( vv );
+ v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ zb = vuzpq_f32( z0, z1);
+ z = vmulq_f32( zb.val[0], vHi);
+ xy = vuzpq_f32( xy0, xy1);
+ x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+
+ for( ; i + 4 <= count; i += 4 )
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v3 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v3));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v3));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+
+ switch (count & 3) {
+ case 3:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v2 = vld1q_f32_aligned_postincrement( vv );
+
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+ float32x4_t xy1 = vcombine_f32( vget_low_f32(v2), vget_low_f32(v2));
+ // the next two lines should resolve to a single vswp d, d
+ float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+ float32x4_t z1 = vcombine_f32( vget_high_f32(v2), vget_high_f32(v2));
+
+ xy0 = vmulq_f32(xy0, vLo);
+ xy1 = vmulq_f32(xy1, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z1);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy1);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ case 2:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+ float32x4_t v1 = vld1q_f32_aligned_postincrement( vv );
+
+        // these two vcombine lines should resolve to a single vswp d, d
+        float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v1));
+        float32x4_t z0 = vcombine_f32( vget_high_f32(v0), vget_high_f32(v1));
+
+ xy0 = vmulq_f32(xy0, vLo);
+
+ float32x4x2_t zb = vuzpq_f32( z0, z0);
+ float32x4_t z = vmulq_f32( zb.val[0], vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy0);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ case 1:
+ {
+ float32x4_t v0 = vld1q_f32_aligned_postincrement( vv );
+
+        // duplicate the xy pair of v0 into both halves of a quad register
+        float32x4_t xy0 = vcombine_f32( vget_low_f32(v0), vget_low_f32(v0));
+        // splat the z component of v0 across a quad register
+        float32x4_t z = vdupq_lane_f32(vget_high_f32(v0), 0);
+
+ xy0 = vmulq_f32(xy0, vLo);
+
+ z = vmulq_f32( z, vHi);
+ float32x4x2_t xy = vuzpq_f32( xy0, xy0);
+ float32x4_t x = vaddq_f32(xy.val[0], xy.val[1]);
+ x = vaddq_f32(x, z);
+
+ uint32x4_t mask = vcltq_f32(x, minDot);
+ minDot = vbslq_f32( mask, x, minDot);
+ index = vbslq_u32(mask, local_index, index);
+ local_index = vaddq_u32(local_index, four);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+
+ // select best answer between hi and lo results
+ uint32x2_t mask = vclt_f32( vget_high_f32(minDot), vget_low_f32(minDot));
+ float32x2_t minDot2 = vbsl_f32(mask, vget_high_f32(minDot), vget_low_f32(minDot));
+ uint32x2_t index2 = vbsl_u32(mask, vget_high_u32(index), vget_low_u32(index));
+
+ // select best answer between even and odd results
+ float32x2_t minDotO = vdup_lane_f32(minDot2, 1);
+ uint32x2_t indexHi = vdup_lane_u32(index2, 1);
+ mask = vclt_f32( minDotO, minDot2 );
+ minDot2 = vbsl_f32(mask, minDotO, minDot2);
+ index2 = vbsl_u32(mask, indexHi, index2);
+
+ *dotResult = vget_lane_f32( minDot2, 0);
+ return vget_lane_u32(index2, 0);
+
+}
+
+#else
+ #error Unhandled __APPLE__ arch
+#endif
+
+#endif /* __APPLE__ */
+
+
diff --git a/extern/bullet2/src/LinearMath/btVector3.h b/extern/bullet2/src/LinearMath/btVector3.h
index d99b7c83ae3..1cf65358803 100644
--- a/extern/bullet2/src/LinearMath/btVector3.h
+++ b/extern/bullet2/src/LinearMath/btVector3.h
@@ -17,9 +17,10 @@ subject to the following restrictions:
#ifndef BT_VECTOR3_H
#define BT_VECTOR3_H
-
+//#include <stdint.h>
#include "btScalar.h"
#include "btMinMax.h"
+#include "btAlignedAllocator.h"
#ifdef BT_USE_DOUBLE_PRECISION
#define btVector3Data btVector3DoubleData
@@ -29,8 +30,46 @@ subject to the following restrictions:
#define btVector3DataName "btVector3FloatData"
#endif //BT_USE_DOUBLE_PRECISION
+#if defined BT_USE_SSE
+
+//typedef uint32_t __m128i __attribute__ ((vector_size(16)));
+
+#ifdef _MSC_VER
+#pragma warning(disable: 4556) // value of intrinsic immediate argument '4294967239' is out of range '0 - 255'
+#endif
+
+
+#define BT_SHUFFLE(x,y,z,w) ((w)<<6 | (z)<<4 | (y)<<2 | (x))
+//#define bt_pshufd_ps( _a, _mask ) (__m128) _mm_shuffle_epi32((__m128i)(_a), (_mask) )
+#define bt_pshufd_ps( _a, _mask ) _mm_shuffle_ps((_a), (_a), (_mask) )
+#define bt_splat3_ps( _a, _i ) bt_pshufd_ps((_a), BT_SHUFFLE(_i,_i,_i, 3) )
+#define bt_splat_ps( _a, _i ) bt_pshufd_ps((_a), BT_SHUFFLE(_i,_i,_i,_i) )
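+// BT_SHUFFLE packs four 2-bit lane selectors into an immediate, lowest lane first,
+// e.g. BT_SHUFFLE(1,2,0,3) == 0xC9, which bt_pshufd_ps turns into the permutation (y, z, x, w).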
+
+#define btv3AbsiMask (_mm_set_epi32(0x00000000, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF))
+#define btvAbsMask (_mm_set_epi32( 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF))
+#define btvFFF0Mask (_mm_set_epi32(0x00000000, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF))
+#define btv3AbsfMask btCastiTo128f(btv3AbsiMask)
+#define btvFFF0fMask btCastiTo128f(btvFFF0Mask)
+#define btvxyzMaskf btvFFF0fMask
+#define btvAbsfMask btCastiTo128f(btvAbsMask)
+
+
+
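+// Constants used by the SSE paths below, e.g. the sign-flip xor tricks and the
+// rsqrt Newton-Raphson step in normalize().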
+const __m128 ATTRIBUTE_ALIGNED16(btvMzeroMask) = {-0.0f, -0.0f, -0.0f, -0.0f};
+const __m128 ATTRIBUTE_ALIGNED16(v1110) = {1.0f, 1.0f, 1.0f, 0.0f};
+const __m128 ATTRIBUTE_ALIGNED16(vHalf) = {0.5f, 0.5f, 0.5f, 0.5f};
+const __m128 ATTRIBUTE_ALIGNED16(v1_5) = {1.5f, 1.5f, 1.5f, 1.5f};
+
+#endif
+#ifdef BT_USE_NEON
+const float32x4_t ATTRIBUTE_ALIGNED16(btvMzeroMask) = (float32x4_t){-0.0f, -0.0f, -0.0f, -0.0f};
+const int32x4_t ATTRIBUTE_ALIGNED16(btvFFF0Mask) = (int32x4_t){0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x0};
+const int32x4_t ATTRIBUTE_ALIGNED16(btvAbsMask) = (int32x4_t){0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF};
+const int32x4_t ATTRIBUTE_ALIGNED16(btv3AbsMask) = (int32x4_t){0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x0};
+
+#endif
/**@brief btVector3 can be used to represent 3D points and vectors.
* It has an un-used w component to suit 16-byte alignment when btVector3 is stored in containers. This extra component can be used by derived classes (Quaternion?) or by user
@@ -40,6 +79,8 @@ ATTRIBUTE_ALIGNED16(class) btVector3
{
public:
+ BT_DECLARE_ALIGNED_ALLOCATOR();
+
#if defined (__SPU__) && defined (__CELLOS_LV2__)
btScalar m_floats[4];
public:
@@ -49,28 +90,31 @@ public:
}
public:
#else //__CELLOS_LV2__ __SPU__
-#ifdef BT_USE_SSE // _WIN32
- union {
- __m128 mVec128;
- btScalar m_floats[4];
- };
- SIMD_FORCE_INLINE __m128 get128() const
- {
- return mVec128;
- }
- SIMD_FORCE_INLINE void set128(__m128 v128)
- {
- mVec128 = v128;
- }
-#else
- btScalar m_floats[4];
-#endif
+ #if defined (BT_USE_SSE) || defined(BT_USE_NEON) // _WIN32 || ARM
+ union {
+ btSimdFloat4 mVec128;
+ btScalar m_floats[4];
+ };
+ SIMD_FORCE_INLINE btSimdFloat4 get128() const
+ {
+ return mVec128;
+ }
+ SIMD_FORCE_INLINE void set128(btSimdFloat4 v128)
+ {
+ mVec128 = v128;
+ }
+ #else
+ btScalar m_floats[4];
+ #endif
#endif //__CELLOS_LV2__ __SPU__
public:
/**@brief No initialization constructor */
- SIMD_FORCE_INLINE btVector3() {}
+ SIMD_FORCE_INLINE btVector3()
+ {
+
+ }
@@ -79,21 +123,50 @@ public:
* @param y Y value
* @param z Z value
*/
- SIMD_FORCE_INLINE btVector3(const btScalar& x, const btScalar& y, const btScalar& z)
+ SIMD_FORCE_INLINE btVector3(const btScalar& _x, const btScalar& _y, const btScalar& _z)
{
- m_floats[0] = x;
- m_floats[1] = y;
- m_floats[2] = z;
- m_floats[3] = btScalar(0.);
+ m_floats[0] = _x;
+ m_floats[1] = _y;
+ m_floats[2] = _z;
+ m_floats[3] = btScalar(0.f);
}
-
+#if (defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE) )|| defined (BT_USE_NEON)
+	// Construct from a SIMD register value
+ SIMD_FORCE_INLINE btVector3( btSimdFloat4 v)
+ {
+ mVec128 = v;
+ }
+
+ // Copy constructor
+ SIMD_FORCE_INLINE btVector3(const btVector3& rhs)
+ {
+ mVec128 = rhs.mVec128;
+ }
+
+ // Assignment Operator
+ SIMD_FORCE_INLINE btVector3&
+ operator=(const btVector3& v)
+ {
+ mVec128 = v.mVec128;
+
+ return *this;
+ }
+#endif // #if defined (BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
+
/**@brief Add a vector to this one
* @param v The vector to add to this one */
SIMD_FORCE_INLINE btVector3& operator+=(const btVector3& v)
{
-
- m_floats[0] += v.m_floats[0]; m_floats[1] += v.m_floats[1];m_floats[2] += v.m_floats[2];
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_add_ps(mVec128, v.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vaddq_f32(mVec128, v.mVec128);
+#else
+ m_floats[0] += v.m_floats[0];
+ m_floats[1] += v.m_floats[1];
+ m_floats[2] += v.m_floats[2];
+#endif
return *this;
}
@@ -102,14 +175,33 @@ public:
* @param v The vector to subtract */
SIMD_FORCE_INLINE btVector3& operator-=(const btVector3& v)
{
- m_floats[0] -= v.m_floats[0]; m_floats[1] -= v.m_floats[1];m_floats[2] -= v.m_floats[2];
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_sub_ps(mVec128, v.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vsubq_f32(mVec128, v.mVec128);
+#else
+ m_floats[0] -= v.m_floats[0];
+ m_floats[1] -= v.m_floats[1];
+ m_floats[2] -= v.m_floats[2];
+#endif
return *this;
}
+
/**@brief Scale the vector
* @param s Scale factor */
SIMD_FORCE_INLINE btVector3& operator*=(const btScalar& s)
{
- m_floats[0] *= s; m_floats[1] *= s;m_floats[2] *= s;
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vs = _mm_load_ss(&s); // (S 0 0 0)
+ vs = bt_pshufd_ps(vs, 0x80); // (S S S 0.0)
+ mVec128 = _mm_mul_ps(mVec128, vs);
+#elif defined(BT_USE_NEON)
+ mVec128 = vmulq_n_f32(mVec128, s);
+#else
+ m_floats[0] *= s;
+ m_floats[1] *= s;
+ m_floats[2] *= s;
+#endif
return *this;
}
@@ -118,14 +210,42 @@ public:
SIMD_FORCE_INLINE btVector3& operator/=(const btScalar& s)
{
btFullAssert(s != btScalar(0.0));
+
+#if 0 //defined(BT_USE_SSE_IN_API)
+// this code is not faster!
+ __m128 vs = _mm_load_ss(&s);
+ vs = _mm_div_ss(v1110, vs);
+ vs = bt_pshufd_ps(vs, 0x00); // (S S S S)
+
+ mVec128 = _mm_mul_ps(mVec128, vs);
+
+ return *this;
+#else
return *this *= btScalar(1.0) / s;
+#endif
}
/**@brief Return the dot product
* @param v The other vector in the dot product */
SIMD_FORCE_INLINE btScalar dot(const btVector3& v) const
{
- return m_floats[0] * v.m_floats[0] + m_floats[1] * v.m_floats[1] +m_floats[2] * v.m_floats[2];
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vd = _mm_mul_ps(mVec128, v.mVec128);
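+		// horizontal add: fold the y and z lanes into lane 0, leaving x*x' + y*y' + z*z' there (the w lane is never added)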
+ __m128 z = _mm_movehl_ps(vd, vd);
+ __m128 y = _mm_shuffle_ps(vd, vd, 0x55);
+ vd = _mm_add_ss(vd, y);
+ vd = _mm_add_ss(vd, z);
+ return _mm_cvtss_f32(vd);
+#elif defined(BT_USE_NEON)
+ float32x4_t vd = vmulq_f32(mVec128, v.mVec128);
+ float32x2_t x = vpadd_f32(vget_low_f32(vd), vget_low_f32(vd));
+ x = vadd_f32(x, vget_high_f32(vd));
+ return vget_lane_f32(x, 0);
+#else
+ return m_floats[0] * v.m_floats[0] +
+ m_floats[1] * v.m_floats[1] +
+ m_floats[2] * v.m_floats[2];
+#endif
}
/**@brief Return the length of the vector squared */
@@ -165,7 +285,44 @@ public:
* x^2 + y^2 + z^2 = 1 */
SIMD_FORCE_INLINE btVector3& normalize()
{
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ // dot product first
+ __m128 vd = _mm_mul_ps(mVec128, mVec128);
+ __m128 z = _mm_movehl_ps(vd, vd);
+ __m128 y = _mm_shuffle_ps(vd, vd, 0x55);
+ vd = _mm_add_ss(vd, y);
+ vd = _mm_add_ss(vd, z);
+
+ #if 0
+ vd = _mm_sqrt_ss(vd);
+ vd = _mm_div_ss(v1110, vd);
+ vd = bt_splat_ps(vd, 0x80);
+ mVec128 = _mm_mul_ps(mVec128, vd);
+ #else
+
+	// Newton-Raphson refinement of 1/sqrt(x): vd holds x, y will hold the result
+ y = _mm_rsqrt_ss(vd); // estimate
+
+ // one step NR
+ z = v1_5;
+ vd = _mm_mul_ss(vd, vHalf); // vd * 0.5
+ //x2 = vd;
+ vd = _mm_mul_ss(vd, y); // vd * 0.5 * y0
+ vd = _mm_mul_ss(vd, y); // vd * 0.5 * y0 * y0
+ z = _mm_sub_ss(z, vd); // 1.5 - vd * 0.5 * y0 * y0
+
+ y = _mm_mul_ss(y, z); // y0 * (1.5 - vd * 0.5 * y0 * y0)
+
+ y = bt_splat_ps(y, 0x80);
+ mVec128 = _mm_mul_ps(mVec128, y);
+
+ #endif
+
+
+ return *this;
+#else
return *this /= length();
+#endif
}
/**@brief Return a normalized version of this vector */
@@ -184,29 +341,112 @@ public:
btFullAssert(s != btScalar(0.0));
return btAcos(dot(v) / s);
}
+
/**@brief Return a vector with the absolute values of each element */
SIMD_FORCE_INLINE btVector3 absolute() const
{
+
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btVector3(_mm_and_ps(mVec128, btv3AbsfMask));
+#elif defined(BT_USE_NEON)
+ return btVector3(vabsq_f32(mVec128));
+#else
return btVector3(
btFabs(m_floats[0]),
btFabs(m_floats[1]),
btFabs(m_floats[2]));
+#endif
}
+
/**@brief Return the cross product between this and another vector
* @param v The other vector */
SIMD_FORCE_INLINE btVector3 cross(const btVector3& v) const
{
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 T, V;
+
+ T = bt_pshufd_ps(mVec128, BT_SHUFFLE(1, 2, 0, 3)); // (Y Z X 0)
+ V = bt_pshufd_ps(v.mVec128, BT_SHUFFLE(1, 2, 0, 3)); // (Y Z X 0)
+
+ V = _mm_mul_ps(V, mVec128);
+ T = _mm_mul_ps(T, v.mVec128);
+ V = _mm_sub_ps(V, T);
+
+ V = bt_pshufd_ps(V, BT_SHUFFLE(1, 2, 0, 3));
+ return btVector3(V);
+#elif defined(BT_USE_NEON)
+ float32x4_t T, V;
+ // form (Y, Z, X, _) of mVec128 and v.mVec128
+ float32x2_t Tlow = vget_low_f32(mVec128);
+ float32x2_t Vlow = vget_low_f32(v.mVec128);
+ T = vcombine_f32(vext_f32(Tlow, vget_high_f32(mVec128), 1), Tlow);
+ V = vcombine_f32(vext_f32(Vlow, vget_high_f32(v.mVec128), 1), Vlow);
+
+ V = vmulq_f32(V, mVec128);
+ T = vmulq_f32(T, v.mVec128);
+ V = vsubq_f32(V, T);
+ Vlow = vget_low_f32(V);
+ // form (Y, Z, X, _);
+ V = vcombine_f32(vext_f32(Vlow, vget_high_f32(V), 1), Vlow);
+ V = (float32x4_t)vandq_s32((int32x4_t)V, btvFFF0Mask);
+
+ return btVector3(V);
+#else
return btVector3(
- m_floats[1] * v.m_floats[2] -m_floats[2] * v.m_floats[1],
+ m_floats[1] * v.m_floats[2] - m_floats[2] * v.m_floats[1],
m_floats[2] * v.m_floats[0] - m_floats[0] * v.m_floats[2],
m_floats[0] * v.m_floats[1] - m_floats[1] * v.m_floats[0]);
+#endif
}
SIMD_FORCE_INLINE btScalar triple(const btVector3& v1, const btVector3& v2) const
{
- return m_floats[0] * (v1.m_floats[1] * v2.m_floats[2] - v1.m_floats[2] * v2.m_floats[1]) +
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ // cross:
+ __m128 T = _mm_shuffle_ps(v1.mVec128, v1.mVec128, BT_SHUFFLE(1, 2, 0, 3)); // (Y Z X 0)
+ __m128 V = _mm_shuffle_ps(v2.mVec128, v2.mVec128, BT_SHUFFLE(1, 2, 0, 3)); // (Y Z X 0)
+
+ V = _mm_mul_ps(V, v1.mVec128);
+ T = _mm_mul_ps(T, v2.mVec128);
+ V = _mm_sub_ps(V, T);
+
+ V = _mm_shuffle_ps(V, V, BT_SHUFFLE(1, 2, 0, 3));
+
+ // dot:
+ V = _mm_mul_ps(V, mVec128);
+ __m128 z = _mm_movehl_ps(V, V);
+ __m128 y = _mm_shuffle_ps(V, V, 0x55);
+ V = _mm_add_ss(V, y);
+ V = _mm_add_ss(V, z);
+ return _mm_cvtss_f32(V);
+
+#elif defined(BT_USE_NEON)
+ // cross:
+ float32x4_t T, V;
+ // form (Y, Z, X, _) of mVec128 and v.mVec128
+ float32x2_t Tlow = vget_low_f32(v1.mVec128);
+ float32x2_t Vlow = vget_low_f32(v2.mVec128);
+ T = vcombine_f32(vext_f32(Tlow, vget_high_f32(v1.mVec128), 1), Tlow);
+ V = vcombine_f32(vext_f32(Vlow, vget_high_f32(v2.mVec128), 1), Vlow);
+
+ V = vmulq_f32(V, v1.mVec128);
+ T = vmulq_f32(T, v2.mVec128);
+ V = vsubq_f32(V, T);
+ Vlow = vget_low_f32(V);
+ // form (Y, Z, X, _);
+ V = vcombine_f32(vext_f32(Vlow, vget_high_f32(V), 1), Vlow);
+
+ // dot:
+ V = vmulq_f32(mVec128, V);
+ float32x2_t x = vpadd_f32(vget_low_f32(V), vget_low_f32(V));
+ x = vadd_f32(x, vget_high_f32(V));
+ return vget_lane_f32(x, 0);
+#else
+ return
+ m_floats[0] * (v1.m_floats[1] * v2.m_floats[2] - v1.m_floats[2] * v2.m_floats[1]) +
m_floats[1] * (v1.m_floats[2] * v2.m_floats[0] - v1.m_floats[0] * v2.m_floats[2]) +
m_floats[2] * (v1.m_floats[0] * v2.m_floats[1] - v1.m_floats[1] * v2.m_floats[0]);
+#endif
}
/**@brief Return the axis with the smallest value
@@ -233,14 +473,31 @@ public:
return absolute().maxAxis();
}
+
SIMD_FORCE_INLINE void setInterpolate3(const btVector3& v0, const btVector3& v1, btScalar rt)
{
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vrt = _mm_load_ss(&rt); // (rt 0 0 0)
+ btScalar s = btScalar(1.0) - rt;
+ __m128 vs = _mm_load_ss(&s); // (S 0 0 0)
+ vs = bt_pshufd_ps(vs, 0x80); // (S S S 0.0)
+ __m128 r0 = _mm_mul_ps(v0.mVec128, vs);
+ vrt = bt_pshufd_ps(vrt, 0x80); // (rt rt rt 0.0)
+ __m128 r1 = _mm_mul_ps(v1.mVec128, vrt);
+ __m128 tmp3 = _mm_add_ps(r0,r1);
+ mVec128 = tmp3;
+#elif defined(BT_USE_NEON)
+ mVec128 = vsubq_f32(v1.mVec128, v0.mVec128);
+ mVec128 = vmulq_n_f32(mVec128, rt);
+ mVec128 = vaddq_f32(mVec128, v0.mVec128);
+#else
btScalar s = btScalar(1.0) - rt;
m_floats[0] = s * v0.m_floats[0] + rt * v1.m_floats[0];
m_floats[1] = s * v0.m_floats[1] + rt * v1.m_floats[1];
m_floats[2] = s * v0.m_floats[2] + rt * v1.m_floats[2];
//don't do the unused w component
// m_co[3] = s * v0[3] + rt * v1[3];
+#endif
}
/**@brief Return the linear interpolation between this and another vector
@@ -248,16 +505,41 @@ public:
* @param t The ratio of this to v (t = 0 => return this, t=1 => return other) */
SIMD_FORCE_INLINE btVector3 lerp(const btVector3& v, const btScalar& t) const
{
- return btVector3(m_floats[0] + (v.m_floats[0] - m_floats[0]) * t,
- m_floats[1] + (v.m_floats[1] - m_floats[1]) * t,
- m_floats[2] + (v.m_floats[2] -m_floats[2]) * t);
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vt = _mm_load_ss(&t); // (t 0 0 0)
+		vt = bt_pshufd_ps(vt, 0x80);	// (t t t 0.0)
+ __m128 vl = _mm_sub_ps(v.mVec128, mVec128);
+ vl = _mm_mul_ps(vl, vt);
+ vl = _mm_add_ps(vl, mVec128);
+
+ return btVector3(vl);
+#elif defined(BT_USE_NEON)
+ float32x4_t vl = vsubq_f32(v.mVec128, mVec128);
+ vl = vmulq_n_f32(vl, t);
+ vl = vaddq_f32(vl, mVec128);
+
+ return btVector3(vl);
+#else
+ return
+ btVector3( m_floats[0] + (v.m_floats[0] - m_floats[0]) * t,
+ m_floats[1] + (v.m_floats[1] - m_floats[1]) * t,
+ m_floats[2] + (v.m_floats[2] - m_floats[2]) * t);
+#endif
}
/**@brief Elementwise multiply this vector by the other
* @param v The other vector */
SIMD_FORCE_INLINE btVector3& operator*=(const btVector3& v)
{
- m_floats[0] *= v.m_floats[0]; m_floats[1] *= v.m_floats[1];m_floats[2] *= v.m_floats[2];
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_mul_ps(mVec128, v.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vmulq_f32(mVec128, v.mVec128);
+#else
+ m_floats[0] *= v.m_floats[0];
+ m_floats[1] *= v.m_floats[1];
+ m_floats[2] *= v.m_floats[2];
+#endif
return *this;
}
@@ -268,13 +550,13 @@ public:
/**@brief Return the z value */
SIMD_FORCE_INLINE const btScalar& getZ() const { return m_floats[2]; }
/**@brief Set the x value */
- SIMD_FORCE_INLINE void setX(btScalar x) { m_floats[0] = x;};
+ SIMD_FORCE_INLINE void setX(btScalar _x) { m_floats[0] = _x;};
/**@brief Set the y value */
- SIMD_FORCE_INLINE void setY(btScalar y) { m_floats[1] = y;};
+ SIMD_FORCE_INLINE void setY(btScalar _y) { m_floats[1] = _y;};
/**@brief Set the z value */
- SIMD_FORCE_INLINE void setZ(btScalar z) {m_floats[2] = z;};
+ SIMD_FORCE_INLINE void setZ(btScalar _z) { m_floats[2] = _z;};
/**@brief Set the w value */
- SIMD_FORCE_INLINE void setW(btScalar w) { m_floats[3] = w;};
+ SIMD_FORCE_INLINE void setW(btScalar _w) { m_floats[3] = _w;};
/**@brief Return the x value */
SIMD_FORCE_INLINE const btScalar& x() const { return m_floats[0]; }
/**@brief Return the y value */
@@ -292,7 +574,14 @@ public:
SIMD_FORCE_INLINE bool operator==(const btVector3& other) const
{
- return ((m_floats[3]==other.m_floats[3]) && (m_floats[2]==other.m_floats[2]) && (m_floats[1]==other.m_floats[1]) && (m_floats[0]==other.m_floats[0]));
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return (0xf == _mm_movemask_ps((__m128)_mm_cmpeq_ps(mVec128, other.mVec128)));
+#else
+ return ((m_floats[3]==other.m_floats[3]) &&
+ (m_floats[2]==other.m_floats[2]) &&
+ (m_floats[1]==other.m_floats[1]) &&
+ (m_floats[0]==other.m_floats[0]));
+#endif
}
SIMD_FORCE_INLINE bool operator!=(const btVector3& other) const
@@ -300,103 +589,230 @@ public:
return !(*this == other);
}
- /**@brief Set each element to the max of the current values and the values of another btVector3
+ /**@brief Set each element to the max of the current values and the values of another btVector3
* @param other The other btVector3 to compare with
*/
- SIMD_FORCE_INLINE void setMax(const btVector3& other)
- {
- btSetMax(m_floats[0], other.m_floats[0]);
- btSetMax(m_floats[1], other.m_floats[1]);
- btSetMax(m_floats[2], other.m_floats[2]);
- btSetMax(m_floats[3], other.w());
- }
+ SIMD_FORCE_INLINE void setMax(const btVector3& other)
+ {
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_max_ps(mVec128, other.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vmaxq_f32(mVec128, other.mVec128);
+#else
+ btSetMax(m_floats[0], other.m_floats[0]);
+ btSetMax(m_floats[1], other.m_floats[1]);
+ btSetMax(m_floats[2], other.m_floats[2]);
+ btSetMax(m_floats[3], other.w());
+#endif
+ }
+
/**@brief Set each element to the min of the current values and the values of another btVector3
* @param other The other btVector3 to compare with
*/
- SIMD_FORCE_INLINE void setMin(const btVector3& other)
- {
- btSetMin(m_floats[0], other.m_floats[0]);
- btSetMin(m_floats[1], other.m_floats[1]);
- btSetMin(m_floats[2], other.m_floats[2]);
- btSetMin(m_floats[3], other.w());
- }
-
- SIMD_FORCE_INLINE void setValue(const btScalar& x, const btScalar& y, const btScalar& z)
- {
- m_floats[0]=x;
- m_floats[1]=y;
- m_floats[2]=z;
- m_floats[3] = btScalar(0.);
- }
-
- void getSkewSymmetricMatrix(btVector3* v0,btVector3* v1,btVector3* v2) const
- {
- v0->setValue(0. ,-z() ,y());
- v1->setValue(z() ,0. ,-x());
- v2->setValue(-y() ,x() ,0.);
- }
-
- void setZero()
- {
- setValue(btScalar(0.),btScalar(0.),btScalar(0.));
- }
-
- SIMD_FORCE_INLINE bool isZero() const
- {
- return m_floats[0] == btScalar(0) && m_floats[1] == btScalar(0) && m_floats[2] == btScalar(0);
- }
-
- SIMD_FORCE_INLINE bool fuzzyZero() const
- {
- return length2() < SIMD_EPSILON;
- }
-
- SIMD_FORCE_INLINE void serialize(struct btVector3Data& dataOut) const;
+ SIMD_FORCE_INLINE void setMin(const btVector3& other)
+ {
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = _mm_min_ps(mVec128, other.mVec128);
+#elif defined(BT_USE_NEON)
+ mVec128 = vminq_f32(mVec128, other.mVec128);
+#else
+ btSetMin(m_floats[0], other.m_floats[0]);
+ btSetMin(m_floats[1], other.m_floats[1]);
+ btSetMin(m_floats[2], other.m_floats[2]);
+ btSetMin(m_floats[3], other.w());
+#endif
+ }
- SIMD_FORCE_INLINE void deSerialize(const struct btVector3Data& dataIn);
+ SIMD_FORCE_INLINE void setValue(const btScalar& _x, const btScalar& _y, const btScalar& _z)
+ {
+ m_floats[0]=_x;
+ m_floats[1]=_y;
+ m_floats[2]=_z;
+ m_floats[3] = btScalar(0.f);
+ }
- SIMD_FORCE_INLINE void serializeFloat(struct btVector3FloatData& dataOut) const;
+ void getSkewSymmetricMatrix(btVector3* v0,btVector3* v1,btVector3* v2) const
+ {
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+
+ __m128 V = _mm_and_ps(mVec128, btvFFF0fMask);
+ __m128 V0 = _mm_xor_ps(btvMzeroMask, V);
+ __m128 V2 = _mm_movelh_ps(V0, V);
+
+ __m128 V1 = _mm_shuffle_ps(V, V0, 0xCE);
+
+ V0 = _mm_shuffle_ps(V0, V, 0xDB);
+ V2 = _mm_shuffle_ps(V2, V, 0xF9);
+
+ v0->mVec128 = V0;
+ v1->mVec128 = V1;
+ v2->mVec128 = V2;
+#else
+ v0->setValue(0. ,-z() ,y());
+ v1->setValue(z() ,0. ,-x());
+ v2->setValue(-y() ,x() ,0.);
+#endif
+ }
- SIMD_FORCE_INLINE void deSerializeFloat(const struct btVector3FloatData& dataIn);
+ void setZero()
+ {
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ mVec128 = (__m128)_mm_xor_ps(mVec128, mVec128);
+#elif defined(BT_USE_NEON)
+ int32x4_t vi = vdupq_n_s32(0);
+ mVec128 = vreinterpretq_f32_s32(vi);
+#else
+ setValue(btScalar(0.),btScalar(0.),btScalar(0.));
+#endif
+ }
- SIMD_FORCE_INLINE void serializeDouble(struct btVector3DoubleData& dataOut) const;
+ SIMD_FORCE_INLINE bool isZero() const
+ {
+ return m_floats[0] == btScalar(0) && m_floats[1] == btScalar(0) && m_floats[2] == btScalar(0);
+ }
- SIMD_FORCE_INLINE void deSerializeDouble(const struct btVector3DoubleData& dataIn);
+ SIMD_FORCE_INLINE bool fuzzyZero() const
+ {
+ return length2() < SIMD_EPSILON;
+ }
+ SIMD_FORCE_INLINE void serialize(struct btVector3Data& dataOut) const;
+
+ SIMD_FORCE_INLINE void deSerialize(const struct btVector3Data& dataIn);
+
+ SIMD_FORCE_INLINE void serializeFloat(struct btVector3FloatData& dataOut) const;
+
+ SIMD_FORCE_INLINE void deSerializeFloat(const struct btVector3FloatData& dataIn);
+
+ SIMD_FORCE_INLINE void serializeDouble(struct btVector3DoubleData& dataOut) const;
+
+ SIMD_FORCE_INLINE void deSerializeDouble(const struct btVector3DoubleData& dataIn);
+
+ /**@brief returns index of maximum dot product between this and vectors in array[]
+ * @param array The other vectors
+ * @param array_count The number of other vectors
+ * @param dotOut The maximum dot product */
+ SIMD_FORCE_INLINE long maxDot( const btVector3 *array, long array_count, btScalar &dotOut ) const;
+
+ /**@brief returns index of minimum dot product between this and vectors in array[]
+ * @param array The other vectors
+ * @param array_count The number of other vectors
+ * @param dotOut The minimum dot product */
+ SIMD_FORCE_INLINE long minDot( const btVector3 *array, long array_count, btScalar &dotOut ) const;
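+	/* Illustrative use of maxDot/minDot (the variable names here are made up for the example):
+	 *   btScalar bestDot;
+	 *   long bestIndex = dir.maxDot( verts, numVerts, bestDot );
+	 * bestIndex is the index into verts whose dot product with dir is largest and
+	 * bestDot receives that dot product; minDot is the mirror image. */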
+
+	/* create a vector as btVector3( this->dot( v0 ), this->dot( v1 ), this->dot( v2 ) ) */
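+	/* Handy when one vector must be dotted against three others at once, e.g. the rows of a 3x3 matrix. */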
+ SIMD_FORCE_INLINE btVector3 dot3( const btVector3 &v0, const btVector3 &v1, const btVector3 &v2 ) const
+ {
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+
+ __m128 a0 = _mm_mul_ps( v0.mVec128, this->mVec128 );
+ __m128 a1 = _mm_mul_ps( v1.mVec128, this->mVec128 );
+ __m128 a2 = _mm_mul_ps( v2.mVec128, this->mVec128 );
+ __m128 b0 = _mm_unpacklo_ps( a0, a1 );
+ __m128 b1 = _mm_unpackhi_ps( a0, a1 );
+ __m128 b2 = _mm_unpacklo_ps( a2, _mm_setzero_ps() );
+ __m128 r = _mm_movelh_ps( b0, b2 );
+ r = _mm_add_ps( r, _mm_movehl_ps( b2, b0 ));
+ a2 = _mm_and_ps( a2, btvxyzMaskf);
+ r = _mm_add_ps( r, btCastdTo128f (_mm_move_sd( btCastfTo128d(a2), btCastfTo128d(b1) )));
+ return btVector3(r);
+
+#elif defined(BT_USE_NEON)
+ static const uint32x4_t xyzMask = (const uint32x4_t){ -1, -1, -1, 0 };
+ float32x4_t a0 = vmulq_f32( v0.mVec128, this->mVec128);
+ float32x4_t a1 = vmulq_f32( v1.mVec128, this->mVec128);
+ float32x4_t a2 = vmulq_f32( v2.mVec128, this->mVec128);
+ float32x2x2_t zLo = vtrn_f32( vget_high_f32(a0), vget_high_f32(a1));
+ a2 = (float32x4_t) vandq_u32((uint32x4_t) a2, xyzMask );
+ float32x2_t b0 = vadd_f32( vpadd_f32( vget_low_f32(a0), vget_low_f32(a1)), zLo.val[0] );
+ float32x2_t b1 = vpadd_f32( vpadd_f32( vget_low_f32(a2), vget_high_f32(a2)), vdup_n_f32(0.0f));
+ return btVector3( vcombine_f32(b0, b1) );
+#else
+ return btVector3( dot(v0), dot(v1), dot(v2));
+#endif
+ }
};
/**@brief Return the sum of two vectors (Point semantics) */
SIMD_FORCE_INLINE btVector3
operator+(const btVector3& v1, const btVector3& v2)
{
- return btVector3(v1.m_floats[0] + v2.m_floats[0], v1.m_floats[1] + v2.m_floats[1], v1.m_floats[2] + v2.m_floats[2]);
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btVector3(_mm_add_ps(v1.mVec128, v2.mVec128));
+#elif defined(BT_USE_NEON)
+ return btVector3(vaddq_f32(v1.mVec128, v2.mVec128));
+#else
+ return btVector3(
+ v1.m_floats[0] + v2.m_floats[0],
+ v1.m_floats[1] + v2.m_floats[1],
+ v1.m_floats[2] + v2.m_floats[2]);
+#endif
}
/**@brief Return the elementwise product of two vectors */
SIMD_FORCE_INLINE btVector3
operator*(const btVector3& v1, const btVector3& v2)
{
- return btVector3(v1.m_floats[0] * v2.m_floats[0], v1.m_floats[1] * v2.m_floats[1], v1.m_floats[2] * v2.m_floats[2]);
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btVector3(_mm_mul_ps(v1.mVec128, v2.mVec128));
+#elif defined(BT_USE_NEON)
+ return btVector3(vmulq_f32(v1.mVec128, v2.mVec128));
+#else
+ return btVector3(
+ v1.m_floats[0] * v2.m_floats[0],
+ v1.m_floats[1] * v2.m_floats[1],
+ v1.m_floats[2] * v2.m_floats[2]);
+#endif
}
/**@brief Return the difference between two vectors */
SIMD_FORCE_INLINE btVector3
operator-(const btVector3& v1, const btVector3& v2)
{
- return btVector3(v1.m_floats[0] - v2.m_floats[0], v1.m_floats[1] - v2.m_floats[1], v1.m_floats[2] - v2.m_floats[2]);
+#if defined BT_USE_SIMD_VECTOR3 && (defined(BT_USE_SSE_IN_API) && defined(BT_USE_SSE))
+
+ // without _mm_and_ps this code causes slowdown in Concave moving
+ __m128 r = _mm_sub_ps(v1.mVec128, v2.mVec128);
+ return btVector3(_mm_and_ps(r, btvFFF0fMask));
+#elif defined(BT_USE_NEON)
+ float32x4_t r = vsubq_f32(v1.mVec128, v2.mVec128);
+ return btVector3((float32x4_t)vandq_s32((int32x4_t)r, btvFFF0Mask));
+#else
+ return btVector3(
+ v1.m_floats[0] - v2.m_floats[0],
+ v1.m_floats[1] - v2.m_floats[1],
+ v1.m_floats[2] - v2.m_floats[2]);
+#endif
}
+
/**@brief Return the negative of the vector */
SIMD_FORCE_INLINE btVector3
operator-(const btVector3& v)
{
+#if defined BT_USE_SIMD_VECTOR3 && (defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE))
+ __m128 r = _mm_xor_ps(v.mVec128, btvMzeroMask);
+ return btVector3(_mm_and_ps(r, btvFFF0fMask));
+#elif defined(BT_USE_NEON)
+ return btVector3((btSimdFloat4)veorq_s32((int32x4_t)v.mVec128, (int32x4_t)btvMzeroMask));
+#else
return btVector3(-v.m_floats[0], -v.m_floats[1], -v.m_floats[2]);
+#endif
}
/**@brief Return the vector scaled by s */
SIMD_FORCE_INLINE btVector3
operator*(const btVector3& v, const btScalar& s)
{
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ __m128 vs = _mm_load_ss(&s); // (S 0 0 0)
+ vs = bt_pshufd_ps(vs, 0x80); // (S S S 0.0)
+ return btVector3(_mm_mul_ps(v.mVec128, vs));
+#elif defined(BT_USE_NEON)
+ float32x4_t r = vmulq_n_f32(v.mVec128, s);
+ return btVector3((float32x4_t)vandq_s32((int32x4_t)r, btvFFF0Mask));
+#else
return btVector3(v.m_floats[0] * s, v.m_floats[1] * s, v.m_floats[2] * s);
+#endif
}
/**@brief Return the vector scaled by s */
@@ -411,14 +827,46 @@ SIMD_FORCE_INLINE btVector3
operator/(const btVector3& v, const btScalar& s)
{
btFullAssert(s != btScalar(0.0));
+#if 0 //defined(BT_USE_SSE_IN_API)
+// this code is not faster!
+ __m128 vs = _mm_load_ss(&s);
+ vs = _mm_div_ss(v1110, vs);
+ vs = bt_pshufd_ps(vs, 0x00); // (S S S S)
+
+ return btVector3(_mm_mul_ps(v.mVec128, vs));
+#else
return v * (btScalar(1.0) / s);
+#endif
}
/**@brief Return the elementwise division of two vectors */
SIMD_FORCE_INLINE btVector3
operator/(const btVector3& v1, const btVector3& v2)
{
- return btVector3(v1.m_floats[0] / v2.m_floats[0],v1.m_floats[1] / v2.m_floats[1],v1.m_floats[2] / v2.m_floats[2]);
+#if defined BT_USE_SIMD_VECTOR3 && (defined(BT_USE_SSE_IN_API)&& defined (BT_USE_SSE))
+ __m128 vec = _mm_div_ps(v1.mVec128, v2.mVec128);
+ vec = _mm_and_ps(vec, btvFFF0fMask);
+ return btVector3(vec);
+#elif defined(BT_USE_NEON)
+ float32x4_t x, y, v, m;
+
+ x = v1.mVec128;
+ y = v2.mVec128;
+
+ v = vrecpeq_f32(y); // v ~ 1/y
+ m = vrecpsq_f32(y, v); // m = (2-v*y)
+ v = vmulq_f32(v, m); // vv = v*m ~~ 1/y
+ m = vrecpsq_f32(y, v); // mm = (2-vv*y)
+ v = vmulq_f32(v, x); // x*vv
+ v = vmulq_f32(v, m); // (x*vv)*(2-vv*y) = x*(vv(2-vv*y)) ~~~ x/y
+
+ return btVector3(v);
+#else
+ return btVector3(
+ v1.m_floats[0] / v2.m_floats[0],
+ v1.m_floats[1] / v2.m_floats[1],
+ v1.m_floats[2] / v2.m_floats[2]);
+#endif
}
/**@brief Return the dot product between two vectors */
@@ -488,22 +936,133 @@ SIMD_FORCE_INLINE btScalar btVector3::distance(const btVector3& v) const
SIMD_FORCE_INLINE btVector3 btVector3::normalized() const
{
+#if defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ btVector3 norm = *this;
+
+ return norm.normalize();
+#else
return *this / length();
+#endif
}
-SIMD_FORCE_INLINE btVector3 btVector3::rotate( const btVector3& wAxis, const btScalar angle ) const
+SIMD_FORCE_INLINE btVector3 btVector3::rotate( const btVector3& wAxis, const btScalar _angle ) const
{
// wAxis must be a unit length vector
+#if defined BT_USE_SIMD_VECTOR3 && defined (BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+
+ __m128 O = _mm_mul_ps(wAxis.mVec128, mVec128);
+ btScalar ssin = btSin( _angle );
+ __m128 C = wAxis.cross( mVec128 ).mVec128;
+ O = _mm_and_ps(O, btvFFF0fMask);
+ btScalar scos = btCos( _angle );
+
+ __m128 vsin = _mm_load_ss(&ssin); // (S 0 0 0)
+ __m128 vcos = _mm_load_ss(&scos); // (S 0 0 0)
+
+ __m128 Y = bt_pshufd_ps(O, 0xC9); // (Y Z X 0)
+ __m128 Z = bt_pshufd_ps(O, 0xD2); // (Z X Y 0)
+ O = _mm_add_ps(O, Y);
+ vsin = bt_pshufd_ps(vsin, 0x80); // (S S S 0)
+ O = _mm_add_ps(O, Z);
+ vcos = bt_pshufd_ps(vcos, 0x80); // (S S S 0)
+
+ vsin = vsin * C;
+ O = O * wAxis.mVec128;
+ __m128 X = mVec128 - O;
+
+ O = O + vsin;
+ vcos = vcos * X;
+ O = O + vcos;
+
+ return btVector3(O);
+#else
btVector3 o = wAxis * wAxis.dot( *this );
- btVector3 x = *this - o;
- btVector3 y;
+ btVector3 _x = *this - o;
+ btVector3 _y;
- y = wAxis.cross( *this );
+ _y = wAxis.cross( *this );
- return ( o + x * btCos( angle ) + y * btSin( angle ) );
+ return ( o + _x * btCos( _angle ) + _y * btSin( _angle ) );
+#endif
+}
+
+SIMD_FORCE_INLINE long btVector3::maxDot( const btVector3 *array, long array_count, btScalar &dotOut ) const
+{
+#if (defined BT_USE_SSE && defined BT_USE_SIMD_VECTOR3 && defined BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
+ #if defined _WIN32 || defined (BT_USE_SSE)
+ const long scalar_cutoff = 10;
+ long _maxdot_large( const float *array, const float *vec, unsigned long array_count, float *dotOut );
+ #elif defined BT_USE_NEON
+ const long scalar_cutoff = 4;
+ extern long (*_maxdot_large)( const float *array, const float *vec, unsigned long array_count, float *dotOut );
+ #endif
+ if( array_count < scalar_cutoff )
+#endif
+ {
+ btScalar maxDot = -SIMD_INFINITY;
+ int i = 0;
+ int ptIndex = -1;
+ for( i = 0; i < array_count; i++ )
+ {
+ btScalar dot = array[i].dot(*this);
+
+ if( dot > maxDot )
+ {
+ maxDot = dot;
+ ptIndex = i;
+ }
+ }
+
+ dotOut = maxDot;
+ return ptIndex;
+ }
+#if (defined BT_USE_SSE && defined BT_USE_SIMD_VECTOR3 && defined BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
+ return _maxdot_large( (float*) array, (float*) &m_floats[0], array_count, &dotOut );
+#endif
}
+SIMD_FORCE_INLINE long btVector3::minDot( const btVector3 *array, long array_count, btScalar &dotOut ) const
+{
+#if (defined BT_USE_SSE && defined BT_USE_SIMD_VECTOR3 && defined BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
+ #if defined BT_USE_SSE
+ const long scalar_cutoff = 10;
+ long _mindot_large( const float *array, const float *vec, unsigned long array_count, float *dotOut );
+ #elif defined BT_USE_NEON
+ const long scalar_cutoff = 4;
+ extern long (*_mindot_large)( const float *array, const float *vec, unsigned long array_count, float *dotOut );
+ #else
+ #error unhandled arch!
+ #endif
+
+ if( array_count < scalar_cutoff )
+#endif
+ {
+ btScalar minDot = SIMD_INFINITY;
+ int i = 0;
+ int ptIndex = -1;
+
+ for( i = 0; i < array_count; i++ )
+ {
+ btScalar dot = array[i].dot(*this);
+
+ if( dot < minDot )
+ {
+ minDot = dot;
+ ptIndex = i;
+ }
+ }
+
+ dotOut = minDot;
+
+ return ptIndex;
+ }
+#if (defined BT_USE_SSE && defined BT_USE_SIMD_VECTOR3 && defined BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
+ return _mindot_large( (float*) array, (float*) &m_floats[0], array_count, &dotOut );
+#endif//BT_USE_SIMD_VECTOR3
+}
+
+
class btVector4 : public btVector3
{
public:
@@ -511,24 +1070,47 @@ public:
SIMD_FORCE_INLINE btVector4() {}
- SIMD_FORCE_INLINE btVector4(const btScalar& x, const btScalar& y, const btScalar& z,const btScalar& w)
- : btVector3(x,y,z)
+ SIMD_FORCE_INLINE btVector4(const btScalar& _x, const btScalar& _y, const btScalar& _z,const btScalar& _w)
+ : btVector3(_x,_y,_z)
+ {
+ m_floats[3] = _w;
+ }
+
+#if (defined (BT_USE_SSE_IN_API)&& defined (BT_USE_SSE)) || defined (BT_USE_NEON)
+ SIMD_FORCE_INLINE btVector4(const btSimdFloat4 vec)
+ {
+ mVec128 = vec;
+ }
+
+ SIMD_FORCE_INLINE btVector4(const btVector3& rhs)
{
- m_floats[3] = w;
+ mVec128 = rhs.mVec128;
}
+ SIMD_FORCE_INLINE btVector4&
+ operator=(const btVector4& v)
+ {
+ mVec128 = v.mVec128;
+ return *this;
+ }
+#endif // #if defined (BT_USE_SSE_IN_API) || defined (BT_USE_NEON)
SIMD_FORCE_INLINE btVector4 absolute4() const
{
+#if defined BT_USE_SIMD_VECTOR3 && defined(BT_USE_SSE_IN_API) && defined (BT_USE_SSE)
+ return btVector4(_mm_and_ps(mVec128, btvAbsfMask));
+#elif defined(BT_USE_NEON)
+ return btVector4(vabsq_f32(mVec128));
+#else
return btVector4(
btFabs(m_floats[0]),
btFabs(m_floats[1]),
btFabs(m_floats[2]),
btFabs(m_floats[3]));
+#endif
}
-
btScalar getW() const { return m_floats[3];}
@@ -556,12 +1138,8 @@ public:
maxIndex = 3;
maxVal = m_floats[3];
}
-
-
-
return maxIndex;
-
}
@@ -591,7 +1169,6 @@ public:
}
return minIndex;
-
}
@@ -623,12 +1200,12 @@ public:
* @param z Value of z
* @param w Value of w
*/
- SIMD_FORCE_INLINE void setValue(const btScalar& x, const btScalar& y, const btScalar& z,const btScalar& w)
+ SIMD_FORCE_INLINE void setValue(const btScalar& _x, const btScalar& _y, const btScalar& _z,const btScalar& _w)
{
- m_floats[0]=x;
- m_floats[1]=y;
- m_floats[2]=z;
- m_floats[3]=w;
+ m_floats[0]=_x;
+ m_floats[1]=_y;
+ m_floats[2]=_z;
+ m_floats[3]=_w;
}
@@ -762,5 +1339,4 @@ SIMD_FORCE_INLINE void btVector3::deSerialize(const struct btVector3Data& dataIn
m_floats[i] = dataIn.m_floats[i];
}
-
#endif //BT_VECTOR3_H