#ifndef BT_QUANTIZED_BVH_H
#define BT_QUANTIZED_BVH_H

#ifdef DEBUG_CHECK_DEQUANTIZATION
#ifdef __SPU__
#define printf spu_printf
#endif  //__SPU__
#include <stdio.h>
#endif  //DEBUG_CHECK_DEQUANTIZATION

#include "LinearMath/btVector3.h"
#include "LinearMath/btAlignedAllocator.h"
#ifdef BT_USE_DOUBLE_PRECISION
#define btQuantizedBvhData btQuantizedBvhDoubleData
#define btOptimizedBvhNodeData btOptimizedBvhNodeDoubleData
#define btQuantizedBvhDataName "btQuantizedBvhDoubleData"
#else
#define btQuantizedBvhData btQuantizedBvhFloatData
#define btOptimizedBvhNodeData btOptimizedBvhNodeFloatData
#define btQuantizedBvhDataName "btQuantizedBvhFloatData"
#endif  //BT_USE_DOUBLE_PRECISION
#define MAX_SUBTREE_SIZE_IN_BYTES 2048

#define MAX_NUM_PARTS_IN_BITS 10
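// Illustrative arithmetic, not part of the original header: with MAX_NUM_PARTS_IN_BITS = 10,
// a 32-bit leaf index can address
//   1 << MAX_NUM_PARTS_IN_BITS        = 1024 parts, and
//   1 << (31 - MAX_NUM_PARTS_IN_BITS) = 2097152 triangle indices per part,
// since one bit is reserved to distinguish leaf nodes (index >= 0) from internal nodes.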
	//12 bytes of quantized aabb, plus a 4-byte combined escape index / triangle index (btQuantizedBvhNode, 16 bytes total)
	unsigned short int m_quantizedAabbMin[3];
	unsigned short int m_quantizedAabbMax[3];

	//skipindex is negative (internal node), triangleindex >= 0 (leaf node)
	bool isLeafNode() const { return (m_escapeIndexOrTriangleIndex >= 0); }
	int getEscapeIndex() const { return -m_escapeIndexOrTriangleIndex; }
	int getTriangleIndex() const
	{
		//keep only the lower bits, where the triangle index is stored
		unsigned int y = (~((unsigned int)0)) << (31 - MAX_NUM_PARTS_IN_BITS);
		return (m_escapeIndexOrTriangleIndex & ~(y));
	}
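	// Illustrative sketch, not part of the original header: given the mask used in
	// getTriangleIndex() above, a packed leaf index would combine a part id and a
	// triangle index roughly like this (partId/triangleIndex are assumed names):
	//
	//   int packed = (partId << (31 - MAX_NUM_PARTS_IN_BITS)) | triangleIndex;
	//   int tri    = packed & ~((~((unsigned int)0)) << (31 - MAX_NUM_PARTS_IN_BITS));  // == triangleIndex
	//   int part   = packed >> (31 - MAX_NUM_PARTS_IN_BITS);                            // == partId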
	//quantized aabb of a subtree of limited size (btBvhSubtreeInfo)
	unsigned short int m_quantizedAabbMin[3];
	unsigned short int m_quantizedAabbMax[3];
///for code readability:
typedef btAlignedObjectArray<btOptimizedBvhNode> NodeArray;
typedef btAlignedObjectArray<btQuantizedBvhNode> QuantizedNodeArray;
typedef btAlignedObjectArray<btBvhSubtreeInfo> BvhSubtreeInfoArray;

///The btQuantizedBvh class stores an AABB tree that can be quickly traversed on CPU and Cell SPU.
ATTRIBUTE_ALIGNED16(class)
btQuantizedBvh
{
public:
	enum btTraversalMode
	{
		TRAVERSAL_STACKLESS = 0,
		TRAVERSAL_STACKLESS_CACHE_FRIENDLY,
		TRAVERSAL_RECURSIVE
	};
	///two versions, one for quantized and normal nodes
	void setInternalNodeAabbMin(int nodeIndex, const btVector3& aabbMin)
	{
		if (m_useQuantization)
			quantize(&m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0], aabbMin, 0);
		else
			m_contiguousNodes[nodeIndex].m_aabbMinOrg = aabbMin;
	}
	void setInternalNodeAabbMax(int nodeIndex, const btVector3& aabbMax)
	{
		if (m_useQuantization)
			quantize(&m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0], aabbMax, 1);
		else
			m_contiguousNodes[nodeIndex].m_aabbMaxOrg = aabbMax;
	}
	btVector3 getAabbMin(int nodeIndex) const
	{
		if (m_useQuantization)
			return unQuantize(&m_quantizedLeafNodes[nodeIndex].m_quantizedAabbMin[0]);
		//non-quantized
		return m_leafNodes[nodeIndex].m_aabbMinOrg;
	}
	btVector3 getAabbMax(int nodeIndex) const
	{
		if (m_useQuantization)
			return unQuantize(&m_quantizedLeafNodes[nodeIndex].m_quantizedAabbMax[0]);
		//non-quantized
		return m_leafNodes[nodeIndex].m_aabbMaxOrg;
	}
	void setInternalNodeEscapeIndex(int nodeIndex, int escapeIndex)
	{
		if (m_useQuantization)
			m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = -escapeIndex;
		else
			m_contiguousNodes[nodeIndex].m_escapeIndex = escapeIndex;
	}
	void mergeInternalNodeAabb(int nodeIndex, const btVector3& newAabbMin, const btVector3& newAabbMax)
	{
		if (m_useQuantization)
		{
			unsigned short int quantizedAabbMin[3];
			unsigned short int quantizedAabbMax[3];
			quantize(quantizedAabbMin, newAabbMin, 0);
			quantize(quantizedAabbMax, newAabbMax, 1);
			for (int i = 0; i < 3; i++)
			{
				if (m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[i] > quantizedAabbMin[i])
					m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[i] = quantizedAabbMin[i];

				if (m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[i] < quantizedAabbMax[i])
					m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[i] = quantizedAabbMax[i];
			}
		}
		else
		{
			//non-quantized
			m_contiguousNodes[nodeIndex].m_aabbMinOrg.setMin(newAabbMin);
			m_contiguousNodes[nodeIndex].m_aabbMaxOrg.setMax(newAabbMax);
		}
	}
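	// Illustrative example (values invented for illustration): if the node's current
	// quantized min is {10, 4, 7} and the merged-in child min quantizes to {8, 6, 7},
	// the stored min becomes {8, 4, 7}; maxima are handled symmetrically, so the
	// internal node's AABB only ever grows and stays conservative.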
	void swapLeafNodes(int firstIndex, int secondIndex);

	void assignInternalNodeFromLeafNode(int internalNode, int leafNodeIndex);
	void buildTree(int startIndex, int endIndex);

	int calcSplittingAxis(int startIndex, int endIndex);

	int sortAndCalcSplittingIndex(int startIndex, int endIndex, int splitAxis);
	void walkStacklessQuantizedTree(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax, int startNodeIndex, int endNodeIndex) const;

	///tree traversal designed for small-memory processors like PS3 SPU
	void walkStacklessQuantizedTreeCacheFriendly(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const;

	///use the 16-byte stackless 'skipindex' node tree to do a recursive traversal
	void walkRecursiveQuantizedTreeAgainstQueryAabb(const btQuantizedBvhNode* currentNode, btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const;

	void updateSubtreeHeaders(int leftChildNodexIndex, int rightChildNodexIndex);
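	// Illustrative sketch (not the original implementation) of the stackless 'skipindex'
	// walk the traversal methods above rely on; the quantized overlap test and getPartId()
	// are assumptions here:
	//
	//   int curIndex = startNodeIndex;
	//   while (curIndex < endNodeIndex)
	//   {
	//       const btQuantizedBvhNode* node = &m_quantizedContiguousNodes[curIndex];
	//       bool overlap = /* quantized query AABB vs node AABB overlap test */;
	//       if (overlap && node->isLeafNode())
	//           nodeCallback->processNode(node->getPartId(), node->getTriangleIndex());
	//       if (overlap || node->isLeafNode())
	//           curIndex += 1;                       // step to the next node in the array
	//       else
	//           curIndex += node->getEscapeIndex();  // skip the node's entire subtree
	//   }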
	void buildInternal();
	SIMD_FORCE_INLINE void quantize(unsigned short* out, const btVector3& point, int isMax) const
	{
		btVector3 v = (point - m_bvhAabbMin) * m_bvhQuantization;
		//rounding must stay conservative: max end-points round up and set the low bit,
		//min end-points round down and clear it, so neighbouring AABBs keep overlapping
		if (isMax)
		{
			out[0] = (unsigned short)(((unsigned short)(v.getX() + btScalar(1.)) | 1));
			out[1] = (unsigned short)(((unsigned short)(v.getY() + btScalar(1.)) | 1));
			out[2] = (unsigned short)(((unsigned short)(v.getZ() + btScalar(1.)) | 1));
		}
		else
		{
			out[0] = (unsigned short)(((unsigned short)(v.getX()) & 0xfffe));
			out[1] = (unsigned short)(((unsigned short)(v.getY()) & 0xfffe));
			out[2] = (unsigned short)(((unsigned short)(v.getZ()) & 0xfffe));
		}
#ifdef DEBUG_CHECK_DEQUANTIZATION
		btVector3 newPoint = unQuantize(out);
		if (isMax)
		{
			if (newPoint.getX() < point.getX())
				printf("unconservative X, diffX = %f, oldX=%f,newX=%f\n", newPoint.getX() - point.getX(), newPoint.getX(), point.getX());
			if (newPoint.getY() < point.getY())
				printf("unconservative Y, diffY = %f, oldY=%f,newY=%f\n", newPoint.getY() - point.getY(), newPoint.getY(), point.getY());
			if (newPoint.getZ() < point.getZ())
				printf("unconservative Z, diffZ = %f, oldZ=%f,newZ=%f\n", newPoint.getZ() - point.getZ(), newPoint.getZ(), point.getZ());
		}
		else
		{
			if (newPoint.getX() > point.getX())
				printf("unconservative X, diffX = %f, oldX=%f,newX=%f\n", newPoint.getX() - point.getX(), newPoint.getX(), point.getX());
			if (newPoint.getY() > point.getY())
				printf("unconservative Y, diffY = %f, oldY=%f,newY=%f\n", newPoint.getY() - point.getY(), newPoint.getY(), point.getY());
			if (newPoint.getZ() > point.getZ())
				printf("unconservative Z, diffZ = %f, oldZ=%f,newZ=%f\n", newPoint.getZ() - point.getZ(), newPoint.getZ(), point.getZ());
		}
#endif  //DEBUG_CHECK_DEQUANTIZATION
	}
	SIMD_FORCE_INLINE void quantizeWithClamp(unsigned short* out, const btVector3& point2, int isMax) const
	{
		btVector3 clampedPoint(point2);
		clampedPoint.setMax(m_bvhAabbMin);
		clampedPoint.setMin(m_bvhAabbMax);

		quantize(out, clampedPoint, isMax);
	}
	SIMD_FORCE_INLINE btVector3 unQuantize(const unsigned short* vecIn) const
	{
		btVector3 vecOut;
		//map the 16-bit integers back into bvh-local coordinates, then offset by the bvh aabb min
		vecOut.setValue((btScalar)(vecIn[0]) / m_bvhQuantization.getX(),
						(btScalar)(vecIn[1]) / m_bvhQuantization.getY(),
						(btScalar)(vecIn[2]) / m_bvhQuantization.getZ());
		vecOut += m_bvhAabbMin;
		return vecOut;
	}
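	// Illustrative usage sketch (not from the original header): callers typically
	// clamp-quantize a query AABB once and then compare it against the nodes' 16-bit
	// bounds; queryAabbMin/queryAabbMax are assumed names:
	//
	//   unsigned short quantizedQueryAabbMin[3], quantizedQueryAabbMax[3];
	//   quantizeWithClamp(quantizedQueryAabbMin, queryAabbMin, 0);
	//   quantizeWithClamp(quantizedQueryAabbMax, queryAabbMax, 1);
	//   // unQuantize(quantizedQueryAabbMin) is now <= queryAabbMin per component and
	//   // unQuantize(quantizedQueryAabbMax) >= queryAabbMax, so the query stays conservative.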
	///setTraversalMode lets you choose between stackless, recursive or stackless cache-friendly tree traversal
	void setTraversalMode(btTraversalMode traversalMode)
	{
		m_traversalMode = traversalMode;
	}

	SIMD_FORCE_INLINE QuantizedNodeArray& getQuantizedNodeArray()
	{
		return m_quantizedContiguousNodes;
	}

	SIMD_FORCE_INLINE BvhSubtreeInfoArray& getSubtreeInfoArray()
	{
		return m_SubtreeHeaders;
	}
	///Calculate space needed to store BVH for serialization
	unsigned calculateSerializeBufferSize() const;

	///Data buffer MUST be 16 byte aligned
	virtual bool serialize(void* o_alignedDataBuffer, unsigned i_dataBufferSize, bool i_swapEndian) const;

	///deSerializeInPlace loads and initializes a BVH from a buffer in memory 'in place'
	static btQuantizedBvh* deSerializeInPlace(void* i_alignedDataBuffer, unsigned int i_dataBufferSize, bool i_swapEndian);

	static unsigned int getAlignmentSerializationPadding();

	virtual int calculateSerializeBufferSizeNew() const;

	virtual const char* serialize(void* dataBuffer, btSerializer* serializer) const;
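	// Illustrative usage sketch (not from the original header): the in-place
	// serialization path could be exercised roughly like this; the btAlignedAlloc-based
	// buffer handling is an assumption, not mandated by this header:
	//
	//   unsigned size = bvh->calculateSerializeBufferSize();
	//   void* buffer = btAlignedAlloc(size, 16);  // 16-byte aligned buffer
	//   bvh->serialize(buffer, size, false /*i_swapEndian*/);
	//   // ...store or copy 'buffer', then later...
	//   btQuantizedBvh* restored = btQuantizedBvh::deSerializeInPlace(buffer, size, false);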
	SIMD_FORCE_INLINE bool isQuantized()
	{
		return m_useQuantization;
	}
};

#endif  //BT_QUANTIZED_BVH_H