Torque3D Documentation / _generateds / openVRTrackedObject.cpp

openVRTrackedObject.cpp

Engine/source/platform/input/openVR/openVRTrackedObject.cpp

More...

Public Variables

Public Functions

ConsoleDocClass(OpenVRTrackedObject , "@brief Renders and handles interactions with OpenVR controllers and tracked <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">objects.\n\n</a>" "This class implements basic rendering and interactions with OpenVR <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">controllers.\n\n</a>" "The object should be controlled by <a href="/coding/file/pointer_8h/#pointer_8h_1aeeddce917cf130d62c370b8f216026dd">a</a> player object. Controllers will be rendered <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">at\n</a>" "the correct position regardless of the current transform of the <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">object.\n</a>" "@ingroup <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">OpenVR\n</a>" )
DefineEngineMethod(OpenVRTrackedObject , setModelName , void , (String modelName) , "Set model name. Typically you should do this from the client <a href="/coding/file/cmdgram_8cpp/#cmdgram_8cpp_1a5bafda9519252aa2d0fd038153f77dca">to</a> update the server representation." )

Detailed Description

Public Variables

const U32 sCollisionMoveMask 

Public Functions

ConsoleDocClass(OpenVRTrackedObject , "@brief Renders and handles interactions with OpenVR controllers and tracked <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">objects.\n\n</a>" "This class implements basic rendering and interactions with OpenVR <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">controllers.\n\n</a>" "The object should be controlled by <a href="/coding/file/pointer_8h/#pointer_8h_1aeeddce917cf130d62c370b8f216026dd">a</a> player object. Controllers will be rendered <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">at\n</a>" "the correct position regardless of the current transform of the <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">object.\n</a>" "@ingroup <a href="/coding/file/cmdscan_8cpp/#cmdscan_8cpp_1aeab71244afb687f16d8c4f5ee9d6ef0e">OpenVR\n</a>" )

DefineEngineMethod(OpenVRTrackedObject , setModelName , void , (String modelName) , "Set model name. Typically you should do this from the client <a href="/coding/file/cmdgram_8cpp/#cmdgram_8cpp_1a5bafda9519252aa2d0fd038153f77dca">to</a> update the server representation." )

IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData )

IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject )

  1
  2#include "platform/platform.h"
  3#include "platform/input/openVR/openVRTrackedObject.h"
  4#include "platform/input/openVR/openVRProvider.h"
  5
  6#include "math/mathIO.h"
  7#include "scene/sceneRenderState.h"
  8#include "console/consoleTypes.h"
  9#include "core/stream/bitStream.h"
 10#include "core/resourceManager.h"
 11#include "materials/materialManager.h"
 12#include "materials/baseMatInstance.h"
 13#include "renderInstance/renderPassManager.h"
 14#include "lighting/lightQuery.h"
 15#include "console/engineAPI.h"
 16#include "gfx/gfxTextureManager.h"
 17#include "gfx/sim/debugDraw.h"
 18#include "gfx/gfxTransformSaver.h"
 19#include "environment/skyBox.h"
 20#include "collision/boxConvex.h"
 21#include "collision/concretePolyList.h"
 22#include "T3D/physics/physicsPlugin.h"
 23#include "T3D/physics/physicsCollision.h"
 24#include "T3D/physics/physicsBody.h"
 25
 26#ifdef TORQUE_EXTENDED_MOVE
 27#include "T3D/gameBase/extended/extendedMove.h"
 28#endif
 29
 30
// Debug-draw toggles, exposed to script as
// $OpenVRTrackedObject::debugController* (see initPersistFields).
bool OpenVRTrackedObject::smDebugControllerMovePosition = true;
bool OpenVRTrackedObject::smDebugControllerPosition = false;

// Object types the controller's convex working set collides against.
static const U32 sCollisionMoveMask = (PlayerObjectType |
   StaticShapeObjectType | VehicleObjectType);

// Server and client default to the same mask; selected in getCollisionMask().
U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType
U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask;
 39
 40//-----------------------------------------------------------------------------
 41
// Register OpenVRTrackedObjectData as a networked datablock class.
IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData);
 43
 44OpenVRTrackedObjectData::OpenVRTrackedObjectData() :
 45   mShapeFile(NULL)
 46{
 47   mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02);
 48   mCollisionBoxMax = Point3F(0.02, 0.05, 0.02);
 49}
 50
// Nothing to release: mShape is a managed resource handle and mShapeFile
// is a StringTable entry.
OpenVRTrackedObjectData::~OpenVRTrackedObjectData()
{
}
 54
 55bool OpenVRTrackedObjectData::onAdd()
 56{
 57   if (Parent::onAdd())
 58   {
 59      return true;
 60   }
 61
 62   return false;
 63}
 64
 65bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
 66{
 67   if (!Parent::preload(server, errorStr))
 68      return false;
 69
 70   bool error = false;
 71   if (!server)
 72   {
 73      mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;
 74   }
 75}
 76
 77void OpenVRTrackedObjectData::initPersistFields()
 78{
 79   addGroup("Render Components");
 80   addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model.");
 81   addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min");
 82   addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box min");
 83   endGroup("Render Components");
 84
 85   Parent::initPersistFields();
 86}
 87
// Serialize datablock state to clients; mirrored by unpackData().
void OpenVRTrackedObjectData::packData(BitStream* stream)
{
   Parent::packData(stream);

   // Shape filename is the only extra state sent for this datablock.
   stream->writeString(mShapeFile);
}
 94
// Deserialize datablock state on the client; mirrors packData().
void OpenVRTrackedObjectData::unpackData(BitStream* stream)
{
   Parent::unpackData(stream);

   // Interned as a StringTable entry by readSTString().
   mShapeFile = stream->readSTString();
}
101
102//-----------------------------------------------------------------------------
103
104
// Register OpenVRTrackedObject as a ghostable network object class.
IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject);
106
107ConsoleDocClass(OpenVRTrackedObject,
108   "@brief Renders and handles interactions OpenVR controllers and tracked objects.\n\n"
109   "This class implements basic rendering and interactions with OpenVR controllers.\n\n"
110   "The object should be controlled by a player object. Controllers will be rendered at\n"
111   "the correct position regardless of the current transform of the object.\n"
112   "@ingroup OpenVR\n");
113
114
115//-----------------------------------------------------------------------------
116// Object setup and teardown
117//-----------------------------------------------------------------------------
OpenVRTrackedObject::OpenVRTrackedObject() :
   mDataBlock(NULL),
   mShapeInstance(NULL),
   mBasicModel(NULL),
   mDeviceIndex(-1),       // -1 == not bound to a tracked device yet
   mMappedMoveIndex(-1),   // -1 == no ExtendedMove slot assigned
   mIgnoreParentRotation(true),
   mConvexList(new Convex()),
   mPhysicsRep(NULL)
{
   // Flag this object so that it will always
   // be sent across the network to clients
   mNetFlags.set(Ghostable | ScopeAlways);

   // Set it as a "static" object that casts shadows
   mTypeMask |= StaticObjectType | StaticShapeObjectType;

   // No pose until a move or device update arrives.
   mPose.connected = false;
}
137
OpenVRTrackedObject::~OpenVRTrackedObject()
{
   clearRenderData();
   // NOTE(review): mShapeInstance is not deleted here (clearRenderData()
   // intentionally leaves it alone) -- looks like a leak; confirm ownership.
   delete mConvexList;
}
143
144void OpenVRTrackedObject::updateRenderData()
145{
146   clearRenderData();
147
148   if (!mDataBlock)
149      return;
150
151   // Are we using a model?
152   if (mDataBlock->mShape)
153   {
154      if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape)
155      {
156         delete mShapeInstance;
157         mShapeInstance = NULL;
158      }
159
160      if (!mShapeInstance)
161      {
162         mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject());
163      }
164   }
165   else
166   {
167      setupRenderDataFromModel(isClientObject());
168   }
169}
170
// Load the native OpenVR render model for this device, and optionally the
// per-component models (buttons, trigger, body, ...) used for articulated
// controller rendering.
void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels)
{
   clearRenderData();
   
   // Render models come from the OpenVR runtime; it must be up and enabled.
   if (!OPENVR || !OPENVR->isEnabled())
      return;

   vr::IVRRenderModels *models = OPENVR->getRenderModels();
   if (!models)
      return;

   // Whole-device model: spin until the async load completes or fails.
   // NOTE(review): this busy-waits on the model load; presumably acceptable
   // at setup time -- confirm against the provider's getRenderModel().
   if (!mShapeInstance && mModelName && mModelName[0] != '\0')
   {
      bool failed = false;
      S32 idx = OPENVR->preloadRenderModel(mModelName);
      while (!OPENVR->getRenderModel(idx, &mBasicModel, failed))
      {
         if (failed)
            break;
      }
   }

   if (loadComponentModels)
   {
      // One slot per named component reported by the runtime.
      mRenderComponents.setSize(models->GetComponentCount(mModelName));

      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
      {
         RenderModelSlot &slot = mRenderComponents[i];
         char buffer[1024];

         slot.mappedNodeIdx = -1;
         slot.componentName = NULL;
         slot.nativeModel = NULL;

         // A zero result means the component has no name; skip it.
         U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer));
         if (result == 0)
            continue;

#ifdef DEBUG_CONTROLLER_MODELS
         Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer);
#endif

         slot.componentName = StringTable->insert(buffer, true);

         // Some components (e.g. attachment points) have no render model.
         result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer));
         if (result == 0)
         {
#ifdef DEBUG_CONTROLLER_MODELS
            Con::printf("Controller[%s] component %i NO MODEL", mModelName, i);
#endif
            continue;
         }

#ifdef DEBUG_CONTROLLER_MODELS
         Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName);
#endif

         // Same busy-wait load pattern as the whole-device model above.
         bool failed = false;
         S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true));
         while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed))
         {
            if (failed)
               break;
         }
      }
   }
}
239
// Drop references to native render models. This only clears pointers --
// the models themselves are owned by the OpenVR provider. mShapeInstance
// is deliberately left alone: updateRenderData() relies on it surviving
// a clear so it can be reused when the shape is unchanged.
void OpenVRTrackedObject::clearRenderData()
{
   mBasicModel = NULL;
   mRenderComponents.clear();
}
245
246//-----------------------------------------------------------------------------
247// Object Editing
248//-----------------------------------------------------------------------------
249void OpenVRTrackedObject::initPersistFields()
250{
251   // SceneObject already handles exposing the transform
252   Parent::initPersistFields();
253
254   addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
255   addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
256   addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
257
258   static bool conInit = false;
259   if (!conInit)
260   {
261      Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition);
262      Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition);
263      conInit = true;
264   }
265}
266
// Called after the editor applies property changes.
void OpenVRTrackedObject::inspectPostApply()
{
   Parent::inspectPostApply();

   // Flag the network mask to send the updates
   // to the client object
   setMaskBits(UpdateMask);
}
275
// Register the object with the scene and establish its bounds.
bool OpenVRTrackedObject::onAdd()
{
   if (!Parent::onAdd())
      return false;

   // Set up a 1x1x1 bounding box (placeholder until the datablock arrives)
   mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f),
      Point3F(0.5f, 0.5f, 0.5f));

   resetWorldBox();

   // Add this object to the scene
   addToScene();

   // Prefer the datablock's collision box; without one, fall back to
   // global bounds so the object is never culled.
   if (mDataBlock)
   {
      mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
      mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
      resetWorldBox();
   }
   else
   {
      setGlobalBounds();
   }

   return true;
}
303
// Unregister from the scene and tear down render/physics state.
void OpenVRTrackedObject::onRemove()
{
   // Remove this object from the scene
   removeFromScene();

   clearRenderData();

   // Destroy the physics trigger body, if one was created.
   SAFE_DELETE(mPhysicsRep);

   Parent::onRemove();
}
315
316void OpenVRTrackedObject::_updatePhysics()
317{
318   SAFE_DELETE(mPhysicsRep);
319
320   if (!PHYSICSMGR)
321      return;
322
323   PhysicsCollision *colShape = NULL;
324   MatrixF offset(true);
325   colShape = PHYSICSMGR->createCollision();
326   colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset);
327
328   if (colShape)
329   {
330      PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? "server" : "client");
331      mPhysicsRep = PHYSICSMGR->createBody();
332      mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world);
333      mPhysicsRep->setTransform(getTransform());
334   }
335}
336
// Bind the typed datablock, adopt its collision box, and rebuild physics.
bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload)
{
   mDataBlock = dynamic_cast<OpenVRTrackedObjectData*>(dptr);
   if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload))
      return false;

   // Setup the models
   clearRenderData();

   // Replace the global-bounds fallback with the datablock's box.
   mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
   mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;

   mGlobalBounds = false;

   resetWorldBox();

   _updatePhysics();

   // Fire the script-side onNewDataBlock callback.
   scriptOnNewDataBlock();

   return true;
}
359
// Record the object this controller is interacting with and whether it
// is currently being held. State is only stored here; consumers elsewhere.
void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding)
{
   mInteractObject = object;
   mHoldInteractedObject = holding;
}
365
// External transform setter: forwards to SceneObject and schedules a
// network update. (updateMove() bypasses this to avoid per-tick dirtying.)
void OpenVRTrackedObject::setTransform(const MatrixF & mat)
{
   // Let SceneObject handle all of the matrix manipulation
   Parent::setTransform(mat);

   // Dirty our network mask so that the new transform gets
   // transmitted to the client object
   setMaskBits(UpdateMask);
}
375
// Server-authoritative setter for the native render model name; clients
// receive the new name via packUpdate().
void OpenVRTrackedObject::setModelName(String &modelName)
{
   if (!isServerObject())
      return;

   mModelName = StringTable->insert(modelName.c_str(), true);
   setMaskBits(UpdateMask);
}
384
// Serialize ghost state to a client; mirrored by unpackUpdate().
U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream)
{
   // Allow the Parent to get a crack at writing its info
   U32 retMask = Parent::packUpdate(conn, mask, stream);

   // Write our transform information
   if (stream->writeFlag(mask & UpdateMask))
   {
      mathWrite(*stream, getTransform());
      mathWrite(*stream, getScale());

      // Device/move bindings and native model name.
      stream->write((S16)mDeviceIndex);
      stream->write((S16)mMappedMoveIndex);
      stream->writeString(mModelName);
   }

   return retMask;
}
403
// Deserialize ghost state on the client; mirrors packUpdate().
void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream)
{
   // Let the Parent read any info it sent
   Parent::unpackUpdate(conn, stream);

   if (stream->readFlag())  // UpdateMask
   {
      mathRead(*stream, &mObjToWorld);
      mathRead(*stream, &mObjScale);

      setTransform(mObjToWorld);
      
      // Read through S16 temporaries to match the packed width.
      S16 readDeviceIndex;
      S16 readMoveIndex;
      stream->read(&readDeviceIndex);
      stream->read(&readMoveIndex);

      mDeviceIndex = readDeviceIndex;
      mMappedMoveIndex = readMoveIndex;
      mModelName = stream->readSTString();

      // Model name may have changed; rebuild render-side data.
      updateRenderData();
   }

}
429
// No state beyond the parent's is written for control-object sync.
void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream)
{
   Parent::writePacketData(conn, stream);
}
434
// Mirrors writePacketData(); nothing extra to read.
void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream)
{
   Parent::readPacketData(conn, stream);
}
439
440MatrixF OpenVRTrackedObject::getTrackedTransform()
441{
442   IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
443   MatrixF trackedMat(1);
444
445   pose.orientation.setMatrix(&trackedMat);
446   trackedMat.setPosition(pose.position);
447
448   return trackedMat;
449}
450
451MatrixF OpenVRTrackedObject::getLastTrackedTransform()
452{
453   MatrixF trackedMat(1);
454
455   mPose.orientation.setMatrix(&trackedMat);
456   trackedMat.setPosition(mPose.position);
457
458   return trackedMat;
459}
460
// Transform from tracking space to world space. When mounted, the mount
// point provides it -- optionally with the parent's rotation stripped so
// only its position offsets the tracking space. Unmounted, it's identity.
MatrixF OpenVRTrackedObject::getBaseTrackingTransform()
{
   if (isMounted())
   {
      MatrixF mat;

      mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat);
      if (mIgnoreParentRotation)
      {
         // Keep only the translation of the mount transform.
         Point3F pos = mat.getPosition();
         mat = MatrixF(1);
         mat.setPosition(pos);
      }
      //mat.inverse();
      return mat;
   }

   return MatrixF(1);
}
480
// Submit render instances for this controller: optional debug-draw of the
// live and move-driven poses, then either the datablock shape, the native
// per-component models, or the whole-device native model.
void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state)
{
   RenderPassManager *renderPass = state->getRenderPass();

   // debug rendering for now

   // Not bound to a tracked device: nothing to draw.
   if (mDeviceIndex < 0)
      return;

   // Current pose of this device, plus the HMD (device 0) for debug draw.
   IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
   IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0);

   if (!pose.connected && !mPose.connected)
      return;

   MatrixF offsetMat = getBaseTrackingTransform();
   //offsetMat.inverse();

   Point3F pos = offsetMat.getPosition();
   //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z);

   const F32 CONTROLLER_SCALE = 0.1;

   // Debug visualization of the *live* tracked pose (green box = valid).
   if (smDebugControllerPosition)
   {
      ColorI drawColor = ColorI::GREEN;
      if (!pose.valid)
      {
         drawColor = ColorI::RED;
      }

      // Draw Camera
      /*
      DisplayPose cameraPose;
      OPENVR->getFrameEyePose(&cameraPose, -1);
      Point3F cameraCenter(0);
      MatrixF cameraMat(1);
      cameraPose.orientation.setMatrix(&cameraMat);
      cameraMat.setPosition(cameraPose.position);
      cameraMat.mulP(cameraCenter);
      //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN);
      
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box 
      */

      // Draw Tracked HMD Pos
      Point3F hmdCenter(0, 0, 0);
      MatrixF hmdMat(1);
      hmdPose.orientation.setMatrix(&hmdMat);
      hmdMat.setPosition(hmdPose.position);
      hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
      hmdMat = offsetMat * hmdMat;
      hmdMat.mulP(hmdCenter);
      DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED);
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box 


      // Draw Controller
      MatrixF mat(1);
      pose.orientation.setMatrix(&mat);
      mat.setPosition(pose.position);
      mat.inverse(); // same as HMD
      mat = offsetMat * mat;

      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
      Point3F middle(0, 0, 0);

      Point3F center(0, 0, 0);
      mat.mulP(center);

      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);

      mat.mulP(middleStart);
      mat.mulP(middle);
      mat.mulP(middleEnd);

      // Label the controller with its world-space position.
      char buffer[256];
      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
      DebugDrawer::get()->drawText(middle, buffer);
      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
   }

   // Debug visualization of the *move-driven* pose (mPose), client side.
   if (isClientObject() && smDebugControllerMovePosition)
   {
      MatrixF transform = getRenderTransform();
      transform.scale(mObjScale);
      DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform);
      
      // jamesu - grab server object pose for debugging
      OpenVRTrackedObject* tracked = static_cast<OpenVRTrackedObject*>(getServerObject());
      if (tracked)
      {
         mPose = tracked->mPose;
      }

      ColorI drawColor = ColorI::GREEN;
      if (!pose.valid)
      {
         drawColor = ColorI::RED;
      }

      // Draw Controller
      MatrixF mat(1);
      mPose.orientation.setMatrix(&mat);
      mat.setPosition(mPose.position);
      mat.inverse(); // same as HMD
      mat = offsetMat * mat;

      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
      Point3F middle(0, 0, 0);

      Point3F center(0, 0, 0);
      mat.mulP(center);

      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);

      mat.mulP(middleStart);
      mat.mulP(middle);
      mat.mulP(middleEnd);

      char buffer[256];
      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
      DebugDrawer::get()->drawText(middle, buffer);
      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box 
      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
   }

   // Controller matrix base
   MatrixF trackedMat = getTrackedTransform();
   MatrixF invTrackedMat(1);

   invTrackedMat = trackedMat;
   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)

   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
   trackedMat = invTrackedMat;
   trackedMat.inverse();

   // Render the controllers, using either the render model or the shape
   if (mShapeInstance)
   {
      // Calculate the distance of this object from the camera
      Point3F cameraOffset = invTrackedMat.getPosition();
      cameraOffset -= state->getDiffuseCameraPosition();
      F32 dist = cameraOffset.len();
      if (dist < 0.01f)
      dist = 0.01f;

      // Set up the LOD for the shape
      F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z));

      mShapeInstance->setDetailFromDistance(state, dist * invScale);

      // Make sure we have a valid level of detail
      if (mShapeInstance->getCurrentDetail() < 0)
         return;

      // GFXTransformSaver is a handy helper class that restores
      // the current GFX matrices to their original values when
      // it goes out of scope at the end of the function
      GFXTransformSaver saver;

      // Set up our TS render state
      TSRenderState rdata;
      rdata.setSceneState(state);
      rdata.setFadeOverride(1.0f);

      // We might have some forward lit materials
      // so pass down a query to gather lights.
      LightQuery query;
      query.init(getWorldSphere());
      rdata.setLightQuery(&query);

      // Set the world matrix to the objects render transform
      MatrixF mat = trackedMat;

      mat.scale(mObjScale);
      GFX->setWorldMatrix(mat);

      // TODO: move the nodes about for components

      mShapeInstance->animate();
      mShapeInstance->render(rdata);
   }
   else if (mRenderComponents.size() > 0)
   {
      vr::IVRRenderModels *models = OPENVR->getRenderModels();
      if (!models)
         return;

      vr::IVRSystem* vrs = vr::VRSystem();

      // Live controller state is needed to position dynamic components.
      if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState))
      {
         return;
      }

      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
      {
         RenderModelSlot slot = mRenderComponents[i];
         vr::RenderModel_ControllerMode_State_t modeState;
         vr::RenderModel_ComponentState_t componentState;

         modeState.bScrollWheelVisible = false;

         if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState))
         {
            MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();

            // Set our RenderInst as a standard mesh render
            ri->type = RenderPassManager::RIT_Mesh;

            // Calculate our sorting point
            if (state && slot.nativeModel)
            {
               // Calculate our sort point manually.
               const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat);
               ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
            }
            else
            {
               ri->sortDistSq = 0.0f;
            }

            // Compose the tracked transform with the component's local
            // offset, converted from the OpenVR coordinate convention.
            MatrixF newTransform = trackedMat;
            MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel);
            MatrixF offComponentMat(1);
            OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat);

            newTransform = offComponentMat * newTransform;

            newTransform.inverse();

            //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE);

            // NOTE(review): component 0 is skipped (i < 1) -- presumably the
            // whole-body model that duplicates the other parts; confirm.
            if (!slot.nativeModel)
               continue;
            if (i < 1)
               continue;

            // Set up our transforms
            ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
            ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
            ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);

            // If our material needs lights then fill the RIs
            // light vector with the best lights.
            if (true)
            {
               LightQuery query;
               Point3F center(0, 0, 0);
               invTrackedMat.mulP(center);
               query.init(SphereF(center, 10.0f));
               query.getLights(ri->lights, 8);
            }

            // Draw model
            slot.nativeModel->draw(state, ri);
            state->getRenderPass()->addInst(ri);
         }
      }
   }
   else if (mBasicModel)
   {
      MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();

      // Set our RenderInst as a standard mesh render
      ri->type = RenderPassManager::RIT_Mesh;

      // Calculate our sorting point
      if (state)
      {
         // Calculate our sort point manually.
         const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat);
         ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
      }
      else
      {
         ri->sortDistSq = 0.0f;
      }

      MatrixF newTransform = invTrackedMat;
      // Set up our transforms
      ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
      ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
      ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);

      // If our material needs lights then fill the RIs
      // light vector with the best lights.
      if (true)
      {
         LightQuery query;
         Point3F center(0, 0, 0);
         invTrackedMat.mulP(center);
         query.init(SphereF(center, 10.0f));
         query.getLights(ri->lights, 8);
      }

      // Draw model
      mBasicModel->draw(state, ri);
      state->getRenderPass()->addInst(ri);
   }
}
791
792U32 OpenVRTrackedObject::getCollisionMask()
793{
794   if (isServerObject())
795      return sServerCollisionMask;
796   else
797      return sClientCollisionMask;
798}
799
// Refresh the convex working list of nearby collidable objects.
void OpenVRTrackedObject::updateWorkingCollisionSet()
{
   const U32 mask = getCollisionMask();
   // Expand the bounds by the distance that could be covered in one tick
   // at 50 units/sec, plus a fudge factor, so the list stays valid.
   Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale());
   F32 len = (50) * TickSec;
   F32 l = (len * 1.1) + 0.1;  // fudge factor
   convexBox.minExtents -= Point3F(l, l, l);
   convexBox.maxExtents += Point3F(l, l, l);

   // Temporarily disable our own collision so we don't collect ourselves.
   disableCollision();
   mConvexList->updateWorkingList(convexBox, mask);
   enableCollision();
}
813
// Drive the object's transform from the controller pose embedded in the
// player's ExtendedMove. No-op unless TORQUE_EXTENDED_MOVE is built in.
void OpenVRTrackedObject::updateMove(const Move *move)
{
   // Set transform based on move

#ifdef TORQUE_EXTENDED_MOVE

   const ExtendedMove* emove = dynamic_cast<const ExtendedMove*>(move);
   if (!emove)
      return;

   // Clamp the mapped slot into the valid ExtendedMove range.
   U32 emoveIndex = mMappedMoveIndex;
   if (emoveIndex >= ExtendedMove::MaxPositionsRotations)
      emoveIndex = 0;

   //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
   //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex);

   // Only axis-angle rotations are consumed here.
   if (!emove->EulerBasedRotation[emoveIndex])
   {
      AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
      // Update our pose based on the move info
      mPose.orientation = inRot;
      mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]);
      mPose.valid = true;
      mPose.connected = true;
   }

   // Set transform based on move pose
   MatrixF trackedMat(1);
   MatrixF invTrackedMat(1);

   mPose.orientation.setMatrix(&trackedMat);
   trackedMat.setPosition(mPose.position);

   invTrackedMat = trackedMat;
   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)

   // Apply the mount's base transform (rotation optionally ignored).
   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;
   trackedMat = invTrackedMat;
   trackedMat.inverse();

   // Bypass our own setTransform() override, which would dirty the net
   // mask every tick.
   SceneObject::setTransform(invTrackedMat);

   if (mPhysicsRep)
      mPhysicsRep->setTransform(invTrackedMat);
#endif
}
861
862void OpenVRTrackedObject::processTick(const Move *move)
863{
864   // Perform collision checks
865   if (isServerObject())
866   {
867      updateMove(move);
868
869      if (!mPhysicsRep)
870      {
871         updateWorkingCollisionSet();
872      }
873   }
874
875   Parent::processTick(move);
876}
877
// Client-side render interpolation; no custom smoothing beyond the parent.
void OpenVRTrackedObject::interpolateTick(F32 delta)
{
   // Set latest transform

   Parent::interpolateTick(delta);
}
884
// Per-frame client update; nothing beyond the parent's behavior.
void OpenVRTrackedObject::advanceTime(F32 dt)
{
   Parent::advanceTime(dt);
}
889
// Ray test against the object-space bounding box using a standard slab
// test: the ray's parametric range [fst, fet] is intersected with each
// axis interval in turn. Only hits when a live, valid pose exists.
bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info)
{
   if (!mPose.connected || !mPose.valid)
      return false;

   // Collide against bounding box.
   F32 st, et, fst = 0.0f, fet = 1.0f;
   // Walk the three axes via pointer arithmetic over the Point3F fields.
   F32 *bmin = &mObjBox.minExtents.x;
   F32 *bmax = &mObjBox.maxExtents.x;
   F32 const *si = &start.x;
   F32 const *ei = &end.x;

   for (S32 i = 0; i < 3; i++) {
      if (*si < *ei) {
         if (*si > *bmax || *ei < *bmin)
            return false;
         F32 di = *ei - *si;
         st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f;
         et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f;
      }
      else {
         if (*ei > *bmax || *si < *bmin)
            return false;
         F32 di = *ei - *si;
         st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f;
         et = (*ei < *bmin) ? (*bmin - *si) / di : 1.0f;
      }
      // Shrink the surviving parametric interval; empty means a miss.
      if (st > fst) fst = st;
      if (et < fet) fet = et;
      if (fet < fst)
         return false;
      bmin++; bmax++;
      si++; ei++;
   }

   // Report a normal pointing back along the ray (not the face normal),
   // rotated into world space.
   info->normal = start - end;
   info->normal.normalizeSafe();
   getTransform().mulV(info->normal);

   info->t = fst;
   info->object = this;
   info->point.interpolate(start, end, fst);
   info->material = 0;
   return true;
}
935
// Provide a box convex covering the whole object to the collision system,
// reusing an existing one from the requester's working list if present.
void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex)
{
   // These should really come out of a pool
   mConvexList->collectGarbage();

   // Transform the query box into object space before the overlap test.
   Box3F realBox = box;
   mWorldToObj.mul(realBox);
   realBox.minExtents.convolveInverse(mObjScale);
   realBox.maxExtents.convolveInverse(mObjScale);

   if (realBox.isOverlapped(getObjBox()) == false)
      return;

   // Just return a box convex for the entire shape...
   Convex* cc = 0;
   CollisionWorkingList& wl = convex->getWorkingList();
   for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) {
      if (itr->mConvex->getType() == BoxConvexType &&
         itr->mConvex->getObject() == this) {
         cc = itr->mConvex;
         break;
      }
   }
   // Already in the working list; nothing to add.
   if (cc)
      return;

   // Create a new convex.
   BoxConvex* cp = new BoxConvex;
   mConvexList->registerObject(cp);
   convex->addToWorkingList(cp);
   cp->init(this);

   // Center + half-extents of the object box define the convex.
   mObjBox.getCenter(&cp->mCenter);
   cp->mSize.x = mObjBox.len_x() / 2.0f;
   cp->mSize.y = mObjBox.len_y() / 2.0f;
   cp->mSize.z = mObjBox.len_z() / 2.0f;
}
973
// Overlap testing is not implemented yet; never reports a hit.
bool OpenVRTrackedObject::testObject(SceneObject* enter)
{
   return false; // TODO
}
978
// Console method: %obj.setModelName(name). Forwards to the server-side
// setter; see OpenVRTrackedObject::setModelName().
DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set model name. Typically you should do this from the client to update the server representation.")
{
   object->setModelName(modelName);
}
983