Add pybullet setVRCameraState and b3SetVRCameraStateCommandInit to set the VR camera root transform (position/orientation) and optional tracking object unique id (-1 for no tracking)

Fix robotcontrol.py script, getContactPointData -> getContactPoints
Erwin Coumans
2017-01-05 17:41:58 -08:00
parent c940f0ec47
commit 83e103ac15
10 changed files with 229 additions and 41 deletions
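
A minimal usage sketch of the new pybullet call (an already-running VR-enabled physics server and the shared-memory connection mode are assumptions; the keyword names match the binding added below):

import pybullet as p

# Attach to an already-running VR-enabled physics server (e.g. the shared-memory VR example app).
p.connect(p.SHARED_MEMORY)

# Teleport the VR camera root: position in meters, orientation as a quaternion [x,y,z,w].
p.setVRCameraState(rootPosition=[0, 0, 0.5], rootOrientation=[0, 0, 0, 1])

# Optionally follow a previously loaded body; -1 switches tracking off again.
# p.setVRCameraState(trackObject=bodyUniqueId)

p.disconnect()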

View File

@@ -71,6 +71,21 @@ void SimpleCamera::setVRCamera(const float viewMat[16], const float projectionMa
}
}
bool SimpleCamera::getVRCamera(float viewMat[16], float projectionMatrix[16])
{
if (m_data->m_enableVR)
{
for (int i=0;i<16;i++)
{
viewMat[i] = m_data->m_viewMatrixVR[i];
projectionMatrix[i] = m_data->m_projectionMatrixVR[i];
}
return true;
}
return false;
}
void SimpleCamera::disableVRCamera()
{
m_data->m_enableVR = false;

View File

@@ -15,6 +15,8 @@ struct SimpleCamera : public CommonCameraInterface
virtual void getCameraViewMatrix(float m[16]) const;
virtual void setVRCamera(const float viewMat[16], const float projectionMatrix[16]);
virtual bool getVRCamera(float viewMat[16], float projectionMatrix[16]);
virtual void setVRCameraOffsetTransform(const float offset[16]);
virtual void disableVRCamera();

View File

@@ -2191,3 +2191,54 @@ void b3GetVREventsData(b3PhysicsClientHandle physClient, struct b3VREventsData*
cl->getCachedVREvents(vrEventsData);
}
}
b3SharedMemoryCommandHandle b3SetVRCameraStateCommandInit(b3PhysicsClientHandle physClient)
{
PhysicsClient* cl = (PhysicsClient*)physClient;
b3Assert(cl);
b3Assert(cl->canSubmitCommand());
struct SharedMemoryCommand* command = cl->getAvailableSharedMemoryCommand();
b3Assert(command);
command->m_type = CMD_SET_VR_CAMERA_STATE;
command->m_updateFlags = 0;
return (b3SharedMemoryCommandHandle)command;
}
int b3SetVRCameraRootPosition(b3SharedMemoryCommandHandle commandHandle, double rootPos[3])
{
struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle;
b3Assert(command);
b3Assert(command->m_type == CMD_SET_VR_CAMERA_STATE);
command->m_updateFlags |= VR_CAMERA_ROOT_POSITION;
command->m_vrCameraStateArguments.m_rootPosition[0] = rootPos[0];
command->m_vrCameraStateArguments.m_rootPosition[1] = rootPos[1];
command->m_vrCameraStateArguments.m_rootPosition[2] = rootPos[2];
return 0;
}
int b3SetVRCameraRootOrientation(b3SharedMemoryCommandHandle commandHandle, double rootOrn[4])
{
struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle;
b3Assert(command);
b3Assert(command->m_type == CMD_SET_VR_CAMERA_STATE);
command->m_updateFlags |= VR_CAMERA_ROOT_ORIENTATION;
command->m_vrCameraStateArguments.m_rootOrientation[0] = rootOrn[0];
command->m_vrCameraStateArguments.m_rootOrientation[1] = rootOrn[1];
command->m_vrCameraStateArguments.m_rootOrientation[2] = rootOrn[2];
command->m_vrCameraStateArguments.m_rootOrientation[3] = rootOrn[3];
return 0;
}
int b3SetVRCameraTrackingObject(b3SharedMemoryCommandHandle commandHandle, int objectUniqueId)
{
struct SharedMemoryCommand* command = (struct SharedMemoryCommand*) commandHandle;
b3Assert(command);
b3Assert(command->m_type == CMD_SET_VR_CAMERA_STATE);
command->m_updateFlags |= VR_CAMERA_ROOT_TRACKING_OBJECT;
command->m_vrCameraStateArguments.m_trackingObjectUniqueId = objectUniqueId;
return 0;
}
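
Each setter above only ORs its own flag into m_updateFlags, so the server applies just the fields that were supplied. From Python the keyword arguments are therefore independent; a hedged sketch (DIRECT mode is used only so the snippet runs without a VR server):

import pybullet as p

p.connect(p.DIRECT)  # in practice you would attach to the VR server via p.SHARED_MEMORY

# Position only: sends VR_CAMERA_ROOT_POSITION, orientation and tracking stay as they were.
p.setVRCameraState(rootPosition=[1.0, 0.0, 0.0])

# Orientation only (90 degrees about Z): sends VR_CAMERA_ROOT_ORIENTATION.
p.setVRCameraState(rootOrientation=[0, 0, 0.7071, 0.7071])

# Tracking only: sends VR_CAMERA_ROOT_TRACKING_OBJECT; -1 disables tracking.
p.setVRCameraState(trackObject=-1)

p.disconnect()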

View File

@@ -314,6 +314,12 @@ int b3LoadBunnySetCollisionMargin(b3SharedMemoryCommandHandle commandHandle, dou
b3SharedMemoryCommandHandle b3RequestVREventsCommandInit(b3PhysicsClientHandle physClient);
void b3GetVREventsData(b3PhysicsClientHandle physClient, struct b3VREventsData* vrEventsData);
b3SharedMemoryCommandHandle b3SetVRCameraStateCommandInit(b3PhysicsClientHandle physClient);
int b3SetVRCameraRootPosition(b3SharedMemoryCommandHandle commandHandle, double rootPos[3]);
int b3SetVRCameraRootOrientation(b3SharedMemoryCommandHandle commandHandle, double rootOrn[4]);
int b3SetVRCameraTrackingObject(b3SharedMemoryCommandHandle commandHandle, int objectUniqueId);
#ifdef __cplusplus

View File

@@ -44,11 +44,14 @@ bool gEnableRealTimeSimVR=false;
bool gCreateDefaultRobotAssets = false;
int gInternalSimFlags = 0;
bool gResetSimulation = 0;
int gHuskyId = -1;
btTransform huskyTr = btTransform::getIdentity();
int gVRTrackingObjectUniqueId = -1;
btTransform gVRTrackingObjectTr = btTransform::getIdentity();
int gCreateObjectSimVR = -1;
int gEnableKukaControl = 0;
btVector3 gVRTeleportPos1(0,0,0);
btQuaternion gVRTeleportOrn(0, 0, 0,1);
btScalar simTimeScalingFactor = 1;
btScalar gRhsClamp = 1.f;
@@ -1369,6 +1372,32 @@ bool PhysicsServerCommandProcessor::processCommand(const struct SharedMemoryComm
}
#endif
case CMD_SET_VR_CAMERA_STATE:
{
if (clientCmd.m_updateFlags & VR_CAMERA_ROOT_POSITION)
{
gVRTeleportPos1[0] = clientCmd.m_vrCameraStateArguments.m_rootPosition[0];
gVRTeleportPos1[1] = clientCmd.m_vrCameraStateArguments.m_rootPosition[1];
gVRTeleportPos1[2] = clientCmd.m_vrCameraStateArguments.m_rootPosition[2];
}
if (clientCmd.m_updateFlags & VR_CAMERA_ROOT_ORIENTATION)
{
gVRTeleportOrn[0] = clientCmd.m_vrCameraStateArguments.m_rootOrientation[0];
gVRTeleportOrn[1] = clientCmd.m_vrCameraStateArguments.m_rootOrientation[1];
gVRTeleportOrn[2] = clientCmd.m_vrCameraStateArguments.m_rootOrientation[2];
gVRTeleportOrn[3] = clientCmd.m_vrCameraStateArguments.m_rootOrientation[3];
}
if (clientCmd.m_updateFlags & VR_CAMERA_ROOT_TRACKING_OBJECT)
{
gVRTrackingObjectUniqueId = clientCmd.m_vrCameraStateArguments.m_trackingObjectUniqueId;
}
serverStatusOut.m_type = CMD_CLIENT_COMMAND_COMPLETED;
hasStatus = true;
break;
}
case CMD_REQUEST_VR_EVENTS_DATA:
{
serverStatusOut.m_sendVREvents.m_numVRControllerEvents = 0;
@@ -4287,12 +4316,12 @@ void PhysicsServerCommandProcessor::stepSimulationRealTime(double dtInSec, const
gSubStep = m_data->m_physicsDeltaTime;
}
if (gHuskyId >= 0)
if (gVRTrackingObjectUniqueId >= 0)
{
InternalBodyHandle* bodyHandle = m_data->getHandle(gHuskyId);
InternalBodyHandle* bodyHandle = m_data->getHandle(gVRTrackingObjectUniqueId);
if (bodyHandle && bodyHandle->m_multiBody)
{
huskyTr = bodyHandle->m_multiBody->getBaseWorldTransform();
gVRTrackingObjectTr = bodyHandle->m_multiBody->getBaseWorldTransform();
}
}
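
When a tracking object is set, the real-time step above copies that body's base world transform every update, so the VR camera can ride along with it. A hedged sketch of driving this from Python (assumes the VR server is running and can resolve the husky URDF path):

import pybullet as p

p.connect(p.SHARED_MEMORY)

# Load a vehicle and let the VR camera root follow its base transform;
# the local teleport offset is applied on top of it (camera "inside" the car).
husky = p.loadURDF("husky/husky.urdf", 2, -5, 1)
p.setVRCameraState(trackObject=husky)

# ... drive around ...

p.setVRCameraState(trackObject=-1)  # stop following
p.disconnect()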
@@ -4595,8 +4624,6 @@ void PhysicsServerCommandProcessor::createDefaultRobotAssets()
//loadUrdf("knight.urdf", btVector3(-1.2, 0.2, 0.7), btQuaternion(btVector3(1, 0, 0), SIMD_HALF_PI), true, false, &bodyId, &gBufferServerToClient[0], gBufferServerToClient.size());
loadUrdf("husky/husky.urdf", btVector3(2, -5, 1), btQuaternion(0, 0, 0, 1), true, false, &bodyId, &gBufferServerToClient[0], gBufferServerToClient.size());
gHuskyId = bodyId;
b3Printf("huskyId = %d", gHuskyId);
m_data->m_huskyId = bodyId;
m_data->m_dynamicsWorld->setGravity(btVector3(0, 0, -10));

View File

@@ -26,9 +26,14 @@
//@todo(erwincoumans) those globals are hacks for a VR demo, move this to Python/pybullet!
extern btVector3 gLastPickPos;
btVector3 gVRTeleportPos1(0,0,0);
btVector3 gVRTeleportPosLocal(0,0,0);
btQuaternion gVRTeleportOrnLocal(0,0,0,1);
extern btVector3 gVRTeleportPos1;
extern btQuaternion gVRTeleportOrn;
btScalar gVRTeleportRotZ = 0;
btQuaternion gVRTeleportOrn(0, 0, 0,1);
extern btVector3 gVRGripperPos;
extern btQuaternion gVRGripperOrn;
extern btVector3 gVRController2Pos;
@@ -57,9 +62,9 @@ static void saveCurrentSettingsVR()
FILE* f = fopen(startFileNameVR, "w");
if (f)
{
fprintf(f, "--camPosX= %f\n", gVRTeleportPos1[0]);
fprintf(f, "--camPosY= %f\n", gVRTeleportPos1[1]);
fprintf(f, "--camPosZ= %f\n", gVRTeleportPos1[2]);
fprintf(f, "--camPosX= %f\n", gVRTeleportPosLocal[0]);
fprintf(f, "--camPosY= %f\n", gVRTeleportPosLocal[1]);
fprintf(f, "--camPosZ= %f\n", gVRTeleportPosLocal[2]);
fprintf(f, "--camRotZ= %f\n", gVRTeleportRotZ);
fclose(f);
}
@@ -109,9 +114,9 @@ void midiCallback(double deltatime, std::vector< unsigned char > *message, void
if (message->at(1) == 16)
{
gVRTeleportRotZ= getParamf(-3.1415, 3.1415, message->at(2));
gVRTeleportOrn = btQuaternion(btVector3(0, 0, 1), gVRTeleportRotZ);
gVRTeleportOrnLocal = btQuaternion(btVector3(0, 0, 1), gVRTeleportRotZ);
saveCurrentSettingsVR();
// b3Printf("gVRTeleportOrn rotZ = %f\n", gVRTeleportRotZ);
// b3Printf("gVRTeleportOrnLocal rotZ = %f\n", gVRTeleportRotZ);
}
if (message->at(1) == 32)
@@ -123,9 +128,9 @@ void midiCallback(double deltatime, std::vector< unsigned char > *message, void
{
if (message->at(1) == i)
{
gVRTeleportPos1[i] = getParamf(-2, 2, message->at(2));
gVRTeleportPosLocal[i] = getParamf(-2, 2, message->at(2));
saveCurrentSettingsVR();
// b3Printf("gVRTeleportPos[%d] = %f\n", i,gVRTeleportPos1[i]);
// b3Printf("gVRTeleportPos[%d] = %f\n", i,gVRTeleportPosLocal[i]);
}
}
@@ -1040,19 +1045,19 @@ public:
setSharedMemoryKey(shmemKey);
}
if (args.GetCmdLineArgument("camPosX", gVRTeleportPos1[0]))
if (args.GetCmdLineArgument("camPosX", gVRTeleportPosLocal[0]))
{
printf("camPosX=%f\n", gVRTeleportPos1[0]);
printf("camPosX=%f\n", gVRTeleportPosLocal[0]);
}
if (args.GetCmdLineArgument("camPosY", gVRTeleportPos1[1]))
if (args.GetCmdLineArgument("camPosY", gVRTeleportPosLocal[1]))
{
printf("camPosY=%f\n", gVRTeleportPos1[1]);
printf("camPosY=%f\n", gVRTeleportPosLocal[1]);
}
if (args.GetCmdLineArgument("camPosZ", gVRTeleportPos1[2]))
if (args.GetCmdLineArgument("camPosZ", gVRTeleportPosLocal[2]))
{
printf("camPosZ=%f\n", gVRTeleportPos1[2]);
printf("camPosZ=%f\n", gVRTeleportPosLocal[2]);
}
float camRotZ = 0.f;
@@ -1060,7 +1065,7 @@ public:
{
printf("camRotZ = %f\n", camRotZ);
btQuaternion ornZ(btVector3(0, 0, 1), camRotZ);
gVRTeleportOrn = ornZ;
gVRTeleportOrnLocal = ornZ;
}
if (args.CheckCmdLineFlag("robotassets"))
@@ -1474,8 +1479,8 @@ extern int gDroppedSimulationSteps;
extern int gNumSteps;
extern double gDtInSec;
extern double gSubStep;
extern int gHuskyId;
extern btTransform huskyTr;
extern int gVRTrackingObjectUniqueId;
extern btTransform gVRTrackingObjectTr;
struct LineSegment
{
@@ -1607,15 +1612,22 @@ void PhysicsServerExample::drawUserDebugLines()
void PhysicsServerExample::renderScene()
{
btTransform vrTrans;
gVRTeleportPos1 = gVRTeleportPosLocal;
gVRTeleportOrn = gVRTeleportOrnLocal;
#if 0
///little VR test to follow/drive Husky vehicle
if (gHuskyId >= 0)
if (gVRTrackingObjectUniqueId >= 0)
{
gVRTeleportPos1 = huskyTr.getOrigin();
gVRTeleportOrn = huskyTr.getRotation();
btTransform vrTrans;
vrTrans.setOrigin(gVRTeleportPosLocal);
vrTrans.setRotation(gVRTeleportOrnLocal);
vrTrans = vrTrans * gVRTrackingObjectTr;
gVRTeleportPos1 = vrTrans.getOrigin();
gVRTeleportOrn = vrTrans.getRotation();
}
#endif
B3_PROFILE("PhysicsServerExample::RenderScene");
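
The (currently disabled) block above composes the local teleport transform with the tracked body's world transform. A hedged client-side equivalent using p.multiplyTransforms, a helper added to pybullet after this commit, with the same operand order as the C++ line vrTrans = vrTrans * gVRTrackingObjectTr:

import pybullet as p

local_pos, local_orn = [0, 0, 0.5], [0, 0, 0, 1]        # gVRTeleportPosLocal / gVRTeleportOrnLocal
track_pos, track_orn = [2.0, -5.0, 1.0], [0, 0, 0, 1]    # tracked body's base world transform

# Composed VR camera root transform (local offset applied on top of the tracked pose).
cam_pos, cam_orn = p.multiplyTransforms(local_pos, local_orn, track_pos, track_orn)
print(cam_pos, cam_orn)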

View File

@@ -594,6 +594,26 @@ struct UserDebugDrawResultArgs
int m_debugItemUniqueId;
};
struct SendVREvents
{
int m_numVRControllerEvents;
b3VRControllerEvent m_controllerEvents[MAX_VR_CONTROLLERS];
};
enum eVRCameraEnums
{
VR_CAMERA_ROOT_POSITION=1,
VR_CAMERA_ROOT_ORIENTATION=2,
VR_CAMERA_ROOT_TRACKING_OBJECT=4
};
struct VRCameraState
{
double m_rootPosition[3];
double m_rootOrientation[4];
int m_trackingObjectUniqueId;
};
struct SharedMemoryCommand
{
@@ -635,6 +655,7 @@ struct SharedMemoryCommand
struct UserDebugDrawArgs m_userDebugDrawArgs;
struct RequestRaycastIntersections m_requestRaycastIntersections;
struct LoadBunnyArgs m_loadBunnyArguments;
struct VRCameraState m_vrCameraStateArguments;
};
};
@@ -657,11 +678,8 @@ struct SendOverlappingObjectsArgs
int m_numRemainingOverlappingObjects;
};
struct SendVREvents
{
int m_numVRControllerEvents;
b3VRControllerEvent m_controllerEvents[MAX_VR_CONTROLLERS];
};

View File

@@ -46,6 +46,8 @@ enum EnumSharedMemoryClientCommand
CMD_SET_SHADOW,
CMD_USER_DEBUG_DRAW,
CMD_REQUEST_VR_EVENTS_DATA,
CMD_SET_VR_CAMERA_STATE,
//don't go beyond this command!
CMD_MAX_CLIENT_COMMANDS,

View File

@@ -2431,6 +2431,56 @@ static PyObject* pybullet_getMatrixFromQuaterion(PyObject* self, PyObject* args)
};
static PyObject* pybullet_setVRCameraState(PyObject* self, PyObject* args, PyObject *keywds)
{
b3SharedMemoryCommandHandle commandHandle;
b3SharedMemoryStatusHandle statusHandle;
int statusType;
int physicsClientId = 0;
b3PhysicsClientHandle sm = 0;
PyObject* rootPosObj=0;
PyObject* rootOrnObj=0;
int trackObjectUid=-2;
double rootPos[3];
double rootOrn[4];
static char *kwlist[] = {"rootPosition", "rootOrientation", "trackObject", "physicsClientId", NULL };
if (!PyArg_ParseTupleAndKeywords(args, keywds, "|OOii", kwlist,&rootPosObj, &rootOrnObj, &trackObjectUid,&physicsClientId))
{
return NULL;
}
sm = getPhysicsClient(physicsClientId);
if (sm == 0)
{
PyErr_SetString(SpamError, "Not connected to physics server.");
return NULL;
}
commandHandle = b3SetVRCameraStateCommandInit(sm);
if (pybullet_internalSetVectord(rootPosObj,rootPos))
{
b3SetVRCameraRootPosition(commandHandle,rootPos);
}
if (pybullet_internalSetVector4d(rootOrnObj,rootOrn))
{
b3SetVRCameraRootOrientation(commandHandle,rootOrn);
}
if (trackObjectUid>=-1)
{
b3SetVRCameraTrackingObject(commandHandle,trackObjectUid);
}
statusHandle = b3SubmitClientCommandAndWaitStatus(sm, commandHandle);
statusType = b3GetStatusType(statusHandle);
Py_INCREF(Py_None);
return Py_None;
}
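
All arguments of the binding are optional (trackObject defaults to the -2 "not supplied" sentinel, so only values >= -1 are forwarded), and calling it without a connection raises the module's error. A hedged sketch of that failure path:

import pybullet as p

try:
    # No p.connect(...) yet, so the binding reports "Not connected to physics server."
    p.setVRCameraState(rootPosition=[0, 0, 0])
except p.error as e:
    print(e)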
static PyObject* pybullet_getVREvents(PyObject* self, PyObject* args, PyObject *keywds)
{
b3SharedMemoryCommandHandle commandHandle;
@@ -3732,10 +3782,10 @@ static PyObject* pybullet_renderImageObsolete(PyObject* self, PyObject* args) {
static PyObject* pybullet_applyExternalForce(PyObject* self, PyObject* args,PyObject* keywds) {
{
int objectUniqueId, linkIndex, flags;
int objectUniqueId=-1, linkIndex=-1, flags;
double force[3];
double position[3] = {0.0, 0.0, 0.0};
PyObject* forceObj, *posObj;
PyObject* forceObj=0, *posObj=0;
b3SharedMemoryCommandHandle command;
b3SharedMemoryStatusHandle statusHandle;
@@ -4304,15 +4354,15 @@ static PyMethodDef SpamMethods[] = {
"Set a single joint motor control mode and desired target value. There is "
"no immediate state change, stepSimulation will process the motors."},
{"applyExternalForce",(PyCFunction) pybullet_applyExternalForce, METH_VARARGS,
{"applyExternalForce",(PyCFunction) pybullet_applyExternalForce, METH_VARARGS| METH_KEYWORDS,
"for objectUniqueId, linkIndex (-1 for base/root link), apply a force "
"[x,y,z] at the a position [x,y,z], flag to select FORCE_IN_LINK_FRAME or "
"FORCE_IN_WORLD_FRAME coordinates"},
"WORLD_FRAME coordinates"},
{"applyExternalTorque", (PyCFunction)pybullet_applyExternalTorque, METH_VARARGS| METH_KEYWORDS,
"for objectUniqueId, linkIndex (-1 for base/root link) apply a torque "
"[x,y,z] in Cartesian coordinates, flag to select TORQUE_IN_LINK_FRAME or "
"TORQUE_IN_WORLD_FRAME coordinates"},
"WORLD_FRAME coordinates"},
{"renderImage", pybullet_renderImageObsolete, METH_VARARGS,
"obsolete, please use getCameraImage and getViewProjectionMatrices instead"
@@ -4420,6 +4470,11 @@ static PyMethodDef SpamMethods[] = {
{ "getVREvents", (PyCFunction)pybullet_getVREvents, METH_VARARGS | METH_KEYWORDS,
"Get Virtual Reality events, for example to track VR controllers position/buttons"
},
{ "setVRCameraState", (PyCFunction)pybullet_setVRCameraState, METH_VARARGS | METH_KEYWORDS,
"Set properties of the VR Camera such as its root transform "
"for teleporting or to track objects (camera inside a vehicle for example)."
},
{ "rayTest", (PyCFunction)pybullet_rayTest, METH_VARARGS | METH_KEYWORDS,
"Cast a ray and return the first object hit, if any. "

View File

@@ -23,7 +23,7 @@ for joint in range (2,6) :
for step in range (400):
p.stepSimulation()
p.getContactPointData(husky)
p.getContactPoints(husky)
p.disconnect()
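
For reference, the renamed call returns a tuple of contact point records. A hedged, self-contained sketch of inspecting them (field indices follow the later-documented contact point layout, and the URDF paths are assumed to be on the server's search path):

import pybullet as p

p.connect(p.DIRECT)
p.setGravity(0, 0, -10)
plane = p.loadURDF("plane.urdf")
husky = p.loadURDF("husky/husky.urdf", 0, 0, 0.5)

for _ in range(100):
    p.stepSimulation()

# Each entry describes one contact involving the husky; pass a second body id to filter further.
for pt in p.getContactPoints(husky):
    print("bodyA:", pt[1], "bodyB:", pt[2], "normal force:", pt[9])

p.disconnect()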