diff --git a/data/multibody.bullet b/data/multibody.bullet index fec2a61a8..67a198781 100644 Binary files a/data/multibody.bullet and b/data/multibody.bullet differ diff --git a/examples/Importers/ImportURDFDemo/UrdfRenderingInterface.h b/examples/Importers/ImportURDFDemo/UrdfRenderingInterface.h index 4e86a06bf..de2ddbe51 100644 --- a/examples/Importers/ImportURDFDemo/UrdfRenderingInterface.h +++ b/examples/Importers/ImportURDFDemo/UrdfRenderingInterface.h @@ -91,6 +91,9 @@ struct UrdfRenderingInterface ///register a texture using an in-memory pixel buffer of a given width and height virtual int registerTexture(unsigned char* texels, int width, int height) = 0; + + virtual void setProjectiveTextureMatrices(const float viewMatrix[16], const float projectionMatrix[16]) {} + virtual void setProjectiveTexture(bool useProjectiveTexture) {} }; #endif //LINK_VISUAL_SHAPES_CONVERTER_H diff --git a/examples/SharedMemory/PhysicsServerCommandProcessor.cpp b/examples/SharedMemory/PhysicsServerCommandProcessor.cpp index 06e7a1a5d..c698b07a4 100644 --- a/examples/SharedMemory/PhysicsServerCommandProcessor.cpp +++ b/examples/SharedMemory/PhysicsServerCommandProcessor.cpp @@ -3645,10 +3645,38 @@ bool PhysicsServerCommandProcessor::processRequestCameraImageCommand(const struc if (m_data->m_pluginManager.getRenderInterface()) { + if ((flags & ER_USE_PROJECTIVE_TEXTURE) != 0) + { + m_data->m_pluginManager.getRenderInterface()->setProjectiveTexture(true); + if ((clientCmd.m_updateFlags & REQUEST_PIXEL_ARGS_HAS_PROJECTIVE_TEXTURE_MATRICES) != 0) + { + for (int i = 0; i < 16; i++) + { + projTextureViewMat[i] = clientCmd.m_requestPixelDataArguments.m_projectiveTextureViewMatrix[i]; + projTextureProjMat[i] = clientCmd.m_requestPixelDataArguments.m_projectiveTextureProjectionMatrix[i]; + } + } + else // If no specified matrices for projective texture, then use the camera matrices. 
+ { + for (int i = 0; i < 16; i++) + { + projTextureViewMat[i] = viewMat[i]; + projTextureProjMat[i] = projMat[i]; + } + } + m_data->m_pluginManager.getRenderInterface()->setProjectiveTextureMatrices(projTextureViewMat, projTextureProjMat); + } + else + { + m_data->m_pluginManager.getRenderInterface()->setProjectiveTexture(false); + } + + m_data->m_pluginManager.getRenderInterface()->copyCameraImageData(pixelRGBA, numRequestedPixels, depthBuffer, numRequestedPixels, segmentationMaskBuffer, numRequestedPixels, startPixelIndex, &width, &height, &numPixelsCopied); + m_data->m_pluginManager.getRenderInterface()->setProjectiveTexture(false); } m_data->m_guiHelper->debugDisplayCameraImageData(clientCmd.m_requestPixelDataArguments.m_viewMatrix, diff --git a/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.cpp b/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.cpp index c1cabc93d..70548b301 100644 --- a/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.cpp +++ b/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.cpp @@ -87,8 +87,8 @@ struct EGLRendererObjectArray } }; -#define START_WIDTH 640 -#define START_HEIGHT 480 +#define START_WIDTH 2560 +#define START_HEIGHT 2048 struct EGLRendererVisualShapeConverterInternalData { @@ -886,6 +886,7 @@ void EGLRendererVisualShapeConverter::changeRGBAColor(int bodyUniqueId, int link m_data->m_visualShapes[i].m_rgbaColor[1] = rgbaColor[1]; m_data->m_visualShapes[i].m_rgbaColor[2] = rgbaColor[2]; m_data->m_visualShapes[i].m_rgbaColor[3] = rgbaColor[3]; + m_data->m_instancingRenderer->writeSingleInstanceColorToCPU(rgbaColor,i); } } @@ -983,6 +984,16 @@ void EGLRendererVisualShapeConverter::setWidthAndHeight(int width, int height) m_data->m_rgbColorBuffer = TGAImage(width, height, TGAImage::RGB); } +void EGLRendererVisualShapeConverter::setProjectiveTextureMatrices(const float viewMatrix[16], const float projectionMatrix[16]) +{ + 
m_data->m_instancingRenderer->setProjectiveTextureMatrices(viewMatrix,projectionMatrix); +} + +void EGLRendererVisualShapeConverter::setProjectiveTexture(bool useProjectiveTexture) +{ + m_data->m_instancingRenderer->setProjectiveTexture(useProjectiveTexture); +} + //copied from OpenGLGuiHelper.cpp void EGLRendererVisualShapeConverter::copyCameraImageDataGL( unsigned char* pixelsRGBA, int rgbaBufferSizeInPixels, @@ -1044,7 +1055,7 @@ void EGLRendererVisualShapeConverter::copyCameraImageDataGL( glstat = glGetError(); b3Assert(glstat == GL_NO_ERROR); } - if ((sourceWidth * sourceHeight * sizeof(float)) == depthBufferSizeInPixels) + if ((sourceWidth * sourceHeight) == depthBufferSizeInPixels) { glReadPixels(0, 0, sourceWidth, sourceHeight, GL_DEPTH_COMPONENT, GL_FLOAT, &(m_data->m_sourceDepthBuffer[0])); int glstat; diff --git a/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.h b/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.h index 26d281058..d0aefb68a 100644 --- a/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.h +++ b/examples/SharedMemory/plugins/eglPlugin/eglRendererVisualShapeConverter.h @@ -51,6 +51,9 @@ struct EGLRendererVisualShapeConverter : public UrdfRenderingInterface virtual int loadTextureFile(const char* filename); virtual int registerTexture(unsigned char* texels, int width, int height); + virtual void setProjectiveTextureMatrices(const float viewMatrix[16], const float projectionMatrix[16]); + virtual void setProjectiveTexture(bool useProjectiveTexture); + virtual void syncTransform(int shapeUid, const class btTransform& worldTransform, const class btVector3& localScaling); }; diff --git a/examples/pybullet/examples/getCameraImageTest.py b/examples/pybullet/examples/getCameraImageTest.py new file mode 100644 index 000000000..290580cb4 --- /dev/null +++ b/examples/pybullet/examples/getCameraImageTest.py @@ -0,0 +1,100 @@ +import matplotlib.pyplot as plt +import numpy as np 
+import pybullet as p +import time + + +direct = p.connect(p.GUI)#, options="--window_backend=2 --render_device=0") +#egl = p.loadPlugin("eglRendererPlugin") + + +p.loadURDF('plane.urdf') +p.loadURDF("r2d2.urdf",[0,0,1]) +p.loadURDF('cube_small.urdf', basePosition=[0.0, 0.0, 0.025]) +cube_trans = p.loadURDF('cube_small.urdf', basePosition=[0.0, 0.1, 0.025]) +p.changeVisualShape(cube_trans,-1,rgbaColor=[1,1,1,0.1]) +width = 128 +height = 128 + +fov = 60 +aspect = width / height +near = 0.02 +far = 1 + +view_matrix = p.computeViewMatrix([0, 0, 0.5], [0, 0, 0], [1, 0, 0]) +projection_matrix = p.computeProjectionMatrixFOV(fov, aspect, near, far) + +# Get depth values using the OpenGL renderer +images = p.getCameraImage(width, height, view_matrix, projection_matrix, shadow=True,renderer=p.ER_BULLET_HARDWARE_OPENGL) +rgb_opengl= np.reshape(images[2], (height, width, 4))*1./255. +depth_buffer_opengl = np.reshape(images[3], [width, height]) +depth_opengl = far * near / (far - (far - near) * depth_buffer_opengl) +seg_opengl = np.reshape(images[4], [width, height])*1./255. +time.sleep(1) + +# Get depth values using Tiny renderer +images = p.getCameraImage(width, height, view_matrix, projection_matrix, shadow=True, renderer=p.ER_TINY_RENDERER) +depth_buffer_tiny = np.reshape(images[3], [width, height]) +depth_tiny = far * near / (far - (far - near) * depth_buffer_tiny) +rgb_tiny= np.reshape(images[2], (height, width, 4))*1./255. +seg_tiny = np.reshape(images[4],[width,height])*1./255. 
+ + + +bearStartPos1 = [-3.3,0,0] +bearStartOrientation1 = p.getQuaternionFromEuler([0,0,0]) +bearId1 = p.loadURDF("plane.urdf", bearStartPos1, bearStartOrientation1) +bearStartPos2 = [0,0,0] +bearStartOrientation2 = p.getQuaternionFromEuler([0,0,0]) +bearId2 = p.loadURDF("teddy_large.urdf",bearStartPos2, bearStartOrientation2) +textureId = p.loadTexture("checker_grid.jpg") +for b in range (p.getNumBodies()): + p.changeVisualShape(b,linkIndex=-1,textureUniqueId=textureId) + for j in range(p.getNumJoints(b)): + p.changeVisualShape(b,linkIndex=j,textureUniqueId=textureId) + +viewMat = [0.642787516117096, -0.4393851161003113, 0.6275069713592529, 0.0, 0.766044557094574, 0.36868777871131897, -0.5265407562255859, 0.0, -0.0, 0.8191521167755127, 0.5735764503479004, 0.0, 2.384185791015625e-07, 2.384185791015625e-07, -5.000000476837158, 1.0] +projMat = [0.7499999403953552, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0000200271606445, -1.0, 0.0, 0.0, -0.02000020071864128, 0.0] +images = p.getCameraImage(width, height, viewMatrix = viewMat, projectionMatrix = projMat, renderer=p.ER_BULLET_HARDWARE_OPENGL, flags=p.ER_USE_PROJECTIVE_TEXTURE, projectiveTextureView=viewMat, projectiveTextureProj=projMat) +proj_opengl= np.reshape(images[2], (height, width, 4))*1./255. +time.sleep(1) + +images = p.getCameraImage(width, height, viewMatrix = viewMat, projectionMatrix = projMat, renderer=p.ER_TINY_RENDERER, flags=p.ER_USE_PROJECTIVE_TEXTURE, projectiveTextureView=viewMat, projectiveTextureProj=projMat) +proj_tiny= np.reshape(images[2], (height, width, 4))*1./255. 
+ +# Plot both images - should show depth values of 0.45 over the cube and 0.5 over the plane +plt.subplot(4, 2, 1) +plt.imshow(depth_opengl, cmap='gray', vmin=0, vmax=1) +plt.title('Depth OpenGL3') + +plt.subplot(4, 2, 2) +plt.imshow(depth_tiny, cmap='gray', vmin=0, vmax=1) +plt.title('Depth TinyRenderer') + +plt.subplot(4,2,3) +plt.imshow(rgb_opengl) +plt.title('RGB OpenGL3') + +plt.subplot(4,2,4) +plt.imshow(rgb_tiny) +plt.title('RGB Tiny') + +plt.subplot(4,2,5) +plt.imshow(seg_opengl) +plt.title('Seg OpenGL3') + +plt.subplot(4,2,6) +plt.imshow(seg_tiny) +plt.title('Seg Tiny') + +plt.subplot(4,2,7) +plt.imshow(proj_opengl) +plt.title('Proj OpenGL') + +plt.subplot(4,2,8) +plt.imshow(proj_tiny) +plt.title('Proj Tiny') +plt.subplots_adjust(hspace=0.7) + + +plt.show() diff --git a/examples/pybullet/examples/inverse_kinematics.py b/examples/pybullet/examples/inverse_kinematics.py index 7293c15c2..c311c3ca1 100644 --- a/examples/pybullet/examples/inverse_kinematics.py +++ b/examples/pybullet/examples/inverse_kinematics.py @@ -48,6 +48,7 @@ p.setRealTimeSimulation(useRealTimeSimulation) trailDuration = 15 while 1: + p.getCameraImage(320,200, flags=p.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX, renderer=p.ER_BULLET_HARDWARE_OPENGL) if (useRealTimeSimulation): dt = datetime.now() t = (dt.second/60.)*2.*math.pi diff --git a/examples/pybullet/examples/testrender_egl.py b/examples/pybullet/examples/testrender_egl.py index ee25f64b7..0625a363a 100644 --- a/examples/pybullet/examples/testrender_egl.py +++ b/examples/pybullet/examples/testrender_egl.py @@ -54,7 +54,7 @@ while (1): viewMatrix = pybullet.computeViewMatrixFromYawPitchRoll(camTargetPos, camDistance, yaw, pitch, roll, upAxisIndex) aspect = pixelWidth / pixelHeight; projectionMatrix = pybullet.computeProjectionMatrixFOV(fov, aspect, nearPlane, farPlane); - img_arr = pybullet.getCameraImage(pixelWidth, pixelHeight, viewMatrix,projectionMatrix, shadow=1,flags=pybullet.ER_SEGMENTATION_MASK, 
lightDirection=[1,1,1],renderer=pybullet.ER_BULLET_HARDWARE_OPENGL) + img_arr = pybullet.getCameraImage(pixelWidth, pixelHeight, viewMatrix,projectionMatrix, shadow=1,lightDirection=[1,1,1],renderer=pybullet.ER_BULLET_HARDWARE_OPENGL) stop = time.time() print ("renderImage %f" % (stop - start)) diff --git a/examples/pybullet/examples/transparent.py b/examples/pybullet/examples/transparent.py index 5c6cc09c6..6e66ad982 100644 --- a/examples/pybullet/examples/transparent.py +++ b/examples/pybullet/examples/transparent.py @@ -15,4 +15,5 @@ while (1): blue = p.readUserDebugParameter(blueSlider) alpha = p.readUserDebugParameter(alphaSlider) p.changeVisualShape(sphereUid,-1,rgbaColor=[red,green,blue,alpha]) - time.sleep(0.01) \ No newline at end of file + p.getCameraImage(320,200,flags=p.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX, renderer=p.ER_BULLET_HARDWARE_OPENGL) + time.sleep(0.01) diff --git a/examples/pybullet/examples/userData.py b/examples/pybullet/examples/userData.py index 164c470b5..139f3186c 100644 --- a/examples/pybullet/examples/userData.py +++ b/examples/pybullet/examples/userData.py @@ -3,7 +3,7 @@ import time from pybullet_utils import bullet_client -server = bullet_client.BulletClient(connection_mode=pb.GUI_SERVER) +server = bullet_client.BulletClient(connection_mode=pb.SHARED_MEMORY_SERVER) print ("Connecting to bullet server") diff --git a/setup.py b/setup.py index 9ceafeeb0..ae9ec4f66 100644 --- a/setup.py +++ b/setup.py @@ -559,7 +559,7 @@ if 'BT_USE_EGL' in EGL_CXX_FLAGS: setup( name = 'pybullet', - version='2.2.8', + version='2.2.9', description='Official Python Interface for the Bullet Physics SDK specialized for Robotics Simulation and Reinforcement Learning', long_description='pybullet is an easy to use Python module for physics simulation, robotics and deep reinforcement learning based on the Bullet Physics SDK. With pybullet you can load articulated bodies from URDF, SDF and other file formats. 
pybullet provides forward dynamics simulation, inverse dynamics computation, forward and inverse kinematics and collision detection and ray intersection queries. Aside from physics simulation, pybullet supports to rendering, with a CPU renderer and OpenGL visualization and support for virtual reality headsets.', url='https://github.com/bulletphysics/bullet3',