diff --git a/build3/premake4.lua b/build3/premake4.lua index ae83944d4..4bfb35d28 100644 --- a/build3/premake4.lua +++ b/build3/premake4.lua @@ -29,6 +29,11 @@ description = "Try to link and use the system OpenGL headers version instead of dynamically loading OpenGL (dlopen is default)" } + newoption + { + trigger = "enable_openvr", + description = "Enable experimental Virtual Reality examples, using OpenVR for HTC Vive and Oculus Rift" + } newoption { trigger = "enable_system_x11", diff --git a/examples/BasicDemo/premake4.lua b/examples/BasicDemo/premake4.lua index 6474d9298..dd88389e1 100644 --- a/examples/BasicDemo/premake4.lua +++ b/examples/BasicDemo/premake4.lua @@ -59,6 +59,7 @@ end + project "App_BasicExampleGuiWithSoftwareRenderer" if _OPTIONS["ios"] then @@ -133,3 +134,62 @@ files { "../TinyRenderer/TinyRenderer.cpp", "../Utils/b3ResourcePath.cpp" } + + + + + if _OPTIONS["enable_openvr"] then + +project "App_BasicExampleVR" + +if _OPTIONS["ios"] then + kind "WindowedApp" +else + kind "ConsoleApp" +end +defines {"B3_USE_STANDALONE_EXAMPLE","BT_ENABLE_VR"} + + + +includedirs {"../../src", + "../ThirdPartyLibs/openvr/headers", + "../ThirdPartyLibs/openvr/samples/shared"} + +links { + "BulletDynamics","BulletCollision", "LinearMath", "OpenGL_Window","Bullet3Common", "openvr_api" +} + + initOpenGL() + initGlew() + + +language "C++" + +files { + "BasicExample.cpp", + "*.h", + "../StandaloneMain/hellovr_opengl_main.cpp", + "../ExampleBrowser/OpenGLGuiHelper.cpp", + "../ExampleBrowser/GL_ShapeDrawer.cpp", + "../ExampleBrowser/CollisionShape2TriangleMesh.cpp", + "../ThirdPartyLibs/openvr/samples/shared/lodepng.cpp", + "../ThirdPartyLibs/openvr/samples/shared/lodepng.h", + "../ThirdPartyLibs/openvr/samples/shared/Matrices.cpp", + "../ThirdPartyLibs/openvr/samples/shared/Matrices.h", + "../ThirdPartyLibs/openvr/samples/shared/pathtools.cpp", + "../ThirdPartyLibs/openvr/samples/shared/pathtools.h", + "../ThirdPartyLibs/openvr/samples/shared/Vectors.h", + +} + +if os.is("Windows") then + libdirs {"../ThirdPartyLibs/openvr/lib/win32"} +end + +if os.is("Linux") then initX11() end + +if os.is("MacOSX") then + links{"Cocoa.framework"} +end + +end \ No newline at end of file diff --git a/examples/OpenGLWindow/GLInstancingRenderer.cpp b/examples/OpenGLWindow/GLInstancingRenderer.cpp index ebf450afd..24002ad25 100644 --- a/examples/OpenGLWindow/GLInstancingRenderer.cpp +++ b/examples/OpenGLWindow/GLInstancingRenderer.cpp @@ -158,10 +158,13 @@ struct InternalDataRenderer : public GLInstanceRendererInternalData GLRenderToTexture* m_shadowMap; GLuint m_shadowTexture; + GLuint m_renderFrameBuffer; + InternalDataRenderer() : m_shadowMap(0), - m_shadowTexture(0) + m_shadowTexture(0), + m_renderFrameBuffer(0) { //clear to zero to make it obvious if the matrix is used uninitialized for (int i=0;i<16;i++) @@ -1663,7 +1666,7 @@ b3Assert(glGetError() ==GL_NO_ERROR); { glDisable (GL_BLEND); } - + glActiveTexture(GL_TEXTURE0); break; } default: @@ -1688,6 +1691,7 @@ b3Assert(glGetError() ==GL_NO_ERROR); { // writeTextureToPng(shadowMapWidth,shadowMapHeight,"shadowmap.png",4); m_data->m_shadowMap->disable(); + glBindFramebuffer( GL_FRAMEBUFFER, m_data->m_renderFrameBuffer); glViewport(dims[0],dims[1],dims[2],dims[3]); } @@ -1733,4 +1737,10 @@ int GLInstancingRenderer::getInstanceCapacity() const { return m_data->m_maxNumObjectCapacity; } + +void GLInstancingRenderer::setRenderFrameBuffer(unsigned int renderFrameBuffer) +{ + m_data->m_renderFrameBuffer = (GLuint) renderFrameBuffer; +} + #endif //NO_OPENGL3 diff 
--git a/examples/OpenGLWindow/GLInstancingRenderer.h b/examples/OpenGLWindow/GLInstancingRenderer.h
index 34df83637..66664b844 100644
--- a/examples/OpenGLWindow/GLInstancingRenderer.h
+++ b/examples/OpenGLWindow/GLInstancingRenderer.h
@@ -132,6 +132,7 @@ public:
 	}
 	virtual void clearZBuffer();
+	virtual void setRenderFrameBuffer(unsigned int renderFrameBuffer);
 };
 #endif //GL_INSTANCING_RENDERER_H
diff --git a/examples/StandaloneMain/hellovr_opengl_main.cpp b/examples/StandaloneMain/hellovr_opengl_main.cpp
new file mode 100644
index 000000000..746f10835
--- /dev/null
+++ b/examples/StandaloneMain/hellovr_opengl_main.cpp
@@ -0,0 +1,1986 @@
+#ifdef BT_ENABLE_VR
+//========= Copyright Valve Corporation ============//
+
+#include "../OpenGLWindow/SimpleOpenGL3App.h"
+#include "../OpenGLWindow/OpenGLInclude.h"
+#include "Bullet3Common/b3Quaternion.h"
+#include "../ExampleBrowser/OpenGLGuiHelper.h"
+#include "../CommonInterfaces/CommonExampleInterface.h"
+#include "../CommonInterfaces/CommonGUIHelperInterface.h"
+#include "BulletCollision/CollisionDispatch/btCollisionObject.h"
+#include "BulletCollision/CollisionShapes/btCollisionShape.h"
+#include "BulletDynamics/Dynamics/btDiscreteDynamicsWorld.h"
+
+
+//how can you try typing on a keyboard, without seeing it?
+//it is pretty funny, to see the desktop in VR!
+
+
+#include <stdio.h>
+#include <string>
+#include <cstdlib>
+
+#include <openvr.h>
+
+#include "lodepng.h"
+#include "Matrices.h"
+#include "pathtools.h"
+
+CommonExampleInterface* sExample;
+OpenGLGuiHelper* sGuiPtr = 0;
+
+
+#if defined(POSIX)
+#include "unistd.h"
+#endif
+#ifdef _WIN32
+#include <Windows.h>
+#endif
+
+void ThreadSleep( unsigned long nMilliseconds )
+{
+#if defined(_WIN32)
+	::Sleep( nMilliseconds );
+#elif defined(POSIX)
+	usleep( nMilliseconds * 1000 );
+#endif
+}
+
+
+class CGLRenderModel
+{
+public:
+	CGLRenderModel( const std::string & sRenderModelName );
+	~CGLRenderModel();
+
+	bool BInit( const vr::RenderModel_t & vrModel, const vr::RenderModel_TextureMap_t & vrDiffuseTexture );
+	void Cleanup();
+	void Draw();
+	const std::string & GetName() const { return m_sModelName; }
+
+private:
+	GLuint m_glVertBuffer;
+	GLuint m_glIndexBuffer;
+	GLuint m_glVertArray;
+	GLuint m_glTexture;
+	GLsizei m_unVertexCount;
+	std::string m_sModelName;
+};
+
+static bool g_bPrintf = true;
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+class CMainApplication
+{
+public:
+	CMainApplication( int argc, char *argv[] );
+	virtual ~CMainApplication();
+
+	bool BInit();
+	bool BInitGL();
+	bool BInitCompositor();
+
+	void SetupRenderModels();
+
+	void Shutdown();
+
+	void RunMainLoop();
+	bool HandleInput();
+	void ProcessVREvent( const vr::VREvent_t & event );
+	void RenderFrame();
+
+	bool SetupTexturemaps();
+
+	void SetupScene();
+	void AddCubeToScene( Matrix4 mat, std::vector<float> &vertdata );
+	void AddCubeVertex( float fl0, float fl1, float fl2, float fl3, float fl4, std::vector<float> &vertdata );
+
+	void DrawControllers();
+
+	bool SetupStereoRenderTargets();
+	void SetupDistortion();
+	void SetupCameras();
+
+	void RenderStereoTargets();
+	void RenderDistortion();
+	void RenderScene( vr::Hmd_Eye nEye );
+
+	Matrix4 GetHMDMatrixProjectionEye( vr::Hmd_Eye nEye );
+	Matrix4 GetHMDMatrixPoseEye( vr::Hmd_Eye nEye );
+	Matrix4 GetCurrentViewProjectionMatrix( vr::Hmd_Eye nEye );
+	void UpdateHMDMatrixPose();
+
+	Matrix4 ConvertSteamVRMatrixToMatrix4( const vr::HmdMatrix34_t &matPose );
+
+	GLuint CompileGLShader( 
const char *pchShaderName, const char *pchVertexShader, const char *pchFragmentShader ); + bool CreateAllShaders(); + + void SetupRenderModelForTrackedDevice( vr::TrackedDeviceIndex_t unTrackedDeviceIndex ); + CGLRenderModel *FindOrLoadRenderModel( const char *pchRenderModelName ); + +private: + bool m_bDebugOpenGL; + bool m_bVerbose; + bool m_bPerf; + bool m_bVblank; + bool m_bGlFinishHack; + + vr::IVRSystem *m_pHMD; + vr::IVRRenderModels *m_pRenderModels; + std::string m_strDriver; + std::string m_strDisplay; + vr::TrackedDevicePose_t m_rTrackedDevicePose[ vr::k_unMaxTrackedDeviceCount ]; + Matrix4 m_rmat4DevicePose[ vr::k_unMaxTrackedDeviceCount ]; + bool m_rbShowTrackedDevice[ vr::k_unMaxTrackedDeviceCount ]; + +private: + SimpleOpenGL3App* m_app; + uint32_t m_nWindowWidth; + uint32_t m_nWindowHeight; + + +private: // OpenGL bookkeeping + int m_iTrackedControllerCount; + int m_iTrackedControllerCount_Last; + int m_iValidPoseCount; + int m_iValidPoseCount_Last; + bool m_bShowCubes; + + std::string m_strPoseClasses; // what classes we saw poses for this frame + char m_rDevClassChar[ vr::k_unMaxTrackedDeviceCount ]; // for each device, a character representing its class + + int m_iSceneVolumeWidth; + int m_iSceneVolumeHeight; + int m_iSceneVolumeDepth; + float m_fScaleSpacing; + float m_fScale; + + int m_iSceneVolumeInit; // if you want something other than the default 20x20x20 + + float m_fNearClip; + float m_fFarClip; + + GLuint m_iTexture; + + unsigned int m_uiVertcount; + + GLuint m_glSceneVertBuffer; + GLuint m_unSceneVAO; + GLuint m_unLensVAO; + GLuint m_glIDVertBuffer; + GLuint m_glIDIndexBuffer; + unsigned int m_uiIndexSize; + + GLuint m_glControllerVertBuffer; + GLuint m_unControllerVAO; + unsigned int m_uiControllerVertcount; + + Matrix4 m_mat4HMDPose; + Matrix4 m_mat4eyePosLeft; + Matrix4 m_mat4eyePosRight; + + Matrix4 m_mat4ProjectionCenter; + Matrix4 m_mat4ProjectionLeft; + Matrix4 m_mat4ProjectionRight; + + struct VertexDataScene + { + Vector3 position; + Vector2 texCoord; + }; + + struct VertexDataLens + { + Vector2 position; + Vector2 texCoordRed; + Vector2 texCoordGreen; + Vector2 texCoordBlue; + }; + + GLuint m_unSceneProgramID; + GLuint m_unLensProgramID; + GLuint m_unControllerTransformProgramID; + GLuint m_unRenderModelProgramID; + + GLint m_nSceneMatrixLocation; + GLint m_nControllerMatrixLocation; + GLint m_nRenderModelMatrixLocation; + + struct FramebufferDesc + { + GLuint m_nDepthBufferId; + GLuint m_nRenderTextureId; + GLuint m_nRenderFramebufferId; + GLuint m_nResolveTextureId; + GLuint m_nResolveFramebufferId; + }; + FramebufferDesc leftEyeDesc; + FramebufferDesc rightEyeDesc; + + bool CreateFrameBuffer( int nWidth, int nHeight, FramebufferDesc &framebufferDesc ); + + uint32_t m_nRenderWidth; + uint32_t m_nRenderHeight; + + std::vector< CGLRenderModel * > m_vecRenderModels; + CGLRenderModel *m_rTrackedDeviceToRenderModel[ vr::k_unMaxTrackedDeviceCount ]; +}; + + +//----------------------------------------------------------------------------- +// Purpose: Constructor +//----------------------------------------------------------------------------- +CMainApplication::CMainApplication( int argc, char *argv[] ) + : m_app(NULL) + , m_nWindowWidth( 1280 ) + , m_nWindowHeight( 720 ) + , m_unSceneProgramID( 0 ) + , m_unLensProgramID( 0 ) + , m_unControllerTransformProgramID( 0 ) + , m_unRenderModelProgramID( 0 ) + , m_pHMD( NULL ) + , m_pRenderModels( NULL ) + , m_bDebugOpenGL( false ) + , m_bVerbose( false ) + , m_bPerf( false ) + , m_bVblank( false ) + , 
m_bGlFinishHack( true ) + , m_glControllerVertBuffer( 0 ) + , m_unControllerVAO( 0 ) + , m_unLensVAO( 0 ) + , m_unSceneVAO( 0 ) + , m_nSceneMatrixLocation( -1 ) + , m_nControllerMatrixLocation( -1 ) + , m_nRenderModelMatrixLocation( -1 ) + , m_iTrackedControllerCount( 0 ) + , m_iTrackedControllerCount_Last( -1 ) + , m_iValidPoseCount( 0 ) + , m_iValidPoseCount_Last( -1 ) + , m_iSceneVolumeInit( 20 ) + , m_strPoseClasses("") + , m_bShowCubes( false ) +{ + + for( int i = 1; i < argc; i++ ) + { + if( !stricmp( argv[i], "-gldebug" ) ) + { + m_bDebugOpenGL = true; + } + else if( !stricmp( argv[i], "-verbose" ) ) + { + m_bVerbose = true; + } + else if( !stricmp( argv[i], "-novblank" ) ) + { + m_bVblank = false; + } + else if( !stricmp( argv[i], "-noglfinishhack" ) ) + { + m_bGlFinishHack = false; + } + else if( !stricmp( argv[i], "-noprintf" ) ) + { + g_bPrintf = false; + } + else if ( !stricmp( argv[i], "-cubevolume" ) && ( argc > i + 1 ) && ( *argv[ i + 1 ] != '-' ) ) + { + m_iSceneVolumeInit = atoi( argv[ i + 1 ] ); + i++; + } + } + // other initialization tasks are done in BInit + memset(m_rDevClassChar, 0, sizeof(m_rDevClassChar)); +}; + + +//----------------------------------------------------------------------------- +// Purpose: Destructor +//----------------------------------------------------------------------------- +CMainApplication::~CMainApplication() +{ + // work is done in Shutdown + b3Printf( "Shutdown" ); +} + + +//----------------------------------------------------------------------------- +// Purpose: Helper to get a string from a tracked device property and turn it +// into a std::string +//----------------------------------------------------------------------------- +std::string GetTrackedDeviceString( vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL ) +{ + uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty( unDevice, prop, NULL, 0, peError ); + if( unRequiredBufferLen == 0 ) + return ""; + + char *pchBuffer = new char[ unRequiredBufferLen ]; + unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty( unDevice, prop, pchBuffer, unRequiredBufferLen, peError ); + std::string sResult = pchBuffer; + delete [] pchBuffer; + return sResult; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::BInit() +{ + + // Loading the SteamVR Runtime + vr::EVRInitError eError = vr::VRInitError_None; + m_pHMD = vr::VR_Init( &eError, vr::VRApplication_Scene ); + + if ( eError != vr::VRInitError_None ) + { + m_pHMD = NULL; + char buf[1024]; + sprintf_s( buf, sizeof( buf ), "Unable to init VR runtime: %s", vr::VR_GetVRInitErrorAsEnglishDescription( eError ) ); + b3Warning( "VR_Init Failed %s", buf); + return false; + } + + + m_pRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface( vr::IVRRenderModels_Version, &eError ); + if( !m_pRenderModels ) + { + m_pHMD = NULL; + vr::VR_Shutdown(); + + char buf[1024]; + sprintf_s( buf, sizeof( buf ), "Unable to get render model interface: %s", vr::VR_GetVRInitErrorAsEnglishDescription( eError ) ); + b3Warning( "VR_Init Failed %s", buf); + return false; + } + +// int nWindowPosX = 700; +// int nWindowPosY = 100; + m_nWindowWidth = 1280; + m_nWindowHeight = 720; + + /* + + //SDL_GL_SetAttribute( SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_COMPATIBILITY ); + SDL_GL_SetAttribute( 
SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE ); + SDL_GL_SetAttribute( SDL_GL_MULTISAMPLEBUFFERS, 0 ); + SDL_GL_SetAttribute( SDL_GL_MULTISAMPLESAMPLES, 0 ); + if( m_bDebugOpenGL ) + SDL_GL_SetAttribute( SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_DEBUG_FLAG ); + + */ + m_app = new SimpleOpenGL3App("SimpleOpenGL3App",m_nWindowWidth,m_nWindowHeight,true); + + sGuiPtr = new OpenGLGuiHelper(m_app,false); + + + CommonExampleOptions options(sGuiPtr); + + sExample = StandaloneExampleCreateFunc(options); + sExample->initPhysics(); + sExample->resetCamera(); + +#if 0 + int cubeIndex = m_app->registerCubeShape(1,1,1); + + b3Quaternion orn(0,0,0,1); + + { + b3Vector3 color=b3MakeVector3(0.3,0.3,0.6); + b3Vector3 pos = b3MakeVector3(0,0,0); + b3Vector3 scaling=b3MakeVector3 (1,.1,1); + m_app->m_renderer->registerGraphicsInstance(cubeIndex,pos,orn,color,scaling); + } + { + b3Vector3 color=b3MakeVector3(0.3,0.6,0.3); + b3Vector3 pos = b3MakeVector3(0,0.3,0); + b3Vector3 scaling=b3MakeVector3 (.1,.1,.1); + m_app->m_renderer->registerGraphicsInstance(cubeIndex,pos,orn,color,scaling); + } +#endif + + + + m_app->m_renderer->writeTransforms(); + + + +/* if (m_pWindow == NULL) + { + printf( "%s - Window could not be created! SDL Error: %s\n", __FUNCTION__, SDL_GetError() ); + return false; + } + */ + + /*m_pContext = SDL_GL_CreateContext(m_pWindow); + if (m_pContext == NULL) + { + printf( "%s - OpenGL context could not be created! SDL Error: %s\n", __FUNCTION__, SDL_GetError() ); + return false; + } + + + glewExperimental = GL_TRUE; + GLenum nGlewError = glewInit(); + if (nGlewError != GLEW_OK) + { + printf( "%s - Error initializing GLEW! %s\n", __FUNCTION__, glewGetErrorString( nGlewError ) ); + return false; + } + glGetError(); // to clear the error caused deep in GLEW + + if ( SDL_GL_SetSwapInterval( m_bVblank ? 1 : 0 ) < 0 ) + { + printf( "%s - Warning: Unable to set VSync! 
SDL Error: %s\n", __FUNCTION__, SDL_GetError() ); + return false; + } + + */ + m_strDriver = "No Driver"; + m_strDisplay = "No Display"; + + m_strDriver = GetTrackedDeviceString( m_pHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String ); + m_strDisplay = GetTrackedDeviceString( m_pHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String ); + + std::string strWindowTitle = "hellovr_bullet - " + m_strDriver + " " + m_strDisplay; + m_app->m_window->setWindowTitle(strWindowTitle.c_str() ); + + // cube array + m_iSceneVolumeWidth = m_iSceneVolumeInit; + m_iSceneVolumeHeight = m_iSceneVolumeInit; + m_iSceneVolumeDepth = m_iSceneVolumeInit; + + m_fScale = 0.3f; + m_fScaleSpacing = 4.0f; + + m_fNearClip = 0.1f; + m_fFarClip = 30.0f; + + m_iTexture = 0; + m_uiVertcount = 0; + +// m_MillisecondsTimer.start(1, this); +// m_SecondsTimer.start(1000, this); + + if (!BInitGL()) + { + printf("%s - Unable to initialize OpenGL!\n", __FUNCTION__); + return false; + } + + if (!BInitCompositor()) + { + printf("%s - Failed to initialize VR Compositor!\n", __FUNCTION__); + return false; + } + + return true; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +/*void APIENTRY DebugCallback(GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const char* message, const void* userParam) +{ + b3Printf( "GL Error: %s\n", message ); +} +*/ + +static void APIENTRY DebugCallback (GLenum source, GLenum type, GLuint id, GLenum severity, GLsizei length, const GLchar* message, GLvoid* userParam) +{ + b3Printf( "GL Error: %s\n", message ); +} + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::BInitGL() +{ + if( m_bDebugOpenGL ) + { + const GLvoid *userParam=0; + glDebugMessageCallback(DebugCallback, userParam); + glDebugMessageControl( GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0, nullptr, GL_TRUE ); + glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS); + } + + if( !CreateAllShaders() ) + return false; + + SetupTexturemaps(); + SetupScene(); + SetupCameras(); + SetupStereoRenderTargets(); + SetupDistortion(); + + SetupRenderModels(); + + return true; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::BInitCompositor() +{ + vr::EVRInitError peError = vr::VRInitError_None; + + if ( !vr::VRCompositor() ) + { + printf( "Compositor initialization failed. 
See log file for details\n" ); + return false; + } + + return true; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::Shutdown() +{ + if( m_pHMD ) + { + vr::VR_Shutdown(); + m_pHMD = NULL; + } + + for( std::vector< CGLRenderModel * >::iterator i = m_vecRenderModels.begin(); i != m_vecRenderModels.end(); i++ ) + { + delete (*i); + } + m_vecRenderModels.clear(); + + if( 1)//m_pContext ) + { + glDebugMessageControl( GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0, nullptr, GL_FALSE ); + glDebugMessageCallback(nullptr, nullptr); + glDeleteBuffers(1, &m_glSceneVertBuffer); + glDeleteBuffers(1, &m_glIDVertBuffer); + glDeleteBuffers(1, &m_glIDIndexBuffer); + + if ( m_unSceneProgramID ) + { + glDeleteProgram( m_unSceneProgramID ); + } + if ( m_unControllerTransformProgramID ) + { + glDeleteProgram( m_unControllerTransformProgramID ); + } + if ( m_unRenderModelProgramID ) + { + glDeleteProgram( m_unRenderModelProgramID ); + } + if ( m_unLensProgramID ) + { + glDeleteProgram( m_unLensProgramID ); + } + + glDeleteRenderbuffers( 1, &leftEyeDesc.m_nDepthBufferId ); + glDeleteTextures( 1, &leftEyeDesc.m_nRenderTextureId ); + glDeleteFramebuffers( 1, &leftEyeDesc.m_nRenderFramebufferId ); + glDeleteTextures( 1, &leftEyeDesc.m_nResolveTextureId ); + glDeleteFramebuffers( 1, &leftEyeDesc.m_nResolveFramebufferId ); + + glDeleteRenderbuffers( 1, &rightEyeDesc.m_nDepthBufferId ); + glDeleteTextures( 1, &rightEyeDesc.m_nRenderTextureId ); + glDeleteFramebuffers( 1, &rightEyeDesc.m_nRenderFramebufferId ); + glDeleteTextures( 1, &rightEyeDesc.m_nResolveTextureId ); + glDeleteFramebuffers( 1, &rightEyeDesc.m_nResolveFramebufferId ); + + if( m_unLensVAO != 0 ) + { + glDeleteVertexArrays( 1, &m_unLensVAO ); + } + if( m_unSceneVAO != 0 ) + { + glDeleteVertexArrays( 1, &m_unSceneVAO ); + } + if( m_unControllerVAO != 0 ) + { + glDeleteVertexArrays( 1, &m_unControllerVAO ); + } + } + + delete m_app; + m_app=0; + +} + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::HandleInput() +{ + bool bRet = false; + + // Process SteamVR events + vr::VREvent_t event; + while( m_pHMD->PollNextEvent( &event, sizeof( event ) ) ) + { + ProcessVREvent( event ); + } + + // Process SteamVR controller state + for( vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++ ) + { + vr::VRControllerState_t state; + if( m_pHMD->GetControllerState( unDevice, &state ) ) + { + if (state.ulButtonPressed) + { + b3Printf("state.ulButtonPressed=%d\n",state.ulButtonPressed); + sExample->exitPhysics(); + m_app->m_instancingRenderer->removeAllInstances(); + sExample->initPhysics(); + + } + m_rbShowTrackedDevice[ unDevice ] = state.ulButtonPressed == 0; + } + } + + return bRet; +} + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::RunMainLoop() +{ + bool bQuit = false; + + //SDL_StartTextInput(); + //SDL_ShowCursor( SDL_DISABLE ); + + while ( !bQuit && !m_app->m_window->requestedExit()) + { + bQuit = HandleInput(); + + RenderFrame(); + } + + //SDL_StopTextInput(); +} + + +//----------------------------------------------------------------------------- +// Purpose: Processes a 
single VR event +//----------------------------------------------------------------------------- +void CMainApplication::ProcessVREvent( const vr::VREvent_t & event ) +{ + switch( event.eventType ) + { + case vr::VREvent_TrackedDeviceActivated: + { + SetupRenderModelForTrackedDevice( event.trackedDeviceIndex ); + b3Printf( "Device %u attached. Setting up render model.\n", event.trackedDeviceIndex ); + } + break; + case vr::VREvent_TrackedDeviceDeactivated: + { + b3Printf( "Device %u detached.\n", event.trackedDeviceIndex ); + } + break; + case vr::VREvent_TrackedDeviceUpdated: + { + b3Printf( "Device %u updated.\n", event.trackedDeviceIndex ); + } + break; + } +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::RenderFrame() +{ + // for now as fast as possible + if ( m_pHMD ) + { + DrawControllers(); + RenderStereoTargets(); + RenderDistortion(); + + vr::Texture_t leftEyeTexture = {(void*)leftEyeDesc.m_nResolveTextureId, vr::API_OpenGL, vr::ColorSpace_Gamma }; + vr::VRCompositor()->Submit(vr::Eye_Left, &leftEyeTexture ); + vr::Texture_t rightEyeTexture = {(void*)rightEyeDesc.m_nResolveTextureId, vr::API_OpenGL, vr::ColorSpace_Gamma }; + vr::VRCompositor()->Submit(vr::Eye_Right, &rightEyeTexture ); + } + + if ( m_bVblank && m_bGlFinishHack ) + { + //$ HACKHACK. From gpuview profiling, it looks like there is a bug where two renders and a present + // happen right before and after the vsync causing all kinds of jittering issues. This glFinish() + // appears to clear that up. Temporary fix while I try to get nvidia to investigate this problem. + // 1/29/2014 mikesart + glFinish(); + } + + // SwapWindow + { + m_app->swapBuffer(); + //SDL_GL_SwapWindow( m_pWindow ); + + } + + // Clear + { + // We want to make sure the glFinish waits for the entire present to complete, not just the submission + // of the command. So, we do a clear here right here so the glFinish will wait fully for the swap. + glClearColor( 0, 0, 0, 1 ); + glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ); + } + + // Flush and wait for swap. + if ( m_bVblank ) + { + glFlush(); + glFinish(); + } + + // Spew out the controller and pose count whenever they change. + if ( m_iTrackedControllerCount != m_iTrackedControllerCount_Last || m_iValidPoseCount != m_iValidPoseCount_Last ) + { + m_iValidPoseCount_Last = m_iValidPoseCount; + m_iTrackedControllerCount_Last = m_iTrackedControllerCount; + + b3Printf( "PoseCount:%d(%s) Controllers:%d\n", m_iValidPoseCount, m_strPoseClasses.c_str(), m_iTrackedControllerCount ); + } + + UpdateHMDMatrixPose(); +} + + +//----------------------------------------------------------------------------- +// Purpose: Compiles a GL shader program and returns the handle. Returns 0 if +// the shader couldn't be compiled for some reason. 
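+//
+// A small optional helper, sketched here as a hypothetical addition (the sample
+// below does not call it, and the name is illustrative only): dumping the GL
+// info log makes a failed compile much easier to diagnose than the bare
+// success/failure flag reported by CompileGLShader.
+static void PrintGLShaderInfoLog( GLuint shader )
+{
+	char log[1024] = { 0 };
+	GLsizei len = 0;
+	glGetShaderInfoLog( shader, sizeof( log ), &len, log );
+	if ( len > 0 )
+	{
+		b3Printf( "shader info log: %s\n", log );
+	}
+}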
+//----------------------------------------------------------------------------- +GLuint CMainApplication::CompileGLShader( const char *pchShaderName, const char *pchVertexShader, const char *pchFragmentShader ) +{ + GLuint unProgramID = glCreateProgram(); + + GLuint nSceneVertexShader = glCreateShader(GL_VERTEX_SHADER); + glShaderSource( nSceneVertexShader, 1, &pchVertexShader, NULL); + glCompileShader( nSceneVertexShader ); + + GLint vShaderCompiled = GL_FALSE; + glGetShaderiv( nSceneVertexShader, GL_COMPILE_STATUS, &vShaderCompiled); + if ( vShaderCompiled != GL_TRUE) + { + b3Printf("%s - Unable to compile vertex shader %d!\n", pchShaderName, nSceneVertexShader); + glDeleteProgram( unProgramID ); + glDeleteShader( nSceneVertexShader ); + return 0; + } + glAttachShader( unProgramID, nSceneVertexShader); + glDeleteShader( nSceneVertexShader ); // the program hangs onto this once it's attached + + GLuint nSceneFragmentShader = glCreateShader(GL_FRAGMENT_SHADER); + glShaderSource( nSceneFragmentShader, 1, &pchFragmentShader, NULL); + glCompileShader( nSceneFragmentShader ); + + GLint fShaderCompiled = GL_FALSE; + glGetShaderiv( nSceneFragmentShader, GL_COMPILE_STATUS, &fShaderCompiled); + if (fShaderCompiled != GL_TRUE) + { + b3Printf("%s - Unable to compile fragment shader %d!\n", pchShaderName, nSceneFragmentShader ); + glDeleteProgram( unProgramID ); + glDeleteShader( nSceneFragmentShader ); + return 0; + } + + glAttachShader( unProgramID, nSceneFragmentShader ); + glDeleteShader( nSceneFragmentShader ); // the program hangs onto this once it's attached + + glLinkProgram( unProgramID ); + + GLint programSuccess = GL_TRUE; + glGetProgramiv( unProgramID, GL_LINK_STATUS, &programSuccess); + if ( programSuccess != GL_TRUE ) + { + b3Printf("%s - Error linking program %d!\n", pchShaderName, unProgramID); + glDeleteProgram( unProgramID ); + return 0; + } + + glUseProgram( unProgramID ); + glUseProgram( 0 ); + + return unProgramID; +} + + +//----------------------------------------------------------------------------- +// Purpose: Creates all the shaders used by HelloVR SDL +//----------------------------------------------------------------------------- +bool CMainApplication::CreateAllShaders() +{ + m_unSceneProgramID = CompileGLShader( + "Scene", + + // Vertex Shader + "#version 410\n" + "uniform mat4 matrix;\n" + "layout(location = 0) in vec4 position;\n" + "layout(location = 1) in vec2 v2UVcoordsIn;\n" + "layout(location = 2) in vec3 v3NormalIn;\n" + "out vec2 v2UVcoords;\n" + "void main()\n" + "{\n" + " v2UVcoords = v2UVcoordsIn;\n" + " gl_Position = matrix * position;\n" + "}\n", + + // Fragment Shader + "#version 410 core\n" + "uniform sampler2D mytexture;\n" + "in vec2 v2UVcoords;\n" + "out vec4 outputColor;\n" + "void main()\n" + "{\n" + " outputColor = texture(mytexture, v2UVcoords);\n" + "}\n" + ); + m_nSceneMatrixLocation = glGetUniformLocation( m_unSceneProgramID, "matrix" ); + if( m_nSceneMatrixLocation == -1 ) + { + b3Printf( "Unable to find matrix uniform in scene shader\n" ); + return false; + } + + m_unControllerTransformProgramID = CompileGLShader( + "Controller", + + // vertex shader + "#version 410\n" + "uniform mat4 matrix;\n" + "layout(location = 0) in vec4 position;\n" + "layout(location = 1) in vec3 v3ColorIn;\n" + "out vec4 v4Color;\n" + "void main()\n" + "{\n" + " v4Color.xyz = v3ColorIn; v4Color.a = 1.0;\n" + " gl_Position = matrix * position;\n" + "}\n", + + // fragment shader + "#version 410\n" + "in vec4 v4Color;\n" + "out vec4 outputColor;\n" + "void main()\n" + 
"{\n" + " outputColor = v4Color;\n" + "}\n" + ); + m_nControllerMatrixLocation = glGetUniformLocation( m_unControllerTransformProgramID, "matrix" ); + if( m_nControllerMatrixLocation == -1 ) + { + b3Printf( "Unable to find matrix uniform in controller shader\n" ); + return false; + } + + m_unRenderModelProgramID = CompileGLShader( + "render model", + + // vertex shader + "#version 410\n" + "uniform mat4 matrix;\n" + "layout(location = 0) in vec4 position;\n" + "layout(location = 1) in vec3 v3NormalIn;\n" + "layout(location = 2) in vec2 v2TexCoordsIn;\n" + "out vec2 v2TexCoord;\n" + "void main()\n" + "{\n" + " v2TexCoord = v2TexCoordsIn;\n" + " gl_Position = matrix * vec4(position.xyz, 1);\n" + "}\n", + + //fragment shader + "#version 410 core\n" + "uniform sampler2D diffuse;\n" + "in vec2 v2TexCoord;\n" + "out vec4 outputColor;\n" + "void main()\n" + "{\n" + " outputColor = texture( diffuse, v2TexCoord);\n" + "}\n" + + ); + m_nRenderModelMatrixLocation = glGetUniformLocation( m_unRenderModelProgramID, "matrix" ); + if( m_nRenderModelMatrixLocation == -1 ) + { + b3Printf( "Unable to find matrix uniform in render model shader\n" ); + return false; + } + + m_unLensProgramID = CompileGLShader( + "Distortion", + + // vertex shader + "#version 410 core\n" + "layout(location = 0) in vec4 position;\n" + "layout(location = 1) in vec2 v2UVredIn;\n" + "layout(location = 2) in vec2 v2UVGreenIn;\n" + "layout(location = 3) in vec2 v2UVblueIn;\n" + "noperspective out vec2 v2UVred;\n" + "noperspective out vec2 v2UVgreen;\n" + "noperspective out vec2 v2UVblue;\n" + "void main()\n" + "{\n" + " v2UVred = v2UVredIn;\n" + " v2UVgreen = v2UVGreenIn;\n" + " v2UVblue = v2UVblueIn;\n" + " gl_Position = position;\n" + "}\n", + + // fragment shader + "#version 410 core\n" + "uniform sampler2D mytexture;\n" + + "noperspective in vec2 v2UVred;\n" + "noperspective in vec2 v2UVgreen;\n" + "noperspective in vec2 v2UVblue;\n" + + "out vec4 outputColor;\n" + + "void main()\n" + "{\n" + " float fBoundsCheck = ( (dot( vec2( lessThan( v2UVgreen.xy, vec2(0.05, 0.05)) ), vec2(1.0, 1.0))+dot( vec2( greaterThan( v2UVgreen.xy, vec2( 0.95, 0.95)) ), vec2(1.0, 1.0))) );\n" + " if( fBoundsCheck > 1.0 )\n" + " { outputColor = vec4( 0, 0, 0, 1.0 ); }\n" + " else\n" + " {\n" + " float red = texture(mytexture, v2UVred).x;\n" + " float green = texture(mytexture, v2UVgreen).y;\n" + " float blue = texture(mytexture, v2UVblue).z;\n" + " outputColor = vec4( red, green, blue, 1.0 ); }\n" + "}\n" + ); + + + return m_unSceneProgramID != 0 + && m_unControllerTransformProgramID != 0 + && m_unRenderModelProgramID != 0 + && m_unLensProgramID != 0; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::SetupTexturemaps() +{ + std::string sExecutableDirectory = Path_StripFilename( Path_GetExecutablePath() ); + std::string strFullPath = Path_MakeAbsolute( "../cube_texture.png", sExecutableDirectory ); + + std::vector imageRGBA; + unsigned nImageWidth, nImageHeight; + unsigned nError = lodepng::decode( imageRGBA, nImageWidth, nImageHeight, strFullPath.c_str() ); + + if ( nError != 0 ) + return false; + + glGenTextures(1, &m_iTexture ); + glBindTexture( GL_TEXTURE_2D, m_iTexture ); + + glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, nImageWidth, nImageHeight, + 0, GL_RGBA, GL_UNSIGNED_BYTE, &imageRGBA[0] ); + + glGenerateMipmap(GL_TEXTURE_2D); + + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, 
GL_CLAMP_TO_EDGE );
+	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
+	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
+	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR );
+
+	GLfloat fLargest;
+	glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &fLargest);
+	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, fLargest);
+
+	glBindTexture( GL_TEXTURE_2D, 0 );
+
+	return ( m_iTexture != 0 );
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose: create a sea of cubes
+//-----------------------------------------------------------------------------
+void CMainApplication::SetupScene()
+{
+	if ( !m_pHMD )
+		return;
+
+	std::vector<float> vertdataarray;
+
+	Matrix4 matScale;
+	matScale.scale( m_fScale, m_fScale, m_fScale );
+	Matrix4 matTransform;
+	matTransform.translate(
+		-( (float)m_iSceneVolumeWidth * m_fScaleSpacing ) / 2.f,
+		-( (float)m_iSceneVolumeHeight * m_fScaleSpacing ) / 2.f,
+		-( (float)m_iSceneVolumeDepth * m_fScaleSpacing ) / 2.f);
+
+	Matrix4 mat = matScale * matTransform;
+
+	for( int z = 0; z< m_iSceneVolumeDepth; z++ )
+	{
+		for( int y = 0; y< m_iSceneVolumeHeight; y++ )
+		{
+			for( int x = 0; x< m_iSceneVolumeWidth; x++ )
+			{
+				AddCubeToScene( mat, vertdataarray );
+				mat = mat * Matrix4().translate( m_fScaleSpacing, 0, 0 );
+			}
+			mat = mat * Matrix4().translate( -((float)m_iSceneVolumeWidth) * m_fScaleSpacing, m_fScaleSpacing, 0 );
+		}
+		mat = mat * Matrix4().translate( 0, -((float)m_iSceneVolumeHeight) * m_fScaleSpacing, m_fScaleSpacing );
+	}
+	m_uiVertcount = vertdataarray.size()/5;
+
+	glGenVertexArrays( 1, &m_unSceneVAO );
+	glBindVertexArray( m_unSceneVAO );
+
+	glGenBuffers( 1, &m_glSceneVertBuffer );
+	glBindBuffer( GL_ARRAY_BUFFER, m_glSceneVertBuffer );
+	glBufferData( GL_ARRAY_BUFFER, sizeof(float) * vertdataarray.size(), &vertdataarray[0], GL_STATIC_DRAW);
+
+	glBindBuffer( GL_ARRAY_BUFFER, m_glSceneVertBuffer );
+
+	GLsizei stride = sizeof(VertexDataScene);
+	uintptr_t offset = 0;
+
+	glEnableVertexAttribArray( 0 );
+	glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, stride , (const void *)offset);
+
+	offset += sizeof(Vector3);
+	glEnableVertexAttribArray( 1 );
+	glVertexAttribPointer( 1, 2, GL_FLOAT, GL_FALSE, stride, (const void *)offset);
+
+	glBindVertexArray( 0 );
+	glDisableVertexAttribArray(0);
+	glDisableVertexAttribArray(1);
+
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+void CMainApplication::AddCubeVertex( float fl0, float fl1, float fl2, float fl3, float fl4, std::vector<float> &vertdata )
+{
+	vertdata.push_back( fl0 );
+	vertdata.push_back( fl1 );
+	vertdata.push_back( fl2 );
+	vertdata.push_back( fl3 );
+	vertdata.push_back( fl4 );
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+void CMainApplication::AddCubeToScene( Matrix4 mat, std::vector<float> &vertdata )
+{
+	// Matrix4 mat( outermat.data() );
+
+	Vector4 A = mat * Vector4( 0, 0, 0, 1 );
+	Vector4 B = mat * Vector4( 1, 0, 0, 1 );
+	Vector4 C = mat * Vector4( 1, 1, 0, 1 );
+	Vector4 D = mat * Vector4( 0, 1, 0, 1 );
+	Vector4 E = mat * Vector4( 0, 0, 1, 1 );
+	Vector4 F = mat * Vector4( 1, 0, 1, 1 );
+	Vector4 G = mat * Vector4( 1, 1, 1, 1 );
+	Vector4 H = mat * Vector4( 0, 1, 1, 1 );
+
+	// triangles instead of quads
+
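+	// A..H are the eight corners of the unit cube transformed by mat; each face
+	// below is pushed as two triangles (six vertices), and the last two values of
+	// every AddCubeVertex call are the (u,v) texture coordinates for that corner.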
AddCubeVertex( E.x, E.y, E.z, 0, 1, vertdata ); //Front + AddCubeVertex( F.x, F.y, F.z, 1, 1, vertdata ); + AddCubeVertex( G.x, G.y, G.z, 1, 0, vertdata ); + AddCubeVertex( G.x, G.y, G.z, 1, 0, vertdata ); + AddCubeVertex( H.x, H.y, H.z, 0, 0, vertdata ); + AddCubeVertex( E.x, E.y, E.z, 0, 1, vertdata ); + + AddCubeVertex( B.x, B.y, B.z, 0, 1, vertdata ); //Back + AddCubeVertex( A.x, A.y, A.z, 1, 1, vertdata ); + AddCubeVertex( D.x, D.y, D.z, 1, 0, vertdata ); + AddCubeVertex( D.x, D.y, D.z, 1, 0, vertdata ); + AddCubeVertex( C.x, C.y, C.z, 0, 0, vertdata ); + AddCubeVertex( B.x, B.y, B.z, 0, 1, vertdata ); + + AddCubeVertex( H.x, H.y, H.z, 0, 1, vertdata ); //Top + AddCubeVertex( G.x, G.y, G.z, 1, 1, vertdata ); + AddCubeVertex( C.x, C.y, C.z, 1, 0, vertdata ); + AddCubeVertex( C.x, C.y, C.z, 1, 0, vertdata ); + AddCubeVertex( D.x, D.y, D.z, 0, 0, vertdata ); + AddCubeVertex( H.x, H.y, H.z, 0, 1, vertdata ); + + AddCubeVertex( A.x, A.y, A.z, 0, 1, vertdata ); //Bottom + AddCubeVertex( B.x, B.y, B.z, 1, 1, vertdata ); + AddCubeVertex( F.x, F.y, F.z, 1, 0, vertdata ); + AddCubeVertex( F.x, F.y, F.z, 1, 0, vertdata ); + AddCubeVertex( E.x, E.y, E.z, 0, 0, vertdata ); + AddCubeVertex( A.x, A.y, A.z, 0, 1, vertdata ); + + AddCubeVertex( A.x, A.y, A.z, 0, 1, vertdata ); //Left + AddCubeVertex( E.x, E.y, E.z, 1, 1, vertdata ); + AddCubeVertex( H.x, H.y, H.z, 1, 0, vertdata ); + AddCubeVertex( H.x, H.y, H.z, 1, 0, vertdata ); + AddCubeVertex( D.x, D.y, D.z, 0, 0, vertdata ); + AddCubeVertex( A.x, A.y, A.z, 0, 1, vertdata ); + + AddCubeVertex( F.x, F.y, F.z, 0, 1, vertdata ); //Right + AddCubeVertex( B.x, B.y, B.z, 1, 1, vertdata ); + AddCubeVertex( C.x, C.y, C.z, 1, 0, vertdata ); + AddCubeVertex( C.x, C.y, C.z, 1, 0, vertdata ); + AddCubeVertex( G.x, G.y, G.z, 0, 0, vertdata ); + AddCubeVertex( F.x, F.y, F.z, 0, 1, vertdata ); +} + + +//----------------------------------------------------------------------------- +// Purpose: Draw all of the controllers as X/Y/Z lines +//----------------------------------------------------------------------------- +void CMainApplication::DrawControllers() +{ + // don't draw controllers if somebody else has input focus + if( m_pHMD->IsInputFocusCapturedByAnotherProcess() ) + return; + + std::vector vertdataarray; + + m_uiControllerVertcount = 0; + m_iTrackedControllerCount = 0; + + for ( vr::TrackedDeviceIndex_t unTrackedDevice = vr::k_unTrackedDeviceIndex_Hmd + 1; unTrackedDevice < vr::k_unMaxTrackedDeviceCount; ++unTrackedDevice ) + { + if ( !m_pHMD->IsTrackedDeviceConnected( unTrackedDevice ) ) + continue; + + if( m_pHMD->GetTrackedDeviceClass( unTrackedDevice ) != vr::TrackedDeviceClass_Controller ) + continue; + + m_iTrackedControllerCount += 1; + + if( !m_rTrackedDevicePose[ unTrackedDevice ].bPoseIsValid ) + continue; + + const Matrix4 & mat = m_rmat4DevicePose[unTrackedDevice]; + + Vector4 center = mat * Vector4( 0, 0, 0, 1 ); + + for ( int i = 0; i < 3; ++i ) + { + Vector3 color( 0, 0, 0 ); + Vector4 point( 0, 0, 0, 1 ); + point[i] += 0.05f; // offset in X, Y, Z + color[i] = 1.0; // R, G, B + point = mat * point; + vertdataarray.push_back( center.x ); + vertdataarray.push_back( center.y ); + vertdataarray.push_back( center.z ); + + vertdataarray.push_back( color.x ); + vertdataarray.push_back( color.y ); + vertdataarray.push_back( color.z ); + + vertdataarray.push_back( point.x ); + vertdataarray.push_back( point.y ); + vertdataarray.push_back( point.z ); + + vertdataarray.push_back( color.x ); + vertdataarray.push_back( color.y ); + 
vertdataarray.push_back( color.z ); + + m_uiControllerVertcount += 2; + } + + Vector4 start = mat * Vector4( 0, 0, -0.02f, 1 ); + Vector4 end = mat * Vector4( 0, 0, -39.f, 1 ); + Vector3 color( .92f, .92f, .71f ); + + vertdataarray.push_back( start.x );vertdataarray.push_back( start.y );vertdataarray.push_back( start.z ); + vertdataarray.push_back( color.x );vertdataarray.push_back( color.y );vertdataarray.push_back( color.z ); + + vertdataarray.push_back( end.x );vertdataarray.push_back( end.y );vertdataarray.push_back( end.z ); + vertdataarray.push_back( color.x );vertdataarray.push_back( color.y );vertdataarray.push_back( color.z ); + m_uiControllerVertcount += 2; + } + + // Setup the VAO the first time through. + if ( m_unControllerVAO == 0 ) + { + glGenVertexArrays( 1, &m_unControllerVAO ); + glBindVertexArray( m_unControllerVAO ); + + glGenBuffers( 1, &m_glControllerVertBuffer ); + glBindBuffer( GL_ARRAY_BUFFER, m_glControllerVertBuffer ); + + GLuint stride = 2 * 3 * sizeof( float ); + GLuint offset = 0; + + glEnableVertexAttribArray( 0 ); + glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, stride, (const void *)offset); + + offset += sizeof( Vector3 ); + glEnableVertexAttribArray( 1 ); + glVertexAttribPointer( 1, 3, GL_FLOAT, GL_FALSE, stride, (const void *)offset); + + glBindVertexArray( 0 ); + } + + glBindBuffer( GL_ARRAY_BUFFER, m_glControllerVertBuffer ); + + // set vertex data if we have some + if( vertdataarray.size() > 0 ) + { + //$ TODO: Use glBufferSubData for this... + glBufferData( GL_ARRAY_BUFFER, sizeof(float) * vertdataarray.size(), &vertdataarray[0], GL_STREAM_DRAW ); + } +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::SetupCameras() +{ + m_mat4ProjectionLeft = GetHMDMatrixProjectionEye( vr::Eye_Left ); + m_mat4ProjectionRight = GetHMDMatrixProjectionEye( vr::Eye_Right ); + m_mat4eyePosLeft = GetHMDMatrixPoseEye( vr::Eye_Left ); + m_mat4eyePosRight = GetHMDMatrixPoseEye( vr::Eye_Right ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +bool CMainApplication::CreateFrameBuffer( int nWidth, int nHeight, FramebufferDesc &framebufferDesc ) +{ + glGenFramebuffers(1, &framebufferDesc.m_nRenderFramebufferId ); + glBindFramebuffer(GL_FRAMEBUFFER, framebufferDesc.m_nRenderFramebufferId); + + glGenRenderbuffers(1, &framebufferDesc.m_nDepthBufferId); + glBindRenderbuffer(GL_RENDERBUFFER, framebufferDesc.m_nDepthBufferId); + glRenderbufferStorageMultisample(GL_RENDERBUFFER, 4, GL_DEPTH_COMPONENT, nWidth, nHeight ); + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, framebufferDesc.m_nDepthBufferId ); + + glGenTextures(1, &framebufferDesc.m_nRenderTextureId ); + glBindTexture(GL_TEXTURE_2D_MULTISAMPLE, framebufferDesc.m_nRenderTextureId ); + glTexImage2DMultisample(GL_TEXTURE_2D_MULTISAMPLE, 4, GL_RGBA8, nWidth, nHeight, true); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D_MULTISAMPLE, framebufferDesc.m_nRenderTextureId, 0); + + glGenFramebuffers(1, &framebufferDesc.m_nResolveFramebufferId ); + glBindFramebuffer(GL_FRAMEBUFFER, framebufferDesc.m_nResolveFramebufferId); + + glGenTextures(1, &framebufferDesc.m_nResolveTextureId ); + glBindTexture(GL_TEXTURE_2D, framebufferDesc.m_nResolveTextureId ); + glTexParameteri(GL_TEXTURE_2D, 
GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
+	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, nWidth, nHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
+	glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, framebufferDesc.m_nResolveTextureId, 0);
+
+	// check FBO status
+	GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+	if (status != GL_FRAMEBUFFER_COMPLETE)
+	{
+		return false;
+	}
+
+	glBindFramebuffer( GL_FRAMEBUFFER, 0 );
+
+	return true;
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+bool CMainApplication::SetupStereoRenderTargets()
+{
+	if ( !m_pHMD )
+		return false;
+
+	m_pHMD->GetRecommendedRenderTargetSize( &m_nRenderWidth, &m_nRenderHeight );
+
+	CreateFrameBuffer( m_nRenderWidth, m_nRenderHeight, leftEyeDesc );
+	CreateFrameBuffer( m_nRenderWidth, m_nRenderHeight, rightEyeDesc );
+
+	return true;
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+void CMainApplication::SetupDistortion()
+{
+	if ( !m_pHMD )
+		return;
+
+	GLushort m_iLensGridSegmentCountH = 43;
+	GLushort m_iLensGridSegmentCountV = 43;
+
+	float w = (float)( 1.0/float(m_iLensGridSegmentCountH-1));
+	float h = (float)( 1.0/float(m_iLensGridSegmentCountV-1));
+
+	float u, v = 0;
+
+	std::vector<VertexDataLens> vVerts(0);
+	VertexDataLens vert;
+
+	//left eye distortion verts
+	float Xoffset = -1;
+	for( int y=0; y<m_iLensGridSegmentCountV; y++ )
+	{
+		for( int x=0; x<m_iLensGridSegmentCountH; x++ )
+		{
+			u = x*w; v = 1-y*h;
+			vert.position = Vector2( Xoffset+u, -1+2*y*h );
+
+			vr::DistortionCoordinates_t dc0 = m_pHMD->ComputeDistortion(vr::Eye_Left, u, v);
+
+			vert.texCoordRed = Vector2(dc0.rfRed[0], 1 - dc0.rfRed[1]);
+			vert.texCoordGreen = Vector2(dc0.rfGreen[0], 1 - dc0.rfGreen[1]);
+			vert.texCoordBlue = Vector2(dc0.rfBlue[0], 1 - dc0.rfBlue[1]);
+
+			vVerts.push_back( vert );
+		}
+	}
+
+	//right eye distortion verts
+	Xoffset = 0;
+	for( int y=0; y<m_iLensGridSegmentCountV; y++ )
+	{
+		for( int x=0; x<m_iLensGridSegmentCountH; x++ )
+		{
+			u = x*w; v = 1-y*h;
+			vert.position = Vector2( Xoffset+u, -1+2*y*h );
+
+			vr::DistortionCoordinates_t dc0 = m_pHMD->ComputeDistortion( vr::Eye_Right, u, v );
+
+			vert.texCoordRed = Vector2(dc0.rfRed[0], 1 - dc0.rfRed[1]);
+			vert.texCoordGreen = Vector2(dc0.rfGreen[0], 1 - dc0.rfGreen[1]);
+			vert.texCoordBlue = Vector2(dc0.rfBlue[0], 1 - dc0.rfBlue[1]);
+
+			vVerts.push_back( vert );
+		}
+	}
+
+	std::vector<GLushort> vIndices;
+	GLushort a,b,c,d;
+
+	GLushort offset = 0;
+	for( GLushort y=0; y<m_iLensGridSegmentCountV-1; y++ )
+	{
+		for( GLushort x=0; x<m_iLensGridSegmentCountH-1; x++ )
+		{
+			a = m_iLensGridSegmentCountH*y+x +offset;
+			b = m_iLensGridSegmentCountH*y+x+1 +offset;
+			c = (y+1)*m_iLensGridSegmentCountH+x+1 +offset;
+			d = (y+1)*m_iLensGridSegmentCountH+x +offset;
+			vIndices.push_back( a );
+			vIndices.push_back( b );
+			vIndices.push_back( c );
+
+			vIndices.push_back( a );
+			vIndices.push_back( c );
+			vIndices.push_back( d );
+		}
+	}
+
+	offset = (m_iLensGridSegmentCountH)*(m_iLensGridSegmentCountV);
+	for( GLushort y=0; y<m_iLensGridSegmentCountV-1; y++ )
+	{
+		for( GLushort x=0; x<m_iLensGridSegmentCountH-1; x++ )
+		{
+			a = m_iLensGridSegmentCountH*y+x +offset;
+			b = m_iLensGridSegmentCountH*y+x+1 +offset;
+			c = (y+1)*m_iLensGridSegmentCountH+x+1 +offset;
+			d = (y+1)*m_iLensGridSegmentCountH+x +offset;
+			vIndices.push_back( a );
+			vIndices.push_back( b );
+			vIndices.push_back( c );
+
+			vIndices.push_back( a );
+			vIndices.push_back( c );
+			vIndices.push_back( d );
+		}
+	}
+	m_uiIndexSize = vIndices.size();
+
+	glGenVertexArrays( 1, &m_unLensVAO );
+	glBindVertexArray( m_unLensVAO );
+
+	glGenBuffers( 1, &m_glIDVertBuffer );
+	glBindBuffer( GL_ARRAY_BUFFER, m_glIDVertBuffer );
+	glBufferData( GL_ARRAY_BUFFER, vVerts.size()*sizeof(VertexDataLens), &vVerts[0], GL_STATIC_DRAW );
+
+	glGenBuffers( 1, &m_glIDIndexBuffer );
+	glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, m_glIDIndexBuffer );
+	glBufferData( GL_ELEMENT_ARRAY_BUFFER, vIndices.size()*sizeof(GLushort), &vIndices[0], GL_STATIC_DRAW );
+
+	glEnableVertexAttribArray( 0 );
+	glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(VertexDataLens), (void *)offsetof( VertexDataLens, position ) );
+
+	glEnableVertexAttribArray( 1 );
+	glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(VertexDataLens), (void *)offsetof( VertexDataLens, texCoordRed ) );
+
+	glEnableVertexAttribArray(2);
+	glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(VertexDataLens), (void *)offsetof( VertexDataLens, texCoordGreen ) );
+
+	glEnableVertexAttribArray(3);
+	glVertexAttribPointer(3, 2, GL_FLOAT, GL_FALSE, sizeof(VertexDataLens), (void *)offsetof( VertexDataLens, texCoordBlue ) );
+
+	glBindVertexArray( 0 );
+
+	glDisableVertexAttribArray(0);
+	glDisableVertexAttribArray(1);
+	glDisableVertexAttribArray(2);
+	glDisableVertexAttribArray(3);
+}
+
+
+//-----------------------------------------------------------------------------
+// Purpose:
+//-----------------------------------------------------------------------------
+void CMainApplication::RenderStereoTargets()
+{
+	glClearColor( 0.15f, 0.15f, 0.18f, 1.0f );
+	glEnable( GL_MULTISAMPLE );
+
+	m_app->m_instancingRenderer->init();
+
+
+
+	// Left Eye
+	{
+		Matrix4 viewMatLeft = m_mat4eyePosLeft * m_mat4HMDPose;
+		m_app->m_instancingRenderer->getActiveCamera()->setVRCamera(viewMatLeft.get(),m_mat4ProjectionLeft.get());
+		m_app->m_instancingRenderer->updateCamera();
+	}
+
+	glBindFramebuffer( GL_FRAMEBUFFER, leftEyeDesc.m_nRenderFramebufferId );
+	glViewport(0, 0, m_nRenderWidth, m_nRenderHeight );
+
+
+
+	m_app->m_window->startRendering();
+	RenderScene( vr::Eye_Left );
+
+	m_app->drawGrid();
+	sExample->stepSimulation(1./60.);
+	sExample->renderScene();
+
+
+
+	m_app->m_instancingRenderer->setRenderFrameBuffer((unsigned int)leftEyeDesc.m_nRenderFramebufferId);
+
+	m_app->m_instancingRenderer->renderScene();
+
+	glBindFramebuffer( GL_FRAMEBUFFER, 0 );
+
+	glDisable( GL_MULTISAMPLE );
+
+	glBindFramebuffer(GL_READ_FRAMEBUFFER, leftEyeDesc.m_nRenderFramebufferId);
+	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, leftEyeDesc.m_nResolveFramebufferId );
+
+	glBlitFramebuffer( 0, 0, m_nRenderWidth, m_nRenderHeight, 0, 0, m_nRenderWidth, m_nRenderHeight,
+		GL_COLOR_BUFFER_BIT,
+		GL_LINEAR );
+
+	glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
+	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0 );
+
+	glEnable( 
GL_MULTISAMPLE ); + + // Right Eye + + { + Matrix4 viewMatRight = m_mat4eyePosRight * m_mat4HMDPose; + m_app->m_instancingRenderer->getActiveCamera()->setVRCamera(viewMatRight.get(),m_mat4ProjectionRight.get()); + m_app->m_instancingRenderer->updateCamera(); + } + + glBindFramebuffer( GL_FRAMEBUFFER, rightEyeDesc.m_nRenderFramebufferId ); + glViewport(0, 0, m_nRenderWidth, m_nRenderHeight ); + + m_app->m_window->startRendering(); + RenderScene( vr::Eye_Right ); + + m_app->drawGrid(); + m_app->m_instancingRenderer->setRenderFrameBuffer((unsigned int)rightEyeDesc.m_nRenderFramebufferId); + + m_app->m_renderer->renderScene(); + + glBindFramebuffer( GL_FRAMEBUFFER, 0 ); + + glDisable( GL_MULTISAMPLE ); + + glBindFramebuffer(GL_READ_FRAMEBUFFER, rightEyeDesc.m_nRenderFramebufferId ); + glBindFramebuffer(GL_DRAW_FRAMEBUFFER, rightEyeDesc.m_nResolveFramebufferId ); + + glBlitFramebuffer( 0, 0, m_nRenderWidth, m_nRenderHeight, 0, 0, m_nRenderWidth, m_nRenderHeight, + GL_COLOR_BUFFER_BIT, + GL_LINEAR ); + + glBindFramebuffer(GL_READ_FRAMEBUFFER, 0); + glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0 ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::RenderScene( vr::Hmd_Eye nEye ) +{ + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + glEnable(GL_DEPTH_TEST); + + if( m_bShowCubes ) + { + glUseProgram( m_unSceneProgramID ); + glUniformMatrix4fv( m_nSceneMatrixLocation, 1, GL_FALSE, GetCurrentViewProjectionMatrix( nEye ).get() ); + glBindVertexArray( m_unSceneVAO ); + glBindTexture( GL_TEXTURE_2D, m_iTexture ); + glDrawArrays( GL_TRIANGLES, 0, m_uiVertcount ); + glBindVertexArray( 0 ); + } + + bool bIsInputCapturedByAnotherProcess = m_pHMD->IsInputFocusCapturedByAnotherProcess(); + + if( !bIsInputCapturedByAnotherProcess ) + { + // draw the controller axis lines + glUseProgram( m_unControllerTransformProgramID ); + glUniformMatrix4fv( m_nControllerMatrixLocation, 1, GL_FALSE, GetCurrentViewProjectionMatrix( nEye ).get() ); + glBindVertexArray( m_unControllerVAO ); + glDrawArrays( GL_LINES, 0, m_uiControllerVertcount ); + glBindVertexArray( 0 ); + } + + // ----- Render Model rendering ----- + glUseProgram( m_unRenderModelProgramID ); + + for( uint32_t unTrackedDevice = 0; unTrackedDevice < vr::k_unMaxTrackedDeviceCount; unTrackedDevice++ ) + { + if( !m_rTrackedDeviceToRenderModel[ unTrackedDevice ] || !m_rbShowTrackedDevice[ unTrackedDevice ] ) + continue; + + const vr::TrackedDevicePose_t & pose = m_rTrackedDevicePose[ unTrackedDevice ]; + if( !pose.bPoseIsValid ) + continue; + + if( bIsInputCapturedByAnotherProcess && m_pHMD->GetTrackedDeviceClass( unTrackedDevice ) == vr::TrackedDeviceClass_Controller ) + continue; + + const Matrix4 & matDeviceToTracking = m_rmat4DevicePose[ unTrackedDevice ]; + Matrix4 matMVP = GetCurrentViewProjectionMatrix( nEye ) * matDeviceToTracking; + glUniformMatrix4fv( m_nRenderModelMatrixLocation, 1, GL_FALSE, matMVP.get() ); + + m_rTrackedDeviceToRenderModel[ unTrackedDevice ]->Draw(); + } + + glUseProgram( 0 ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::RenderDistortion() +{ + glDisable(GL_DEPTH_TEST); + glViewport( 0, 0, m_nWindowWidth, m_nWindowHeight ); + + glBindVertexArray( m_unLensVAO ); + glUseProgram( m_unLensProgramID ); + + //render left lens (first 
half of index array ) + glBindTexture(GL_TEXTURE_2D, leftEyeDesc.m_nResolveTextureId ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, 0 ); + + //render right lens (second half of index array ) + glBindTexture(GL_TEXTURE_2D, rightEyeDesc.m_nResolveTextureId ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, (const void *)(m_uiIndexSize) ); + + glBindVertexArray( 0 ); + glUseProgram( 0 ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +Matrix4 CMainApplication::GetHMDMatrixProjectionEye( vr::Hmd_Eye nEye ) +{ + if ( !m_pHMD ) + return Matrix4(); + + vr::HmdMatrix44_t mat = m_pHMD->GetProjectionMatrix( nEye, m_fNearClip, m_fFarClip, vr::API_OpenGL); + + return Matrix4( + mat.m[0][0], mat.m[1][0], mat.m[2][0], mat.m[3][0], + mat.m[0][1], mat.m[1][1], mat.m[2][1], mat.m[3][1], + mat.m[0][2], mat.m[1][2], mat.m[2][2], mat.m[3][2], + mat.m[0][3], mat.m[1][3], mat.m[2][3], mat.m[3][3] + ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +Matrix4 CMainApplication::GetHMDMatrixPoseEye( vr::Hmd_Eye nEye ) +{ + if ( !m_pHMD ) + return Matrix4(); + + vr::HmdMatrix34_t matEyeRight = m_pHMD->GetEyeToHeadTransform( nEye ); + Matrix4 matrixObj( + matEyeRight.m[0][0], matEyeRight.m[1][0], matEyeRight.m[2][0], 0.0, + matEyeRight.m[0][1], matEyeRight.m[1][1], matEyeRight.m[2][1], 0.0, + matEyeRight.m[0][2], matEyeRight.m[1][2], matEyeRight.m[2][2], 0.0, + matEyeRight.m[0][3], matEyeRight.m[1][3], matEyeRight.m[2][3], 1.0f + ); + + return matrixObj.invert(); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +Matrix4 CMainApplication::GetCurrentViewProjectionMatrix( vr::Hmd_Eye nEye ) +{ + Matrix4 matMVP; + if( nEye == vr::Eye_Left ) + { + matMVP = m_mat4ProjectionLeft * m_mat4eyePosLeft * m_mat4HMDPose; + } + else if( nEye == vr::Eye_Right ) + { + matMVP = m_mat4ProjectionRight * m_mat4eyePosRight * m_mat4HMDPose; + } + + return matMVP; +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +void CMainApplication::UpdateHMDMatrixPose() +{ + if ( !m_pHMD ) + return; + + vr::VRCompositor()->WaitGetPoses(m_rTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0 ); + + m_iValidPoseCount = 0; + m_strPoseClasses = ""; + for ( int nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; ++nDevice ) + { + if ( m_rTrackedDevicePose[nDevice].bPoseIsValid ) + { + m_iValidPoseCount++; + m_rmat4DevicePose[nDevice] = ConvertSteamVRMatrixToMatrix4( 
m_rTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking ); + if (m_rDevClassChar[nDevice]==0) + { + switch (m_pHMD->GetTrackedDeviceClass(nDevice)) + { + case vr::TrackedDeviceClass_Controller: m_rDevClassChar[nDevice] = 'C'; break; + case vr::TrackedDeviceClass_HMD: m_rDevClassChar[nDevice] = 'H'; break; + case vr::TrackedDeviceClass_Invalid: m_rDevClassChar[nDevice] = 'I'; break; + case vr::TrackedDeviceClass_Other: m_rDevClassChar[nDevice] = 'O'; break; + case vr::TrackedDeviceClass_TrackingReference: m_rDevClassChar[nDevice] = 'T'; break; + default: m_rDevClassChar[nDevice] = '?'; break; + } + } + m_strPoseClasses += m_rDevClassChar[nDevice]; + } + } + + if ( m_rTrackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].bPoseIsValid ) + { + m_mat4HMDPose = m_rmat4DevicePose[vr::k_unTrackedDeviceIndex_Hmd].invert(); + } +} + + +//----------------------------------------------------------------------------- +// Purpose: Finds a render model we've already loaded or loads a new one +//----------------------------------------------------------------------------- +CGLRenderModel *CMainApplication::FindOrLoadRenderModel( const char *pchRenderModelName ) +{ + CGLRenderModel *pRenderModel = NULL; + for( std::vector< CGLRenderModel * >::iterator i = m_vecRenderModels.begin(); i != m_vecRenderModels.end(); i++ ) + { + if( !stricmp( (*i)->GetName().c_str(), pchRenderModelName ) ) + { + pRenderModel = *i; + break; + } + } + + // load the model if we didn't find one + if( !pRenderModel ) + { + vr::RenderModel_t *pModel; + vr::EVRRenderModelError error; + while ( 1 ) + { + error = vr::VRRenderModels()->LoadRenderModel_Async( pchRenderModelName, &pModel ); + if ( error != vr::VRRenderModelError_Loading ) + break; + + ThreadSleep( 1 ); + } + + if ( error != vr::VRRenderModelError_None ) + { + b3Printf( "Unable to load render model %s - %s\n", pchRenderModelName, vr::VRRenderModels()->GetRenderModelErrorNameFromEnum( error ) ); + return NULL; // move on to the next tracked device + } + + vr::RenderModel_TextureMap_t *pTexture; + while ( 1 ) + { + error = vr::VRRenderModels()->LoadTexture_Async( pModel->diffuseTextureId, &pTexture ); + if ( error != vr::VRRenderModelError_Loading ) + break; + + ThreadSleep( 1 ); + } + + if ( error != vr::VRRenderModelError_None ) + { + b3Printf( "Unable to load render texture id:%d for render model %s\n", pModel->diffuseTextureId, pchRenderModelName ); + vr::VRRenderModels()->FreeRenderModel( pModel ); + return NULL; // move on to the next tracked device + } + + pRenderModel = new CGLRenderModel( pchRenderModelName ); + if ( !pRenderModel->BInit( *pModel, *pTexture ) ) + { + b3Printf( "Unable to create GL model from render model %s\n", pchRenderModelName ); + delete pRenderModel; + pRenderModel = NULL; + } + else + { + m_vecRenderModels.push_back( pRenderModel ); + } + vr::VRRenderModels()->FreeRenderModel( pModel ); + vr::VRRenderModels()->FreeTexture( pTexture ); + } + return pRenderModel; +} + + +//----------------------------------------------------------------------------- +// Purpose: Create/destroy GL a Render Model for a single tracked device +//----------------------------------------------------------------------------- +void CMainApplication::SetupRenderModelForTrackedDevice( vr::TrackedDeviceIndex_t unTrackedDeviceIndex ) +{ + if( unTrackedDeviceIndex >= vr::k_unMaxTrackedDeviceCount ) + return; + + // try to find a model we've already set up + std::string sRenderModelName = GetTrackedDeviceString( m_pHMD, unTrackedDeviceIndex, vr::Prop_RenderModelName_String ); + 
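+	// FindOrLoadRenderModel below reuses a model that was already converted to GL
+	// resources, and otherwise polls the asynchronous SteamVR loader until the
+	// model and its diffuse texture are available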
CGLRenderModel *pRenderModel = FindOrLoadRenderModel( sRenderModelName.c_str() ); + if( !pRenderModel ) + { + std::string sTrackingSystemName = GetTrackedDeviceString( m_pHMD, unTrackedDeviceIndex, vr::Prop_TrackingSystemName_String ); + b3Printf( "Unable to load render model for tracked device %d (%s.%s)", unTrackedDeviceIndex, sTrackingSystemName.c_str(), sRenderModelName.c_str() ); + } + else + { + m_rTrackedDeviceToRenderModel[ unTrackedDeviceIndex ] = pRenderModel; + m_rbShowTrackedDevice[ unTrackedDeviceIndex ] = true; + } +} + + +//----------------------------------------------------------------------------- +// Purpose: Create/destroy GL Render Models +//----------------------------------------------------------------------------- +void CMainApplication::SetupRenderModels() +{ + memset( m_rTrackedDeviceToRenderModel, 0, sizeof( m_rTrackedDeviceToRenderModel ) ); + + if( !m_pHMD ) + return; + + for( uint32_t unTrackedDevice = vr::k_unTrackedDeviceIndex_Hmd + 1; unTrackedDevice < vr::k_unMaxTrackedDeviceCount; unTrackedDevice++ ) + { + if( !m_pHMD->IsTrackedDeviceConnected( unTrackedDevice ) ) + continue; + + SetupRenderModelForTrackedDevice( unTrackedDevice ); + } + +} + + +//----------------------------------------------------------------------------- +// Purpose: Converts a SteamVR matrix to our local matrix class +//----------------------------------------------------------------------------- +Matrix4 CMainApplication::ConvertSteamVRMatrixToMatrix4( const vr::HmdMatrix34_t &matPose ) +{ + Matrix4 matrixObj( + matPose.m[0][0], matPose.m[1][0], matPose.m[2][0], 0.0, + matPose.m[0][1], matPose.m[1][1], matPose.m[2][1], 0.0, + matPose.m[0][2], matPose.m[1][2], matPose.m[2][2], 0.0, + matPose.m[0][3], matPose.m[1][3], matPose.m[2][3], 1.0f + ); + return matrixObj; +} + + +//----------------------------------------------------------------------------- +// Purpose: Create/destroy GL Render Models +//----------------------------------------------------------------------------- +CGLRenderModel::CGLRenderModel( const std::string & sRenderModelName ) + : m_sModelName( sRenderModelName ) +{ + m_glIndexBuffer = 0; + m_glVertArray = 0; + m_glVertBuffer = 0; + m_glTexture = 0; +} + + +CGLRenderModel::~CGLRenderModel() +{ + Cleanup(); +} + + +//----------------------------------------------------------------------------- +// Purpose: Allocates and populates the GL resources for a render model +//----------------------------------------------------------------------------- +bool CGLRenderModel::BInit( const vr::RenderModel_t & vrModel, const vr::RenderModel_TextureMap_t & vrDiffuseTexture ) +{ + // create and bind a VAO to hold state for this model + glGenVertexArrays( 1, &m_glVertArray ); + glBindVertexArray( m_glVertArray ); + + // Populate a vertex buffer + glGenBuffers( 1, &m_glVertBuffer ); + glBindBuffer( GL_ARRAY_BUFFER, m_glVertBuffer ); + glBufferData( GL_ARRAY_BUFFER, sizeof( vr::RenderModel_Vertex_t ) * vrModel.unVertexCount, vrModel.rVertexData, GL_STATIC_DRAW ); + + // Identify the components in the vertex buffer + glEnableVertexAttribArray( 0 ); + glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, sizeof( vr::RenderModel_Vertex_t ), (void *)offsetof( vr::RenderModel_Vertex_t, vPosition ) ); + glEnableVertexAttribArray( 1 ); + glVertexAttribPointer( 1, 3, GL_FLOAT, GL_FALSE, sizeof( vr::RenderModel_Vertex_t ), (void *)offsetof( vr::RenderModel_Vertex_t, vNormal ) ); + glEnableVertexAttribArray( 2 ); + glVertexAttribPointer( 2, 2, GL_FLOAT, GL_FALSE, sizeof( vr::RenderModel_Vertex_t ), 
(void *)offsetof( vr::RenderModel_Vertex_t, rfTextureCoord ) ); + + // Create and populate the index buffer + glGenBuffers( 1, &m_glIndexBuffer ); + glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, m_glIndexBuffer ); + glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( uint16_t ) * vrModel.unTriangleCount * 3, vrModel.rIndexData, GL_STATIC_DRAW ); + + glBindVertexArray( 0 ); + + // create and populate the texture + glGenTextures(1, &m_glTexture ); + glBindTexture( GL_TEXTURE_2D, m_glTexture ); + + glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, vrDiffuseTexture.unWidth, vrDiffuseTexture.unHeight, + 0, GL_RGBA, GL_UNSIGNED_BYTE, vrDiffuseTexture.rubTextureMapData ); + + // If this renders black ask McJohn what's wrong. + glGenerateMipmap(GL_TEXTURE_2D); + + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + + GLfloat fLargest; + glGetFloatv( GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &fLargest ); + glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, fLargest ); + + glBindTexture( GL_TEXTURE_2D, 0 ); + + m_unVertexCount = vrModel.unTriangleCount * 3; + + return true; +} + + +//----------------------------------------------------------------------------- +// Purpose: Frees the GL resources for a render model +//----------------------------------------------------------------------------- +void CGLRenderModel::Cleanup() +{ + if( m_glVertBuffer ) + { + glDeleteBuffers(1, &m_glIndexBuffer); + glDeleteVertexArrays( 1, &m_glVertArray ); + glDeleteBuffers(1, &m_glVertBuffer); + m_glIndexBuffer = 0; + m_glVertArray = 0; + m_glVertBuffer = 0; + } +} + + +//----------------------------------------------------------------------------- +// Purpose: Draws the render model +//----------------------------------------------------------------------------- +void CGLRenderModel::Draw() +{ + glBindVertexArray( m_glVertArray ); + + glActiveTexture( GL_TEXTURE0 ); + glBindTexture( GL_TEXTURE_2D, m_glTexture ); + + glDrawElements( GL_TRIANGLES, m_unVertexCount, GL_UNSIGNED_SHORT, 0 ); + + glBindVertexArray( 0 ); +} + + +//----------------------------------------------------------------------------- +// Purpose: +//----------------------------------------------------------------------------- +int main(int argc, char *argv[]) +{ + CMainApplication *pMainApplication = new CMainApplication( argc, argv ); + + if (!pMainApplication->BInit()) + { + pMainApplication->Shutdown(); + return 1; + } + + pMainApplication->RunMainLoop(); + + pMainApplication->Shutdown(); + + return 0; +} +#endif //BT_ENABLE_VR \ No newline at end of file diff --git a/examples/ThirdPartyLibs/openvr/bin/linux32/libopenvr_api.so b/examples/ThirdPartyLibs/openvr/bin/linux32/libopenvr_api.so new file mode 100644 index 000000000..a25054508 Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/bin/linux32/libopenvr_api.so differ diff --git a/examples/ThirdPartyLibs/openvr/bin/linux64/libopenvr_api.so b/examples/ThirdPartyLibs/openvr/bin/linux64/libopenvr_api.so new file mode 100644 index 000000000..c3c6d47ca Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/bin/linux64/libopenvr_api.so differ diff --git a/examples/ThirdPartyLibs/openvr/bin/osx32/libopenvr_api.dylib b/examples/ThirdPartyLibs/openvr/bin/osx32/libopenvr_api.dylib new file mode 100644 index 000000000..b296e20b8 Binary files /dev/null and 
b/examples/ThirdPartyLibs/openvr/bin/osx32/libopenvr_api.dylib differ diff --git a/examples/ThirdPartyLibs/openvr/bin/win32/openvr_api.dll b/examples/ThirdPartyLibs/openvr/bin/win32/openvr_api.dll new file mode 100644 index 000000000..d2b37d56b Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/bin/win32/openvr_api.dll differ diff --git a/examples/ThirdPartyLibs/openvr/bin/win64/openvr_api.dll b/examples/ThirdPartyLibs/openvr/bin/win64/openvr_api.dll new file mode 100644 index 000000000..d13791a39 Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/bin/win64/openvr_api.dll differ diff --git a/examples/ThirdPartyLibs/openvr/headers/openvr.h b/examples/ThirdPartyLibs/openvr/headers/openvr.h new file mode 100644 index 000000000..df727a3b5 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/headers/openvr.h @@ -0,0 +1,3227 @@ +#pragma once + +// openvr.h +//========= Copyright Valve Corporation ============// +// Dynamically generated file. Do not modify this file directly. + +#ifndef _OPENVR_API +#define _OPENVR_API + +#include + + + +// vrtypes.h +#ifndef _INCLUDE_VRTYPES_H +#define _INCLUDE_VRTYPES_H + +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +typedef void* glSharedTextureHandle_t; +typedef int32_t glInt_t; +typedef uint32_t glUInt_t; + +// right-handed system +// +y is up +// +x is to the right +// -z is going away from you +// Distance unit is meters +struct HmdMatrix34_t +{ + float m[3][4]; +}; + +struct HmdMatrix44_t +{ + float m[4][4]; +}; + +struct HmdVector3_t +{ + float v[3]; +}; + +struct HmdVector4_t +{ + float v[4]; +}; + +struct HmdVector3d_t +{ + double v[3]; +}; + +struct HmdVector2_t +{ + float v[2]; +}; + +struct HmdQuaternion_t +{ + double w, x, y, z; +}; + +struct HmdColor_t +{ + float r, g, b, a; +}; + +struct HmdQuad_t +{ + HmdVector3_t vCorners[ 4 ]; +}; + +struct HmdRect2_t +{ + HmdVector2_t vTopLeft; + HmdVector2_t vBottomRight; +}; + +/** Used to return the post-distortion UVs for each color channel. +* UVs range from 0 to 1 with 0,0 in the upper left corner of the +* source render target. The 0,0 to 1,1 range covers a single eye. */ +struct DistortionCoordinates_t +{ + float rfRed[2]; + float rfGreen[2]; + float rfBlue[2]; +}; + +enum EVREye +{ + Eye_Left = 0, + Eye_Right = 1 +}; + +enum EGraphicsAPIConvention +{ + API_DirectX = 0, // Normalized Z goes from 0 at the viewer to 1 at the far clip plane + API_OpenGL = 1, // Normalized Z goes from 1 at the viewer to -1 at the far clip plane +}; + +enum EColorSpace +{ + ColorSpace_Auto = 0, // Assumes 'gamma' for 8-bit per component formats, otherwise 'linear'. This mirrors the DXGI formats which have _SRGB variants. + ColorSpace_Gamma = 1, // Texture data can be displayed directly on the display without any conversion (a.k.a. display native format). + ColorSpace_Linear = 2, // Same as gamma but has been converted to a linear representation using DXGI's sRGB conversion algorithm. +}; + +struct Texture_t +{ + void* handle; // Native d3d texture pointer or GL texture id. 
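
// [Editor's note] Illustrative annotation, not part of this patch: for OpenGL, 'handle'
// carries the GLuint texture name cast through an integer type, and a finished pair of eye
// images is handed to IVRCompositor::Submit() (declared later in this header) roughly like
// this. resolveTexLeft/resolveTexRight are hypothetical GLuint resolve-texture ids, e.g. the
// m_nResolveTextureId fields used by the sample above:
//
//   vr::Texture_t leftEye  = { (void*)(uintptr_t)resolveTexLeft,  vr::API_OpenGL, vr::ColorSpace_Gamma };
//   vr::Texture_t rightEye = { (void*)(uintptr_t)resolveTexRight, vr::API_OpenGL, vr::ColorSpace_Gamma };
//   vr::VRCompositor()->Submit( vr::Eye_Left,  &leftEye );
//   vr::VRCompositor()->Submit( vr::Eye_Right, &rightEye );
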
+ EGraphicsAPIConvention eType; + EColorSpace eColorSpace; +}; + +enum ETrackingResult +{ + TrackingResult_Uninitialized = 1, + + TrackingResult_Calibrating_InProgress = 100, + TrackingResult_Calibrating_OutOfRange = 101, + + TrackingResult_Running_OK = 200, + TrackingResult_Running_OutOfRange = 201, +}; + +static const uint32_t k_unTrackingStringSize = 32; +static const uint32_t k_unMaxDriverDebugResponseSize = 32768; + +/** Used to pass device IDs to API calls */ +typedef uint32_t TrackedDeviceIndex_t; +static const uint32_t k_unTrackedDeviceIndex_Hmd = 0; +static const uint32_t k_unMaxTrackedDeviceCount = 16; +static const uint32_t k_unTrackedDeviceIndexOther = 0xFFFFFFFE; +static const uint32_t k_unTrackedDeviceIndexInvalid = 0xFFFFFFFF; + +/** Describes what kind of object is being tracked at a given ID */ +enum ETrackedDeviceClass +{ + TrackedDeviceClass_Invalid = 0, // the ID was not valid. + TrackedDeviceClass_HMD = 1, // Head-Mounted Displays + TrackedDeviceClass_Controller = 2, // Tracked controllers + TrackedDeviceClass_TrackingReference = 4, // Camera and base stations that serve as tracking reference points + + TrackedDeviceClass_Other = 1000, +}; + + +/** Describes what specific role associated with a tracked device */ +enum ETrackedControllerRole +{ + TrackedControllerRole_Invalid = 0, // Invalid value for controller type + TrackedControllerRole_LeftHand = 1, // Tracked device associated with the left hand + TrackedControllerRole_RightHand = 2, // Tracked device associated with the right hand +}; + + +/** describes a single pose for a tracked object */ +struct TrackedDevicePose_t +{ + HmdMatrix34_t mDeviceToAbsoluteTracking; + HmdVector3_t vVelocity; // velocity in tracker space in m/s + HmdVector3_t vAngularVelocity; // angular velocity in radians/s (?) + ETrackingResult eTrackingResult; + bool bPoseIsValid; + + // This indicates that there is a device connected for this spot in the pose array. + // It could go from true to false if the user unplugs the device. + bool bDeviceIsConnected; +}; + +/** Identifies which style of tracking origin the application wants to use +* for the poses it is requesting */ +enum ETrackingUniverseOrigin +{ + TrackingUniverseSeated = 0, // Poses are provided relative to the seated zero pose + TrackingUniverseStanding = 1, // Poses are provided relative to the safe bounds configured by the user + TrackingUniverseRawAndUncalibrated = 2, // Poses are provided in the coordinate system defined by the driver. You probably don't want this one. +}; + + +/** Each entry in this enum represents a property that can be retrieved about a +* tracked device. Many fields are only valid for one ETrackedDeviceClass. 
*/ +enum ETrackedDeviceProperty +{ + // general properties that apply to all device classes + Prop_TrackingSystemName_String = 1000, + Prop_ModelNumber_String = 1001, + Prop_SerialNumber_String = 1002, + Prop_RenderModelName_String = 1003, + Prop_WillDriftInYaw_Bool = 1004, + Prop_ManufacturerName_String = 1005, + Prop_TrackingFirmwareVersion_String = 1006, + Prop_HardwareRevision_String = 1007, + Prop_AllWirelessDongleDescriptions_String = 1008, + Prop_ConnectedWirelessDongle_String = 1009, + Prop_DeviceIsWireless_Bool = 1010, + Prop_DeviceIsCharging_Bool = 1011, + Prop_DeviceBatteryPercentage_Float = 1012, // 0 is empty, 1 is full + Prop_StatusDisplayTransform_Matrix34 = 1013, + Prop_Firmware_UpdateAvailable_Bool = 1014, + Prop_Firmware_ManualUpdate_Bool = 1015, + Prop_Firmware_ManualUpdateURL_String = 1016, + Prop_HardwareRevision_Uint64 = 1017, + Prop_FirmwareVersion_Uint64 = 1018, + Prop_FPGAVersion_Uint64 = 1019, + Prop_VRCVersion_Uint64 = 1020, + Prop_RadioVersion_Uint64 = 1021, + Prop_DongleVersion_Uint64 = 1022, + Prop_BlockServerShutdown_Bool = 1023, + Prop_CanUnifyCoordinateSystemWithHmd_Bool = 1024, + Prop_ContainsProximitySensor_Bool = 1025, + Prop_DeviceProvidesBatteryStatus_Bool = 1026, + Prop_DeviceCanPowerOff_Bool = 1027, + Prop_Firmware_ProgrammingTarget_String = 1028, + Prop_DeviceClass_Int32 = 1029, + Prop_HasCamera_Bool = 1030, + Prop_DriverVersion_String = 1031, + Prop_Firmware_ForceUpdateRequired_Bool = 1032, + + // Properties that are unique to TrackedDeviceClass_HMD + Prop_ReportsTimeSinceVSync_Bool = 2000, + Prop_SecondsFromVsyncToPhotons_Float = 2001, + Prop_DisplayFrequency_Float = 2002, + Prop_UserIpdMeters_Float = 2003, + Prop_CurrentUniverseId_Uint64 = 2004, + Prop_PreviousUniverseId_Uint64 = 2005, + Prop_DisplayFirmwareVersion_Uint64 = 2006, + Prop_IsOnDesktop_Bool = 2007, + Prop_DisplayMCType_Int32 = 2008, + Prop_DisplayMCOffset_Float = 2009, + Prop_DisplayMCScale_Float = 2010, + Prop_EdidVendorID_Int32 = 2011, + Prop_DisplayMCImageLeft_String = 2012, + Prop_DisplayMCImageRight_String = 2013, + Prop_DisplayGCBlackClamp_Float = 2014, + Prop_EdidProductID_Int32 = 2015, + Prop_CameraToHeadTransform_Matrix34 = 2016, + Prop_DisplayGCType_Int32 = 2017, + Prop_DisplayGCOffset_Float = 2018, + Prop_DisplayGCScale_Float = 2019, + Prop_DisplayGCPrescale_Float = 2020, + Prop_DisplayGCImage_String = 2021, + Prop_LensCenterLeftU_Float = 2022, + Prop_LensCenterLeftV_Float = 2023, + Prop_LensCenterRightU_Float = 2024, + Prop_LensCenterRightV_Float = 2025, + Prop_UserHeadToEyeDepthMeters_Float = 2026, + Prop_CameraFirmwareVersion_Uint64 = 2027, + Prop_CameraFirmwareDescription_String = 2028, + Prop_DisplayFPGAVersion_Uint64 = 2029, + Prop_DisplayBootloaderVersion_Uint64 = 2030, + Prop_DisplayHardwareVersion_Uint64 = 2031, + Prop_AudioFirmwareVersion_Uint64 = 2032, + Prop_CameraCompatibilityMode_Int32 = 2033, + Prop_ScreenshotHorizontalFieldOfViewDegrees_Float = 2034, + Prop_ScreenshotVerticalFieldOfViewDegrees_Float = 2035, + Prop_DisplaySuppressed_Bool = 2036, + + // Properties that are unique to TrackedDeviceClass_Controller + Prop_AttachedDeviceId_String = 3000, + Prop_SupportedButtons_Uint64 = 3001, + Prop_Axis0Type_Int32 = 3002, // Return value is of type EVRControllerAxisType + Prop_Axis1Type_Int32 = 3003, // Return value is of type EVRControllerAxisType + Prop_Axis2Type_Int32 = 3004, // Return value is of type EVRControllerAxisType + Prop_Axis3Type_Int32 = 3005, // Return value is of type EVRControllerAxisType + Prop_Axis4Type_Int32 = 3006, // Return value is of type 
EVRControllerAxisType + + // Properties that are unique to TrackedDeviceClass_TrackingReference + Prop_FieldOfViewLeftDegrees_Float = 4000, + Prop_FieldOfViewRightDegrees_Float = 4001, + Prop_FieldOfViewTopDegrees_Float = 4002, + Prop_FieldOfViewBottomDegrees_Float = 4003, + Prop_TrackingRangeMinimumMeters_Float = 4004, + Prop_TrackingRangeMaximumMeters_Float = 4005, + Prop_ModeLabel_String = 4006, + + // Vendors are free to expose private debug data in this reserved region + Prop_VendorSpecific_Reserved_Start = 10000, + Prop_VendorSpecific_Reserved_End = 10999, +}; + +/** No string property will ever be longer than this length */ +static const uint32_t k_unMaxPropertyStringSize = 32 * 1024; + +/** Used to return errors that occur when reading properties. */ +enum ETrackedPropertyError +{ + TrackedProp_Success = 0, + TrackedProp_WrongDataType = 1, + TrackedProp_WrongDeviceClass = 2, + TrackedProp_BufferTooSmall = 3, + TrackedProp_UnknownProperty = 4, + TrackedProp_InvalidDevice = 5, + TrackedProp_CouldNotContactServer = 6, + TrackedProp_ValueNotProvidedByDevice = 7, + TrackedProp_StringExceedsMaximumLength = 8, + TrackedProp_NotYetAvailable = 9, // The property value isn't known yet, but is expected soon. Call again later. +}; + +/** Allows the application to control what part of the provided texture will be used in the +* frame buffer. */ +struct VRTextureBounds_t +{ + float uMin, vMin; + float uMax, vMax; +}; + + +/** Allows the application to control how scene textures are used by the compositor when calling Submit. */ +enum EVRSubmitFlags +{ + // Simple render path. App submits rendered left and right eye images with no lens distortion correction applied. + Submit_Default = 0x00, + + // App submits final left and right eye images with lens distortion already applied (lens distortion makes the images appear + // barrel distorted with chromatic aberration correction applied). The app would have used the data returned by + // vr::IVRSystem::ComputeDistortion() to apply the correct distortion to the rendered images before calling Submit(). + Submit_LensDistortionAlreadyApplied = 0x01, + + // If the texture pointer passed in is actually a renderbuffer (e.g. for MSAA in OpenGL) then set this flag. 
+ Submit_GlRenderBuffer = 0x02, +}; + + +/** Status of the overall system or tracked objects */ +enum EVRState +{ + VRState_Undefined = -1, + VRState_Off = 0, + VRState_Searching = 1, + VRState_Searching_Alert = 2, + VRState_Ready = 3, + VRState_Ready_Alert = 4, + VRState_NotReady = 5, + VRState_Standby = 6, +}; + +/** The types of events that could be posted (and what the parameters mean for each event type) */ +enum EVREventType +{ + VREvent_None = 0, + + VREvent_TrackedDeviceActivated = 100, + VREvent_TrackedDeviceDeactivated = 101, + VREvent_TrackedDeviceUpdated = 102, + VREvent_TrackedDeviceUserInteractionStarted = 103, + VREvent_TrackedDeviceUserInteractionEnded = 104, + VREvent_IpdChanged = 105, + VREvent_EnterStandbyMode = 106, + VREvent_LeaveStandbyMode = 107, + VREvent_TrackedDeviceRoleChanged = 108, + + VREvent_ButtonPress = 200, // data is controller + VREvent_ButtonUnpress = 201, // data is controller + VREvent_ButtonTouch = 202, // data is controller + VREvent_ButtonUntouch = 203, // data is controller + + VREvent_MouseMove = 300, // data is mouse + VREvent_MouseButtonDown = 301, // data is mouse + VREvent_MouseButtonUp = 302, // data is mouse + VREvent_FocusEnter = 303, // data is overlay + VREvent_FocusLeave = 304, // data is overlay + VREvent_Scroll = 305, // data is mouse + VREvent_TouchPadMove = 306, // data is mouse + + VREvent_InputFocusCaptured = 400, // data is process DEPRECATED + VREvent_InputFocusReleased = 401, // data is process DEPRECATED + VREvent_SceneFocusLost = 402, // data is process + VREvent_SceneFocusGained = 403, // data is process + VREvent_SceneApplicationChanged = 404, // data is process - The App actually drawing the scene changed (usually to or from the compositor) + VREvent_SceneFocusChanged = 405, // data is process - New app got access to draw the scene + VREvent_InputFocusChanged = 406, // data is process + VREvent_SceneApplicationSecondaryRenderingStarted = 407, // data is process + + VREvent_HideRenderModels = 410, // Sent to the scene application to request hiding render models temporarily + VREvent_ShowRenderModels = 411, // Sent to the scene application to request restoring render model visibility + + VREvent_OverlayShown = 500, + VREvent_OverlayHidden = 501, + VREvent_DashboardActivated = 502, + VREvent_DashboardDeactivated = 503, + VREvent_DashboardThumbSelected = 504, // Sent to the overlay manager - data is overlay + VREvent_DashboardRequested = 505, // Sent to the overlay manager - data is overlay + VREvent_ResetDashboard = 506, // Send to the overlay manager + VREvent_RenderToast = 507, // Send to the dashboard to render a toast - data is the notification ID + VREvent_ImageLoaded = 508, // Sent to overlays when a SetOverlayRaw or SetOverlayFromFile call finishes loading + VREvent_ShowKeyboard = 509, // Sent to keyboard renderer in the dashboard to invoke it + VREvent_HideKeyboard = 510, // Sent to keyboard renderer in the dashboard to hide it + VREvent_OverlayGamepadFocusGained = 511, // Sent to an overlay when IVROverlay::SetFocusOverlay is called on it + VREvent_OverlayGamepadFocusLost = 512, // Send to an overlay when it previously had focus and IVROverlay::SetFocusOverlay is called on something else + VREvent_OverlaySharedTextureChanged = 513, + VREvent_DashboardGuideButtonDown = 514, + VREvent_DashboardGuideButtonUp = 515, + VREvent_ScreenshotTriggered = 516, // Screenshot button combo was pressed, Dashboard should request a screenshot + VREvent_ImageFailed = 517, // Sent to overlays when a SetOverlayRaw or SetOverlayfromFail 
fails to load + + // Screenshot API + VREvent_RequestScreenshot = 520, // Sent by vrclient application to compositor to take a screenshot + VREvent_ScreenshotTaken = 521, // Sent by compositor to the application that the screenshot has been taken + VREvent_ScreenshotFailed = 522, // Sent by compositor to the application that the screenshot failed to be taken + VREvent_SubmitScreenshotToDashboard = 523, // Sent by compositor to the dashboard that a completed screenshot was submitted + + VREvent_Notification_Shown = 600, + VREvent_Notification_Hidden = 601, + VREvent_Notification_BeginInteraction = 602, + VREvent_Notification_Destroyed = 603, + + VREvent_Quit = 700, // data is process + VREvent_ProcessQuit = 701, // data is process + VREvent_QuitAborted_UserPrompt = 702, // data is process + VREvent_QuitAcknowledged = 703, // data is process + VREvent_DriverRequestedQuit = 704, // The driver has requested that SteamVR shut down + + VREvent_ChaperoneDataHasChanged = 800, + VREvent_ChaperoneUniverseHasChanged = 801, + VREvent_ChaperoneTempDataHasChanged = 802, + VREvent_ChaperoneSettingsHaveChanged = 803, + VREvent_SeatedZeroPoseReset = 804, + + VREvent_AudioSettingsHaveChanged = 820, + + VREvent_BackgroundSettingHasChanged = 850, + VREvent_CameraSettingsHaveChanged = 851, + VREvent_ReprojectionSettingHasChanged = 852, + VREvent_ModelSkinSettingsHaveChanged = 853, + VREvent_EnvironmentSettingsHaveChanged = 854, + + VREvent_StatusUpdate = 900, + + VREvent_MCImageUpdated = 1000, + + VREvent_FirmwareUpdateStarted = 1100, + VREvent_FirmwareUpdateFinished = 1101, + + VREvent_KeyboardClosed = 1200, + VREvent_KeyboardCharInput = 1201, + VREvent_KeyboardDone = 1202, // Sent when DONE button clicked on keyboard + + VREvent_ApplicationTransitionStarted = 1300, + VREvent_ApplicationTransitionAborted = 1301, + VREvent_ApplicationTransitionNewAppStarted = 1302, + VREvent_ApplicationListUpdated = 1303, + + VREvent_Compositor_MirrorWindowShown = 1400, + VREvent_Compositor_MirrorWindowHidden = 1401, + VREvent_Compositor_ChaperoneBoundsShown = 1410, + VREvent_Compositor_ChaperoneBoundsHidden = 1411, + + VREvent_TrackedCamera_StartVideoStream = 1500, + VREvent_TrackedCamera_StopVideoStream = 1501, + VREvent_TrackedCamera_PauseVideoStream = 1502, + VREvent_TrackedCamera_ResumeVideoStream = 1503, + + VREvent_PerformanceTest_EnableCapture = 1600, + VREvent_PerformanceTest_DisableCapture = 1601, + VREvent_PerformanceTest_FidelityLevel = 1602, + + // Vendors are free to expose private events in this reserved region + VREvent_VendorSpecific_Reserved_Start = 10000, + VREvent_VendorSpecific_Reserved_End = 19999, +}; + + +/** Level of Hmd activity */ +enum EDeviceActivityLevel +{ + k_EDeviceActivityLevel_Unknown = -1, + k_EDeviceActivityLevel_Idle = 0, + k_EDeviceActivityLevel_UserInteraction = 1, + k_EDeviceActivityLevel_UserInteraction_Timeout = 2, + k_EDeviceActivityLevel_Standby = 3, +}; + + +/** VR controller button and axis IDs */ +enum EVRButtonId +{ + k_EButton_System = 0, + k_EButton_ApplicationMenu = 1, + k_EButton_Grip = 2, + k_EButton_DPad_Left = 3, + k_EButton_DPad_Up = 4, + k_EButton_DPad_Right = 5, + k_EButton_DPad_Down = 6, + k_EButton_A = 7, + + k_EButton_Axis0 = 32, + k_EButton_Axis1 = 33, + k_EButton_Axis2 = 34, + k_EButton_Axis3 = 35, + k_EButton_Axis4 = 36, + + // aliases for well known controllers + k_EButton_SteamVR_Touchpad = k_EButton_Axis0, + k_EButton_SteamVR_Trigger = k_EButton_Axis1, + + k_EButton_Dashboard_Back = k_EButton_Grip, + + k_EButton_Max = 64 +}; + +inline uint64_t 
ButtonMaskFromId( EVRButtonId id ) { return 1ull << id; } + +/** used for controller button events */ +struct VREvent_Controller_t +{ + uint32_t button; // EVRButtonId enum +}; + + +/** used for simulated mouse events in overlay space */ +enum EVRMouseButton +{ + VRMouseButton_Left = 0x0001, + VRMouseButton_Right = 0x0002, + VRMouseButton_Middle = 0x0004, +}; + + +/** used for simulated mouse events in overlay space */ +struct VREvent_Mouse_t +{ + float x, y; // co-ords are in GL space, bottom left of the texture is 0,0 + uint32_t button; // EVRMouseButton enum +}; + +/** used for simulated mouse wheel scroll in overlay space */ +struct VREvent_Scroll_t +{ + float xdelta, ydelta; // movement in fraction of the pad traversed since last delta, 1.0 for a full swipe + uint32_t repeatCount; +}; + +/** when in mouse input mode you can receive data from the touchpad, these events are only sent if the users finger + is on the touchpad (or just released from it) +**/ +struct VREvent_TouchPadMove_t +{ + // true if the users finger is detected on the touch pad + bool bFingerDown; + + // How long the finger has been down in seconds + float flSecondsFingerDown; + + // These values indicate the starting finger position (so you can do some basic swipe stuff) + float fValueXFirst; + float fValueYFirst; + + // This is the raw sampled coordinate without deadzoning + float fValueXRaw; + float fValueYRaw; +}; + +/** notification related events. Details will still change at this point */ +struct VREvent_Notification_t +{ + uint64_t ulUserValue; + uint32_t notificationId; +}; + +/** Used for events about processes */ +struct VREvent_Process_t +{ + uint32_t pid; + uint32_t oldPid; + bool bForced; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Overlay_t +{ + uint64_t overlayHandle; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Status_t +{ + uint32_t statusState; // EVRState enum +}; + +/** Used for keyboard events **/ +struct VREvent_Keyboard_t +{ + char cNewInput[8]; // Up to 11 bytes of new input + uint64_t uUserValue; // Possible flags about the new input +}; + +struct VREvent_Ipd_t +{ + float ipdMeters; +}; + +struct VREvent_Chaperone_t +{ + uint64_t m_nPreviousUniverse; + uint64_t m_nCurrentUniverse; +}; + +/** Not actually used for any events */ +struct VREvent_Reserved_t +{ + uint64_t reserved0; + uint64_t reserved1; +}; + +struct VREvent_PerformanceTest_t +{ + uint32_t m_nFidelityLevel; +}; + +struct VREvent_SeatedZeroPoseReset_t +{ + bool bResetBySystemMenu; +}; + +struct VREvent_Screenshot_t +{ + uint32_t handle; + uint32_t type; +}; + +/** If you change this you must manually update openvr_interop.cs.py */ +typedef union +{ + VREvent_Reserved_t reserved; + VREvent_Controller_t controller; + VREvent_Mouse_t mouse; + VREvent_Scroll_t scroll; + VREvent_Process_t process; + VREvent_Notification_t notification; + VREvent_Overlay_t overlay; + VREvent_Status_t status; + VREvent_Keyboard_t keyboard; + VREvent_Ipd_t ipd; + VREvent_Chaperone_t chaperone; + VREvent_PerformanceTest_t performanceTest; + VREvent_TouchPadMove_t touchPadMove; + VREvent_SeatedZeroPoseReset_t seatedZeroPoseReset; + VREvent_Screenshot_t screenshot; +} VREvent_Data_t; + +/** An event posted by the server to all running applications */ +struct VREvent_t +{ + uint32_t eventType; // EVREventType enum + TrackedDeviceIndex_t trackedDeviceIndex; + float eventAgeSeconds; + // event data must be the end of the struct as its size is variable + VREvent_Data_t data; +}; + + +/** The mesh to draw into the 
stencil (or depth) buffer to perform +* early stencil (or depth) kills of pixels that will never appear on the HMD. +* This mesh draws on all the pixels that will be hidden after distortion. +* +* If the HMD does not provide a visible area mesh pVertexData will be +* NULL and unTriangleCount will be 0. */ +struct HiddenAreaMesh_t +{ + const HmdVector2_t *pVertexData; + uint32_t unTriangleCount; +}; + + +/** Identifies what kind of axis is on the controller at index n. Read this type +* with pVRSystem->Get( nControllerDeviceIndex, Prop_Axis0Type_Int32 + n ); +*/ +enum EVRControllerAxisType +{ + k_eControllerAxis_None = 0, + k_eControllerAxis_TrackPad = 1, + k_eControllerAxis_Joystick = 2, + k_eControllerAxis_Trigger = 3, // Analog trigger data is in the X axis +}; + + +/** contains information about one axis on the controller */ +struct VRControllerAxis_t +{ + float x; // Ranges from -1.0 to 1.0 for joysticks and track pads. Ranges from 0.0 to 1.0 for triggers were 0 is fully released. + float y; // Ranges from -1.0 to 1.0 for joysticks and track pads. Is always 0.0 for triggers. +}; + + +/** the number of axes in the controller state */ +static const uint32_t k_unControllerStateAxisCount = 5; + + +/** Holds all the state of a controller at one moment in time. */ +struct VRControllerState001_t +{ + // If packet num matches that on your prior call, then the controller state hasn't been changed since + // your last call and there is no need to process it + uint32_t unPacketNum; + + // bit flags for each of the buttons. Use ButtonMaskFromId to turn an ID into a mask + uint64_t ulButtonPressed; + uint64_t ulButtonTouched; + + // Axis data for the controller's analog inputs + VRControllerAxis_t rAxis[ k_unControllerStateAxisCount ]; +}; + + +typedef VRControllerState001_t VRControllerState_t; + + +/** determines how to provide output to the application of various event processing functions. 
*/ +enum EVRControllerEventOutputType +{ + ControllerEventOutput_OSEvents = 0, + ControllerEventOutput_VREvents = 1, +}; + + + +/** Collision Bounds Style */ +enum ECollisionBoundsStyle +{ + COLLISION_BOUNDS_STYLE_BEGINNER = 0, + COLLISION_BOUNDS_STYLE_INTERMEDIATE, + COLLISION_BOUNDS_STYLE_SQUARES, + COLLISION_BOUNDS_STYLE_ADVANCED, + COLLISION_BOUNDS_STYLE_NONE, + + COLLISION_BOUNDS_STYLE_COUNT +}; + +/** Allows the application to customize how the overlay appears in the compositor */ +struct Compositor_OverlaySettings +{ + uint32_t size; // sizeof(Compositor_OverlaySettings) + bool curved, antialias; + float scale, distance, alpha; + float uOffset, vOffset, uScale, vScale; + float gridDivs, gridWidth, gridScale; + HmdMatrix44_t transform; +}; + +/** used to refer to a single VR overlay */ +typedef uint64_t VROverlayHandle_t; + +static const VROverlayHandle_t k_ulOverlayHandleInvalid = 0; + +/** Errors that can occur around VR overlays */ +enum EVROverlayError +{ + VROverlayError_None = 0, + + VROverlayError_UnknownOverlay = 10, + VROverlayError_InvalidHandle = 11, + VROverlayError_PermissionDenied = 12, + VROverlayError_OverlayLimitExceeded = 13, // No more overlays could be created because the maximum number already exist + VROverlayError_WrongVisibilityType = 14, + VROverlayError_KeyTooLong = 15, + VROverlayError_NameTooLong = 16, + VROverlayError_KeyInUse = 17, + VROverlayError_WrongTransformType = 18, + VROverlayError_InvalidTrackedDevice = 19, + VROverlayError_InvalidParameter = 20, + VROverlayError_ThumbnailCantBeDestroyed = 21, + VROverlayError_ArrayTooSmall = 22, + VROverlayError_RequestFailed = 23, + VROverlayError_InvalidTexture = 24, + VROverlayError_UnableToLoadFile = 25, + VROVerlayError_KeyboardAlreadyInUse = 26, + VROverlayError_NoNeighbor = 27, +}; + +/** enum values to pass in to VR_Init to identify whether the application will +* draw a 3D scene. */ +enum EVRApplicationType +{ + VRApplication_Other = 0, // Some other kind of application that isn't covered by the other entries + VRApplication_Scene = 1, // Application will submit 3D frames + VRApplication_Overlay = 2, // Application only interacts with overlays + VRApplication_Background = 3, // Application should not start SteamVR if it's not already running, and should not + // keep it running if everything else quits. + VRApplication_Utility = 4, // Init should not try to load any drivers. The application needs access to utility + // interfaces (like IVRSettings and IVRApplications) but not hardware. 
+ VRApplication_VRMonitor = 5, // Reserved for vrmonitor +}; + + +/** error codes for firmware */ +enum EVRFirmwareError +{ + VRFirmwareError_None = 0, + VRFirmwareError_Success = 1, + VRFirmwareError_Fail = 2, +}; + + +/** error codes for notifications */ +enum EVRNotificationError +{ + VRNotificationError_OK = 0, + VRNotificationError_InvalidNotificationId = 100, + VRNotificationError_NotificationQueueFull = 101, + VRNotificationError_InvalidOverlayHandle = 102, + VRNotificationError_SystemWithUserValueAlreadyExists = 103, +}; + + +/** error codes returned by Vr_Init */ + +// Please add adequate error description to https://developer.valvesoftware.com/w/index.php?title=Category:SteamVRHelp +enum EVRInitError +{ + VRInitError_None = 0, + VRInitError_Unknown = 1, + + VRInitError_Init_InstallationNotFound = 100, + VRInitError_Init_InstallationCorrupt = 101, + VRInitError_Init_VRClientDLLNotFound = 102, + VRInitError_Init_FileNotFound = 103, + VRInitError_Init_FactoryNotFound = 104, + VRInitError_Init_InterfaceNotFound = 105, + VRInitError_Init_InvalidInterface = 106, + VRInitError_Init_UserConfigDirectoryInvalid = 107, + VRInitError_Init_HmdNotFound = 108, + VRInitError_Init_NotInitialized = 109, + VRInitError_Init_PathRegistryNotFound = 110, + VRInitError_Init_NoConfigPath = 111, + VRInitError_Init_NoLogPath = 112, + VRInitError_Init_PathRegistryNotWritable = 113, + VRInitError_Init_AppInfoInitFailed = 114, + VRInitError_Init_Retry = 115, // Used internally to cause retries to vrserver + VRInitError_Init_InitCanceledByUser = 116, // The calling application should silently exit. The user canceled app startup + VRInitError_Init_AnotherAppLaunching = 117, + VRInitError_Init_SettingsInitFailed = 118, + VRInitError_Init_ShuttingDown = 119, + VRInitError_Init_TooManyObjects = 120, + VRInitError_Init_NoServerForBackgroundApp = 121, + VRInitError_Init_NotSupportedWithCompositor = 122, + VRInitError_Init_NotAvailableToUtilityApps = 123, + VRInitError_Init_Internal = 124, + + VRInitError_Driver_Failed = 200, + VRInitError_Driver_Unknown = 201, + VRInitError_Driver_HmdUnknown = 202, + VRInitError_Driver_NotLoaded = 203, + VRInitError_Driver_RuntimeOutOfDate = 204, + VRInitError_Driver_HmdInUse = 205, + VRInitError_Driver_NotCalibrated = 206, + VRInitError_Driver_CalibrationInvalid = 207, + VRInitError_Driver_HmdDisplayNotFound = 208, + + VRInitError_IPC_ServerInitFailed = 300, + VRInitError_IPC_ConnectFailed = 301, + VRInitError_IPC_SharedStateInitFailed = 302, + VRInitError_IPC_CompositorInitFailed = 303, + VRInitError_IPC_MutexInitFailed = 304, + VRInitError_IPC_Failed = 305, + + VRInitError_Compositor_Failed = 400, + VRInitError_Compositor_D3D11HardwareRequired = 401, + VRInitError_Compositor_FirmwareRequiresUpdate = 402, + VRInitError_Compositor_OverlayInitFailed = 403, + VRInitError_Compositor_ScreenshotsInitFailed = 404, + + VRInitError_VendorSpecific_UnableToConnectToOculusRuntime = 1000, + + VRInitError_VendorSpecific_HmdFound_CantOpenDevice = 1101, + VRInitError_VendorSpecific_HmdFound_UnableToRequestConfigStart = 1102, + VRInitError_VendorSpecific_HmdFound_NoStoredConfig = 1103, + VRInitError_VendorSpecific_HmdFound_ConfigTooBig = 1104, + VRInitError_VendorSpecific_HmdFound_ConfigTooSmall = 1105, + VRInitError_VendorSpecific_HmdFound_UnableToInitZLib = 1106, + VRInitError_VendorSpecific_HmdFound_CantReadFirmwareVersion = 1107, + VRInitError_VendorSpecific_HmdFound_UnableToSendUserDataStart = 1108, + VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataStart = 1109, + 
VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataNext = 1110, + VRInitError_VendorSpecific_HmdFound_UserDataAddressRange = 1111, + VRInitError_VendorSpecific_HmdFound_UserDataError = 1112, + VRInitError_VendorSpecific_HmdFound_ConfigFailedSanityCheck = 1113, + + VRInitError_Steam_SteamInstallationNotFound = 2000, +}; + +enum EVRScreenshotType +{ + VRScreenshotType_None = 0, + VRScreenshotType_Mono = 1, // left eye only + VRScreenshotType_Stereo = 2, + VRScreenshotType_Cubemap = 3, + VRScreenshotType_MonoPanorama = 4, + VRScreenshotType_StereoPanorama = 5 +}; + +enum EVRScreenshotPropertyFilenames +{ + VRScreenshotPropertyFilenames_Preview = 0, + VRScreenshotPropertyFilenames_VR = 1, +}; + +enum EVRTrackedCameraError +{ + VRTrackedCameraError_None = 0, + VRTrackedCameraError_OperationFailed = 100, + VRTrackedCameraError_InvalidHandle = 101, + VRTrackedCameraError_InvalidFrameHeaderVersion = 102, + VRTrackedCameraError_OutOfHandles = 103, + VRTrackedCameraError_IPCFailure = 104, + VRTrackedCameraError_NotSupportedForThisDevice = 105, + VRTrackedCameraError_SharedMemoryFailure = 106, + VRTrackedCameraError_FrameBufferingFailure = 107, + VRTrackedCameraError_StreamSetupFailure = 108, + VRTrackedCameraError_InvalidGLTextureId = 109, + VRTrackedCameraError_InvalidSharedTextureHandle = 110, + VRTrackedCameraError_FailedToGetGLTextureId = 111, + VRTrackedCameraError_SharedTextureFailure = 112, + VRTrackedCameraError_NoFrameAvailable = 113, + VRTrackedCameraError_InvalidArgument = 114, + VRTrackedCameraError_InvalidFrameBufferSize = 115, +}; + +enum EVRTrackedCameraFrameType +{ + VRTrackedCameraFrameType_Distorted = 0, // This is the camera video frame size in pixels, still distorted. + VRTrackedCameraFrameType_Undistorted, // In pixels, an undistorted inscribed rectangle region without invalid regions. This size is subject to changes shortly. + VRTrackedCameraFrameType_MaximumUndistorted, // In pixels, maximum undistorted with invalid regions. Non zero alpha component identifies valid regions. + MAX_CAMERA_FRAME_TYPES +}; + +typedef uint64_t TrackedCameraHandle_t; +#define INVALID_TRACKED_CAMERA_HANDLE ((vr::TrackedCameraHandle_t)0) + +struct CameraVideoStreamFrameHeader_t +{ + EVRTrackedCameraFrameType eFrameType; + + uint32_t nWidth; + uint32_t nHeight; + uint32_t nBytesPerPixel; + + uint32_t nFrameSequence; + + TrackedDevicePose_t standingTrackedDevicePose; +}; + +// Screenshot types +typedef uint32_t ScreenshotHandle_t; + +static const uint32_t k_unScreenshotHandleInvalid = 0; + +#pragma pack( pop ) + +// figure out how to import from the VR API dll +#if defined(_WIN32) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __declspec( dllexport ) +#else +#define VR_INTERFACE extern "C" __declspec( dllimport ) +#endif + +#elif defined(GNUC) || defined(COMPILER_GCC) || defined(__APPLE__) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __attribute__((visibility("default"))) +#else +#define VR_INTERFACE extern "C" +#endif + +#else +#error "Unsupported Platform." 
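
(Editor's note: illustrative sketch only, not part of this patch.) The init-related enums above are typically consumed like this: VR_Init(), declared near the end of this header, returns the IVRSystem interface and reports failures through EVRInitError, while ButtonMaskFromId() from earlier in the header turns an EVRButtonId into the bit tested against VRControllerState_t::ulButtonPressed. The device index used below is a placeholder, not something prescribed by the API.

#include <openvr.h>   // assumes the openvr headers directory is on the include path
#include <cstdio>

// Minimal init/shutdown plus one controller poll, using only types declared in this header.
int main()
{
    vr::EVRInitError initError = vr::VRInitError_None;
    vr::IVRSystem* hmd = vr::VR_Init( &initError, vr::VRApplication_Scene );
    if ( initError != vr::VRInitError_None )
    {
        printf( "VR_Init failed: %s\n", vr::VR_GetVRInitErrorAsEnglishDescription( initError ) );
        return 1;
    }

    // Test the trigger on an (assumed) controller at device index 1.
    vr::VRControllerState_t state;
    if ( hmd->GetControllerState( 1, &state ) )
    {
        bool triggerDown = ( state.ulButtonPressed &
                             vr::ButtonMaskFromId( vr::k_EButton_SteamVR_Trigger ) ) != 0;
        printf( "trigger %s\n", triggerDown ? "down" : "up" );
    }

    vr::VR_Shutdown();
    return 0;
}
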
+#endif + + +#if defined( _WIN32 ) +#define VR_CALLTYPE __cdecl +#else +#define VR_CALLTYPE +#endif + +} // namespace vr + +#endif // _INCLUDE_VRTYPES_H + + +// vrannotation.h +#ifdef API_GEN +# define VR_CLANG_ATTR(ATTR) __attribute__((annotate( ATTR ))) +#else +# define VR_CLANG_ATTR(ATTR) +#endif + +#define VR_METHOD_DESC(DESC) VR_CLANG_ATTR( "desc:" #DESC ";" ) +#define VR_IGNOREATTR() VR_CLANG_ATTR( "ignore" ) +#define VR_OUT_STRUCT() VR_CLANG_ATTR( "out_struct: ;" ) +#define VR_OUT_STRING() VR_CLANG_ATTR( "out_string: ;" ) +#define VR_OUT_ARRAY_CALL(COUNTER,FUNCTION,PARAMS) VR_CLANG_ATTR( "out_array_call:" #COUNTER "," #FUNCTION "," #PARAMS ";" ) +#define VR_OUT_ARRAY_COUNT(COUNTER) VR_CLANG_ATTR( "out_array_count:" #COUNTER ";" ) +#define VR_ARRAY_COUNT(COUNTER) VR_CLANG_ATTR( "array_count:" #COUNTER ";" ) +#define VR_ARRAY_COUNT_D(COUNTER, DESC) VR_CLANG_ATTR( "array_count:" #COUNTER ";desc:" #DESC ) +#define VR_BUFFER_COUNT(COUNTER) VR_CLANG_ATTR( "buffer_count:" #COUNTER ";" ) +#define VR_OUT_BUFFER_COUNT(COUNTER) VR_CLANG_ATTR( "out_buffer_count:" #COUNTER ";" ) +#define VR_OUT_STRING_COUNT(COUNTER) VR_CLANG_ATTR( "out_string_count:" #COUNTER ";" ) + +// ivrsystem.h +namespace vr +{ + +class IVRSystem +{ +public: + + + // ------------------------------------ + // Display Methods + // ------------------------------------ + + /** Suggested size for the intermediate render target that the distortion pulls from. */ + virtual void GetRecommendedRenderTargetSize( uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** The projection matrix for the specified eye */ + virtual HmdMatrix44_t GetProjectionMatrix( EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType ) = 0; + + /** The components necessary to build your own projection matrix in case your + * application is doing something fancy like infinite Z */ + virtual void GetProjectionRaw( EVREye eEye, float *pfLeft, float *pfRight, float *pfTop, float *pfBottom ) = 0; + + /** Returns the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in + * the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport. */ + virtual DistortionCoordinates_t ComputeDistortion( EVREye eEye, float fU, float fV ) = 0; + + /** Returns the transform from eye space to the head space. Eye space is the per-eye flavor of head + * space that provides stereo disparity. Instead of Model * View * Projection the sequence is Model * View * Eye^-1 * Projection. + * Normally View and Eye^-1 will be multiplied together and treated as View in your application. + */ + virtual HmdMatrix34_t GetEyeToHeadTransform( EVREye eEye ) = 0; + + /** Returns the number of elapsed seconds since the last recorded vsync event. This + * will come from a vsync timer event in the timer if possible or from the application-reported + * time if that is not available. If no vsync times are available the function will + * return zero for vsync time and frame counter and return false from the method. */ + virtual bool GetTimeSinceLastVsync( float *pfSecondsSinceLastVsync, uint64_t *pulFrameCounter ) = 0; + + /** [D3D9 Only] + * Returns the adapter index that the user should pass into CreateDevice to set up D3D9 in such + * a way that it can go full screen exclusive on the HMD. Returns -1 if there was an error. 
+ */ + virtual int32_t GetD3D9AdapterIndex() = 0; + + /** [D3D10/11 Only] + * Returns the adapter index and output index that the user should pass into EnumAdapters and EnumOutputs + * to create the device and swap chain in DX10 and DX11. If an error occurs both indices will be set to -1. + */ + virtual void GetDXGIOutputInfo( int32_t *pnAdapterIndex ) = 0; + + // ------------------------------------ + // Display Mode methods + // ------------------------------------ + + /** Use to determine if the headset display is part of the desktop (i.e. extended) or hidden (i.e. direct mode). */ + virtual bool IsDisplayOnDesktop() = 0; + + /** Set the display visibility (true = extended, false = direct mode). Return value of true indicates that the change was successful. */ + virtual bool SetDisplayVisibility( bool bIsVisibleOnDesktop ) = 0; + + // ------------------------------------ + // Tracking Methods + // ------------------------------------ + + /** The pose that the tracker thinks that the HMD will be in at the specified number of seconds into the + * future. Pass 0 to get the state at the instant the method is called. Most of the time the application should + * calculate the time until the photons will be emitted from the display and pass that time into the method. + * + * This is roughly analogous to the inverse of the view matrix in most applications, though + * many games will need to do some additional rotation or translation on top of the rotation + * and translation provided by the head pose. + * + * For devices where bPoseIsValid is true the application can use the pose to position the device + * in question. The provided array can be any size up to k_unMaxTrackedDeviceCount. + * + * Seated experiences should call this method with TrackingUniverseSeated and receive poses relative + * to the seated zero pose. Standing experiences should call this method with TrackingUniverseStanding + * and receive poses relative to the Chaperone Play Area. TrackingUniverseRawAndUncalibrated should + * probably not be used unless the application is the Chaperone calibration tool itself, but will provide + * poses relative to the hardware-specific coordinate system in the driver. + */ + virtual void GetDeviceToAbsoluteTrackingPose( ETrackingUniverseOrigin eOrigin, float fPredictedSecondsToPhotonsFromNow, VR_ARRAY_COUNT(unTrackedDevicePoseArrayCount) TrackedDevicePose_t *pTrackedDevicePoseArray, uint32_t unTrackedDevicePoseArrayCount ) = 0; + + /** Sets the zero pose for the seated tracker coordinate system to the current position and yaw of the HMD. After + * ResetSeatedZeroPose all GetDeviceToAbsoluteTrackingPose calls that pass TrackingUniverseSeated as the origin + * will be relative to this new zero pose. The new zero coordinate system will not change the fact that the Y axis + * is up in the real world, so the next pose returned from GetDeviceToAbsoluteTrackingPose after a call to + * ResetSeatedZeroPose may not be exactly an identity matrix. + * + * NOTE: This function overrides the user's previously saved seated zero pose and should only be called as the result of a user action. + * Users are also able to set their seated zero pose via the OpenVR Dashboard. + **/ + virtual void ResetSeatedZeroPose() = 0; + + /** Returns the transform from the seated zero pose to the standing absolute tracking system. This allows + * applications to represent the seated origin to used or transform object positions from one coordinate + * system to the other. 
+ * + * The seated origin may or may not be inside the Play Area or Collision Bounds returned by IVRChaperone. Its position + * depends on what the user has set from the Dashboard settings and previous calls to ResetSeatedZeroPose. */ + virtual HmdMatrix34_t GetSeatedZeroPoseToStandingAbsoluteTrackingPose() = 0; + + /** Returns the transform from the tracking origin to the standing absolute tracking system. This allows + * applications to convert from raw tracking space to the calibrated standing coordinate system. */ + virtual HmdMatrix34_t GetRawZeroPoseToStandingAbsoluteTrackingPose() = 0; + + /** Get a sorted array of device indices of a given class of tracked devices (e.g. controllers). Devices are sorted right to left + * relative to the specified tracked device (default: hmd -- pass in -1 for absolute tracking space). Returns the number of devices + * in the list, or the size of the array needed if not large enough. */ + virtual uint32_t GetSortedTrackedDeviceIndicesOfClass( ETrackedDeviceClass eTrackedDeviceClass, VR_ARRAY_COUNT(unTrackedDeviceIndexArrayCount) vr::TrackedDeviceIndex_t *punTrackedDeviceIndexArray, uint32_t unTrackedDeviceIndexArrayCount, vr::TrackedDeviceIndex_t unRelativeToTrackedDeviceIndex = k_unTrackedDeviceIndex_Hmd ) = 0; + + /** Returns the level of activity on the device. */ + virtual EDeviceActivityLevel GetTrackedDeviceActivityLevel( vr::TrackedDeviceIndex_t unDeviceId ) = 0; + + /** Convenience utility to apply the specified transform to the specified pose. + * This properly transforms all pose components, including velocity and angular velocity + */ + virtual void ApplyTransform( TrackedDevicePose_t *pOutputPose, const TrackedDevicePose_t *pTrackedDevicePose, const HmdMatrix34_t *pTransform ) = 0; + + /** Returns the device index associated with a specific role, for example the left hand or the right hand. */ + virtual vr::TrackedDeviceIndex_t GetTrackedDeviceIndexForControllerRole( vr::ETrackedControllerRole unDeviceType ) = 0; + + /** Returns the controller type associated with a device index. */ + virtual vr::ETrackedControllerRole GetControllerRoleForTrackedDeviceIndex( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + // ------------------------------------ + // Property methods + // ------------------------------------ + + /** Returns the device class of a tracked device. If there has not been a device connected in this slot + * since the application started this function will return TrackedDevice_Invalid. For previous detected + * devices the function will return the previously observed device class. + * + * To determine which devices exist on the system, just loop from 0 to k_unMaxTrackedDeviceCount and check + * the device class. Every device with something other than TrackedDevice_Invalid is associated with an + * actual tracked device. */ + virtual ETrackedDeviceClass GetTrackedDeviceClass( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns true if there is a device connected in this slot. */ + virtual bool IsTrackedDeviceConnected( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false. */ + virtual bool GetBoolTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a float property. If the device index is not valid or the property is not a float type this function will return 0. 
*/ + virtual float GetFloatTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns an int property. If the device index is not valid or the property is not a int type this function will return 0. */ + virtual int32_t GetInt32TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a uint64 property. If the device index is not valid or the property is not a uint64 type this function will return 0. */ + virtual uint64_t GetUint64TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a matrix property. If the device index is not valid or the property is not a matrix type, this function will return identity. */ + virtual HmdMatrix34_t GetMatrix34TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a string property. If the device index is not valid or the property is not a string type this function will + * return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing + * null. Strings will generally fit in buffers of k_unTrackingStringSize characters. */ + virtual uint32_t GetStringTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, ETrackedPropertyError *pError = 0L ) = 0; + + /** returns a string that corresponds with the specified property error. The string will be the name + * of the error enum value for all valid error codes */ + virtual const char *GetPropErrorNameFromEnum( ETrackedPropertyError error ) = 0; + + // ------------------------------------ + // Event methods + // ------------------------------------ + + /** Returns true and fills the event with the next event on the queue if there is one. If there are no events + * this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextEvent( VREvent_t *pEvent, uint32_t uncbVREvent ) = 0; + + /** Returns true and fills the event with the next event on the queue if there is one. If there are no events + * this method returns false. Fills in the pose of the associated tracked device in the provided pose struct. + * This pose will always be older than the call to this function and should not be used to render the device. + uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextEventWithPose( ETrackingUniverseOrigin eOrigin, VREvent_t *pEvent, uint32_t uncbVREvent, vr::TrackedDevicePose_t *pTrackedDevicePose ) = 0; + + /** returns the name of an EVREvent enum value */ + virtual const char *GetEventTypeNameFromEnum( EVREventType eType ) = 0; + + // ------------------------------------ + // Rendering helper methods + // ------------------------------------ + + /** Returns the stencil mesh information for the current HMD. If this HMD does not have a stencil mesh the vertex data and count will be + * NULL and 0 respectively. This mesh is meant to be rendered into the stencil buffer (or into the depth buffer setting nearz) before rendering + * each eye's view. The pixels covered by this mesh will never be seen by the user after the lens distortion is applied and based on visibility to the panels. 
+ * This will improve perf by letting the GPU early-reject pixels the user will never see before running the pixel shader. + * NOTE: Render this mesh with backface culling disabled since the winding order of the vertices can be different per-HMD or per-eye. + */ + virtual HiddenAreaMesh_t GetHiddenAreaMesh( EVREye eEye ) = 0; + + + // ------------------------------------ + // Controller methods + // ------------------------------------ + + /** Fills the supplied struct with the current state of the controller. Returns false if the controller index + * is invalid. */ + virtual bool GetControllerState( vr::TrackedDeviceIndex_t unControllerDeviceIndex, vr::VRControllerState_t *pControllerState ) = 0; + + /** fills the supplied struct with the current state of the controller and the provided pose with the pose of + * the controller when the controller state was updated most recently. Use this form if you need a precise controller + * pose as input to your application when the user presses or releases a button. */ + virtual bool GetControllerStateWithPose( ETrackingUniverseOrigin eOrigin, vr::TrackedDeviceIndex_t unControllerDeviceIndex, vr::VRControllerState_t *pControllerState, TrackedDevicePose_t *pTrackedDevicePose ) = 0; + + /** Trigger a single haptic pulse on a controller. After this call the application may not trigger another haptic pulse on this controller + * and axis combination for 5ms. */ + virtual void TriggerHapticPulse( vr::TrackedDeviceIndex_t unControllerDeviceIndex, uint32_t unAxisId, unsigned short usDurationMicroSec ) = 0; + + /** returns the name of an EVRButtonId enum value */ + virtual const char *GetButtonIdNameFromEnum( EVRButtonId eButtonId ) = 0; + + /** returns the name of an EVRControllerAxisType enum value */ + virtual const char *GetControllerAxisTypeNameFromEnum( EVRControllerAxisType eAxisType ) = 0; + + /** Tells OpenVR that this process wants exclusive access to controller button states and button events. Other apps will be notified that + * they have lost input focus with a VREvent_InputFocusCaptured event. Returns false if input focus could not be captured for + * some reason. */ + virtual bool CaptureInputFocus() = 0; + + /** Tells OpenVR that this process no longer wants exclusive access to button states and button events. Other apps will be notified + * that input focus has been released with a VREvent_InputFocusReleased event. */ + virtual void ReleaseInputFocus() = 0; + + /** Returns true if input focus is captured by another process. */ + virtual bool IsInputFocusCapturedByAnotherProcess() = 0; + + // ------------------------------------ + // Debug Methods + // ------------------------------------ + + /** Sends a request to the driver for the specified device and returns the response. The maximum response size is 32k, + * but this method can be called with a smaller buffer. If the response exceeds the size of the buffer, it is truncated. + * The size of the response including its terminating null is returned. */ + virtual uint32_t DriverDebugRequest( vr::TrackedDeviceIndex_t unDeviceIndex, const char *pchRequest, char *pchResponseBuffer, uint32_t unResponseBufferSize ) = 0; + + + // ------------------------------------ + // Firmware methods + // ------------------------------------ + + /** Performs the actual firmware update if applicable. 
+ * The following events will be sent, if VRFirmwareError_None was returned: VREvent_FirmwareUpdateStarted, VREvent_FirmwareUpdateFinished + * Use the properties Prop_Firmware_UpdateAvailable_Bool, Prop_Firmware_ManualUpdate_Bool, and Prop_Firmware_ManualUpdateURL_String + * to figure out whether a firmware update is available, and to figure out whether it's a manual update + * Prop_Firmware_ManualUpdateURL_String should point to a URL describing the manual update process */ + virtual vr::EVRFirmwareError PerformFirmwareUpdate( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + + // ------------------------------------ + // Application life cycle methods + // ------------------------------------ + + /** Call this to acknowledge to the system that VREvent_Quit has been received and that the process is exiting. + * This extends the timeout until the process is killed. */ + virtual void AcknowledgeQuit_Exiting() = 0; + + /** Call this to tell the system that the user is being prompted to save data. This + * halts the timeout and dismisses the dashboard (if it was up). Applications should be sure to actually + * prompt the user to save and then exit afterward, otherwise the user will be left in a confusing state. */ + virtual void AcknowledgeQuit_UserPrompt() = 0; + +}; + +static const char * const IVRSystem_Version = "IVRSystem_012"; + +} + + +// ivrapplications.h +namespace vr +{ + + /** Used for all errors reported by the IVRApplications interface */ + enum EVRApplicationError + { + VRApplicationError_None = 0, + + VRApplicationError_AppKeyAlreadyExists = 100, // Only one application can use any given key + VRApplicationError_NoManifest = 101, // the running application does not have a manifest + VRApplicationError_NoApplication = 102, // No application is running + VRApplicationError_InvalidIndex = 103, + VRApplicationError_UnknownApplication = 104, // the application could not be found + VRApplicationError_IPCFailed = 105, // An IPC failure caused the request to fail + VRApplicationError_ApplicationAlreadyRunning = 106, + VRApplicationError_InvalidManifest = 107, + VRApplicationError_InvalidApplication = 108, + VRApplicationError_LaunchFailed = 109, // the process didn't start + VRApplicationError_ApplicationAlreadyStarting = 110, // the system was already starting the same application + VRApplicationError_LaunchInProgress = 111, // The system was already starting a different application + VRApplicationError_OldApplicationQuitting = 112, + VRApplicationError_TransitionAborted = 113, + VRApplicationError_IsTemplate = 114, // error when you try to call LaunchApplication() on a template type app (use LaunchTemplateApplication) + + VRApplicationError_BufferTooSmall = 200, // The provided buffer was too small to fit the requested data + VRApplicationError_PropertyNotSet = 201, // The requested property was not set + VRApplicationError_UnknownProperty = 202, + VRApplicationError_InvalidParameter = 203, + }; + + /** The maximum length of an application key */ + static const uint32_t k_unMaxApplicationKeyLength = 128; + + /** These are the properties available on applications.

*/ + enum EVRApplicationProperty + { + VRApplicationProperty_Name_String = 0, + + VRApplicationProperty_LaunchType_String = 11, + VRApplicationProperty_WorkingDirectory_String = 12, + VRApplicationProperty_BinaryPath_String = 13, + VRApplicationProperty_Arguments_String = 14, + VRApplicationProperty_URL_String = 15, + + VRApplicationProperty_Description_String = 50, + VRApplicationProperty_NewsURL_String = 51, + VRApplicationProperty_ImagePath_String = 52, + VRApplicationProperty_Source_String = 53, + + VRApplicationProperty_IsDashboardOverlay_Bool = 60, + VRApplicationProperty_IsTemplate_Bool = 61, + VRApplicationProperty_IsInstanced_Bool = 62, + + VRApplicationProperty_LastLaunchTime_Uint64 = 70, + }; + + /** These are states the scene application startup process will go through. */ + enum EVRApplicationTransitionState + { + VRApplicationTransition_None = 0, + + VRApplicationTransition_OldAppQuitSent = 10, + VRApplicationTransition_WaitingForExternalLaunch = 11, + + VRApplicationTransition_NewAppLaunched = 20, + }; + + struct AppOverrideKeys_t + { + const char *pchKey; + const char *pchValue; + }; + + class IVRApplications + { + public: + + // --------------- Application management --------------- // + + /** Adds an application manifest to the list to load when building the list of installed applications. + * Temporary manifests are not automatically loaded */ + virtual EVRApplicationError AddApplicationManifest( const char *pchApplicationManifestFullPath, bool bTemporary = false ) = 0; + + /** Removes an application manifest from the list to load when building the list of installed applications. */ + virtual EVRApplicationError RemoveApplicationManifest( const char *pchApplicationManifestFullPath ) = 0; + + /** Returns true if an application is installed */ + virtual bool IsApplicationInstalled( const char *pchAppKey ) = 0; + + /** Returns the number of applications available in the list */ + virtual uint32_t GetApplicationCount() = 0; + + /** Returns the key of the specified application. The index is at least 0 and is less than the return + * value of GetApplicationCount(). The buffer should be at least k_unMaxApplicationKeyLength in order to + * fit the key. */ + virtual EVRApplicationError GetApplicationKeyByIndex( uint32_t unApplicationIndex, char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Returns the key of the application for the specified Process Id. The buffer should be at least + * k_unMaxApplicationKeyLength in order to fit the key. */ + virtual EVRApplicationError GetApplicationKeyByProcessId( uint32_t unProcessId, char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Launches the application. The existing scene application will exit and then the new application will start. + * This call is not valid for dashboard overlay applications. */ + virtual EVRApplicationError LaunchApplication( const char *pchAppKey ) = 0; + + /** Launches an instance of an application of type template, with its app key being pchNewAppKey (which must be unique) and optionally override sections + * from the manifest file via AppOverrideKeys_t + */ + virtual EVRApplicationError LaunchTemplateApplication( const char *pchTemplateAppKey, const char *pchNewAppKey, VR_ARRAY_COUNT( unKeys ) const AppOverrideKeys_t *pKeys, uint32_t unKeys ) = 0; + + /** Launches the dashboard overlay application if it is not already running. This call is only valid for + * dashboard overlay applications. 
*/ + virtual EVRApplicationError LaunchDashboardOverlay( const char *pchAppKey ) = 0; + + /** Cancel a pending launch for an application */ + virtual bool CancelApplicationLaunch( const char *pchAppKey ) = 0; + + /** Identifies a running application. OpenVR can't always tell which process started in response + * to a URL. This function allows a URL handler (or the process itself) to identify the app key + * for the now running application. Passing a process ID of 0 identifies the calling process. + * The application must be one that's known to the system via a call to AddApplicationManifest. */ + virtual EVRApplicationError IdentifyApplication( uint32_t unProcessId, const char *pchAppKey ) = 0; + + /** Returns the process ID for an application. Return 0 if the application was not found or is not running. */ + virtual uint32_t GetApplicationProcessId( const char *pchAppKey ) = 0; + + /** Returns a string for an applications error */ + virtual const char *GetApplicationsErrorNameFromEnum( EVRApplicationError error ) = 0; + + // --------------- Application properties --------------- // + + /** Returns a value for an application property. The required buffer size to fit this value will be returned. */ + virtual uint32_t GetApplicationPropertyString( const char *pchAppKey, EVRApplicationProperty eProperty, char *pchPropertyValueBuffer, uint32_t unPropertyValueBufferLen, EVRApplicationError *peError = nullptr ) = 0; + + /** Returns a bool value for an application property. Returns false in all error cases. */ + virtual bool GetApplicationPropertyBool( const char *pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError *peError = nullptr ) = 0; + + /** Returns a uint64 value for an application property. Returns 0 in all error cases. */ + virtual uint64_t GetApplicationPropertyUint64( const char *pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError *peError = nullptr ) = 0; + + /** Sets the application auto-launch flag. This is only valid for applications which return true for VRApplicationProperty_IsDashboardOverlay_Bool. */ + virtual EVRApplicationError SetApplicationAutoLaunch( const char *pchAppKey, bool bAutoLaunch ) = 0; + + /** Gets the application auto-launch flag. This is only valid for applications which return true for VRApplicationProperty_IsDashboardOverlay_Bool. */ + virtual bool GetApplicationAutoLaunch( const char *pchAppKey ) = 0; + + // --------------- Transition methods --------------- // + + /** Returns the app key for the application that is starting up */ + virtual EVRApplicationError GetStartingApplication( char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Returns the application transition state */ + virtual EVRApplicationTransitionState GetTransitionState() = 0; + + /** Returns errors that would prevent the specified application from launching immediately. Calling this function will + * cause the current scene application to quit, so only call it when you are actually about to launch something else. + * What the caller should do about these failures depends on the failure: + * VRApplicationError_OldApplicationQuitting - An existing application has been told to quit. Wait for a VREvent_ProcessQuit + * and try again. + * VRApplicationError_ApplicationAlreadyStarting - This application is already starting. This is a permanent failure. + * VRApplicationError_LaunchInProgress - A different application is already starting. This is a permanent failure. + * VRApplicationError_None - Go ahead and launch. Everything is clear. 
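// --- Illustrative sketch, not part of the vendored header or this patch ---
// Registering a manifest and launching an app by key with the IVRApplications
// methods above. The vr::VRApplications() accessor, the manifest path and the
// app key used here are assumptions for the example only.
#include <stdio.h>

void LaunchByKey( vr::IVRApplications *apps )
{
	vr::EVRApplicationError err = apps->AddApplicationManifest( "C:/myapp/mygame.vrmanifest" ); // hypothetical path
	if ( err != vr::VRApplicationError_None )
	{
		printf( "AddApplicationManifest failed: %s\n", apps->GetApplicationsErrorNameFromEnum( err ) );
		return;
	}
	if ( apps->IsApplicationInstalled( "mycompany.mygame" ) ) // hypothetical app key
	{
		err = apps->LaunchApplication( "mycompany.mygame" );
		printf( "LaunchApplication: %s\n", apps->GetApplicationsErrorNameFromEnum( err ) );
	}
}
// --- end sketch ---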
+ */ + virtual EVRApplicationError PerformApplicationPrelaunchCheck( const char *pchAppKey ) = 0; + + /** Returns a string for an application transition state */ + virtual const char *GetApplicationsTransitionStateNameFromEnum( EVRApplicationTransitionState state ) = 0; + + /** Returns true if the outgoing scene app has requested a save prompt before exiting */ + virtual bool IsQuitUserPromptRequested() = 0; + + /** Starts a subprocess within the calling application. This + * suppresses all application transition UI and automatically identifies the new executable + * as part of the same application. On success the calling process should exit immediately. + * If working directory is NULL or "" the directory portion of the binary path will be + * the working directory. */ + virtual EVRApplicationError LaunchInternalProcess( const char *pchBinaryPath, const char *pchArguments, const char *pchWorkingDirectory ) = 0; + }; + + static const char * const IVRApplications_Version = "IVRApplications_005"; + +} // namespace vr + +// ivrsettings.h +namespace vr +{ + enum EVRSettingsError + { + VRSettingsError_None = 0, + VRSettingsError_IPCFailed = 1, + VRSettingsError_WriteFailed = 2, + VRSettingsError_ReadFailed = 3, + }; + + // The maximum length of a settings key + static const uint32_t k_unMaxSettingsKeyLength = 128; + + class IVRSettings + { + public: + virtual const char *GetSettingsErrorNameFromEnum( EVRSettingsError eError ) = 0; + + // Returns true if file sync occurred (force or settings dirty) + virtual bool Sync( bool bForce = false, EVRSettingsError *peError = nullptr ) = 0; + + virtual bool GetBool( const char *pchSection, const char *pchSettingsKey, bool bDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetBool( const char *pchSection, const char *pchSettingsKey, bool bValue, EVRSettingsError *peError = nullptr ) = 0; + virtual int32_t GetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nValue, EVRSettingsError *peError = nullptr ) = 0; + virtual float GetFloat( const char *pchSection, const char *pchSettingsKey, float flDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetFloat( const char *pchSection, const char *pchSettingsKey, float flValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void GetString( const char *pchSection, const char *pchSettingsKey, char *pchValue, uint32_t unValueLen, const char *pchDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetString( const char *pchSection, const char *pchSettingsKey, const char *pchValue, EVRSettingsError *peError = nullptr ) = 0; + + virtual void RemoveSection( const char *pchSection, EVRSettingsError *peError = nullptr ) = 0; + virtual void RemoveKeyInSection( const char *pchSection, const char *pchSettingsKey, EVRSettingsError *peError = nullptr ) = 0; + }; + + //----------------------------------------------------------------------------- + static const char * const IVRSettings_Version = "IVRSettings_001"; + + //----------------------------------------------------------------------------- + // steamvr keys + + static const char * const k_pch_SteamVR_Section = "steamvr"; + static const char * const k_pch_SteamVR_RequireHmd_String = "requireHmd"; + static const char * const k_pch_SteamVR_ForcedDriverKey_String = "forcedDriver"; + static const char * const k_pch_SteamVR_ForcedHmdKey_String = 
"forcedHmd"; + static const char * const k_pch_SteamVR_DisplayDebug_Bool = "displayDebug"; + static const char * const k_pch_SteamVR_DebugProcessPipe_String = "debugProcessPipe"; + static const char * const k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion"; + static const char * const k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX"; + static const char * const k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY"; + static const char * const k_pch_SteamVR_SendSystemButtonToAllApps_Bool= "sendSystemButtonToAllApps"; + static const char * const k_pch_SteamVR_LogLevel_Int32 = "loglevel"; + static const char * const k_pch_SteamVR_IPD_Float = "ipd"; + static const char * const k_pch_SteamVR_Background_String = "background"; + static const char * const k_pch_SteamVR_BackgroundCameraHeight_Float = "backgroundCameraHeight"; + static const char * const k_pch_SteamVR_BackgroundDomeRadius_Float = "backgroundDomeRadius"; + static const char * const k_pch_SteamVR_Environment_String = "environment"; + static const char * const k_pch_SteamVR_GridColor_String = "gridColor"; + static const char * const k_pch_SteamVR_PlayAreaColor_String = "playAreaColor"; + static const char * const k_pch_SteamVR_ShowStage_Bool = "showStage"; + static const char * const k_pch_SteamVR_ActivateMultipleDrivers_Bool = "activateMultipleDrivers"; + static const char * const k_pch_SteamVR_PowerOffOnExit_Bool = "powerOffOnExit"; + static const char * const k_pch_SteamVR_StandbyAppRunningTimeout_Float = "standbyAppRunningTimeout"; + static const char * const k_pch_SteamVR_StandbyNoAppTimeout_Float = "standbyNoAppTimeout"; + static const char * const k_pch_SteamVR_DirectMode_Bool = "directMode"; + static const char * const k_pch_SteamVR_DirectModeEdidVid_Int32 = "directModeEdidVid"; + static const char * const k_pch_SteamVR_DirectModeEdidPid_Int32 = "directModeEdidPid"; + static const char * const k_pch_SteamVR_UsingSpeakers_Bool = "usingSpeakers"; + static const char * const k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees"; + static const char * const k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement"; + static const char * const k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses"; + static const char * const k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier"; + static const char * const k_pch_SteamVR_AllowReprojection_Bool = "allowReprojection"; + static const char * const k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection"; + static const char * const k_pch_SteamVR_ForceFadeOnBadTracking_Bool = "forceFadeOnBadTracking"; + static const char * const k_pch_SteamVR_DefaultMirrorView_Int32 = "defaultMirrorView"; + static const char * const k_pch_SteamVR_ShowMirrorView_Bool = "showMirrorView"; + + //----------------------------------------------------------------------------- + // lighthouse keys + + static const char * const k_pch_Lighthouse_Section = "driver_lighthouse"; + static const char * const k_pch_Lighthouse_DisableIMU_Bool = "disableimu"; + static const char * const k_pch_Lighthouse_UseDisambiguation_String = "usedisambiguation"; + static const char * const k_pch_Lighthouse_DisambiguationDebug_Int32 = "disambiguationdebug"; + + static const char * const k_pch_Lighthouse_PrimaryBasestation_Int32 = "primarybasestation"; + static const char * const k_pch_Lighthouse_LighthouseName_String = "lighthousename"; + static const char * const k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float = "maxincidenceangledegrees"; + static 
const char * const k_pch_Lighthouse_UseLighthouseDirect_Bool = "uselighthousedirect"; + static const char * const k_pch_Lighthouse_DBHistory_Bool = "dbhistory"; + + //----------------------------------------------------------------------------- + // null keys + + static const char * const k_pch_Null_Section = "driver_null"; + static const char * const k_pch_Null_EnableNullDriver_Bool = "enable"; + static const char * const k_pch_Null_SerialNumber_String = "serialNumber"; + static const char * const k_pch_Null_ModelNumber_String = "modelNumber"; + static const char * const k_pch_Null_WindowX_Int32 = "windowX"; + static const char * const k_pch_Null_WindowY_Int32 = "windowY"; + static const char * const k_pch_Null_WindowWidth_Int32 = "windowWidth"; + static const char * const k_pch_Null_WindowHeight_Int32 = "windowHeight"; + static const char * const k_pch_Null_RenderWidth_Int32 = "renderWidth"; + static const char * const k_pch_Null_RenderHeight_Int32 = "renderHeight"; + static const char * const k_pch_Null_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons"; + static const char * const k_pch_Null_DisplayFrequency_Float = "displayFrequency"; + + //----------------------------------------------------------------------------- + // user interface keys + static const char * const k_pch_UserInterface_Section = "userinterface"; + static const char * const k_pch_UserInterface_StatusAlwaysOnTop_Bool = "StatusAlwaysOnTop"; + static const char * const k_pch_UserInterface_EnableScreenshots_Bool = "EnableScreenshots"; + + //----------------------------------------------------------------------------- + // notification keys + static const char * const k_pch_Notifications_Section = "notifications"; + static const char * const k_pch_Notifications_DoNotDisturb_Bool = "DoNotDisturb"; + + //----------------------------------------------------------------------------- + // keyboard keys + static const char * const k_pch_Keyboard_Section = "keyboard"; + static const char * const k_pch_Keyboard_TutorialCompletions = "TutorialCompletions"; + static const char * const k_pch_Keyboard_ScaleX = "ScaleX"; + static const char * const k_pch_Keyboard_ScaleY = "ScaleY"; + static const char * const k_pch_Keyboard_OffsetLeftX = "OffsetLeftX"; + static const char * const k_pch_Keyboard_OffsetRightX = "OffsetRightX"; + static const char * const k_pch_Keyboard_OffsetY = "OffsetY"; + static const char * const k_pch_Keyboard_Smoothing = "Smoothing"; + + //----------------------------------------------------------------------------- + // perf keys + static const char * const k_pch_Perf_Section = "perfcheck"; + static const char * const k_pch_Perf_HeuristicActive_Bool = "heuristicActive"; + static const char * const k_pch_Perf_NotifyInHMD_Bool = "warnInHMD"; + static const char * const k_pch_Perf_NotifyOnlyOnce_Bool = "warnOnlyOnce"; + static const char * const k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore"; + static const char * const k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit"; + static const char * const k_pch_Perf_TestData_Float = "perfTestData"; + + //----------------------------------------------------------------------------- + // collision bounds keys + static const char * const k_pch_CollisionBounds_Section = "collisionBounds"; + static const char * const k_pch_CollisionBounds_Style_Int32 = "CollisionBoundsStyle"; + static const char * const k_pch_CollisionBounds_GroundPerimeterOn_Bool = "CollisionBoundsGroundPerimeterOn"; + static const char * const k_pch_CollisionBounds_CenterMarkerOn_Bool 
= "CollisionBoundsCenterMarkerOn"; + static const char * const k_pch_CollisionBounds_PlaySpaceOn_Bool = "CollisionBoundsPlaySpaceOn"; + static const char * const k_pch_CollisionBounds_FadeDistance_Float = "CollisionBoundsFadeDistance"; + static const char * const k_pch_CollisionBounds_ColorGammaR_Int32 = "CollisionBoundsColorGammaR"; + static const char * const k_pch_CollisionBounds_ColorGammaG_Int32 = "CollisionBoundsColorGammaG"; + static const char * const k_pch_CollisionBounds_ColorGammaB_Int32 = "CollisionBoundsColorGammaB"; + static const char * const k_pch_CollisionBounds_ColorGammaA_Int32 = "CollisionBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // camera keys + static const char * const k_pch_Camera_Section = "camera"; + static const char * const k_pch_Camera_EnableCamera_Bool = "enableCamera"; + static const char * const k_pch_Camera_EnableCameraInDashboard_Bool = "enableCameraInDashboard"; + static const char * const k_pch_Camera_EnableCameraForCollisionBounds_Bool = "enableCameraForCollisionBounds"; + static const char * const k_pch_Camera_EnableCameraForRoomView_Bool = "enableCameraForRoomView"; + static const char * const k_pch_Camera_BoundsColorGammaR_Int32 = "cameraBoundsColorGammaR"; + static const char * const k_pch_Camera_BoundsColorGammaG_Int32 = "cameraBoundsColorGammaG"; + static const char * const k_pch_Camera_BoundsColorGammaB_Int32 = "cameraBoundsColorGammaB"; + static const char * const k_pch_Camera_BoundsColorGammaA_Int32 = "cameraBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // audio keys + static const char * const k_pch_audio_Section = "audio"; + static const char * const k_pch_audio_OnPlaybackDevice_String = "onPlaybackDevice"; + static const char * const k_pch_audio_OnRecordDevice_String = "onRecordDevice"; + static const char * const k_pch_audio_OnPlaybackMirrorDevice_String = "onPlaybackMirrorDevice"; + static const char * const k_pch_audio_OffPlaybackDevice_String = "offPlaybackDevice"; + static const char * const k_pch_audio_OffRecordDevice_String = "offRecordDevice"; + static const char * const k_pch_audio_VIVEHDMIGain = "viveHDMIGain"; + + //----------------------------------------------------------------------------- + // model skin keys + static const char * const k_pch_modelskin_Section = "modelskins"; + +} // namespace vr + +// ivrchaperone.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +enum ChaperoneCalibrationState +{ + // OK! 
+ ChaperoneCalibrationState_OK = 1, // Chaperone is fully calibrated and working correctly + + // Warnings + ChaperoneCalibrationState_Warning = 100, + ChaperoneCalibrationState_Warning_BaseStationMayHaveMoved = 101, // A base station thinks that it might have moved + ChaperoneCalibrationState_Warning_BaseStationRemoved = 102, // There are fewer base stations than when calibrated + ChaperoneCalibrationState_Warning_SeatedBoundsInvalid = 103, // Seated bounds haven't been calibrated for the current tracking center + + // Errors + ChaperoneCalibrationState_Error = 200, // The UniverseID is invalid + ChaperoneCalibrationState_Error_BaseStationUninitalized = 201, // Tracking center hasn't been calibrated for at least one of the base stations + ChaperoneCalibrationState_Error_BaseStationConflict = 202, // Tracking center is calibrated, but base stations disagree on the tracking space + ChaperoneCalibrationState_Error_PlayAreaInvalid = 203, // Play Area hasn't been calibrated for the current tracking center + ChaperoneCalibrationState_Error_CollisionBoundsInvalid = 204, // Collision Bounds haven't been calibrated for the current tracking center +}; + + +/** HIGH LEVEL TRACKING SPACE ASSUMPTIONS: +* 0,0,0 is the preferred standing area center. +* 0Y is the floor height. +* -Z is the preferred forward facing direction. */ +class IVRChaperone +{ +public: + + /** Get the current state of Chaperone calibration. This state can change at any time during a session due to physical base station changes. **/ + virtual ChaperoneCalibrationState GetCalibrationState() = 0; + + /** Returns the width and depth of the Play Area (formerly named Soft Bounds) in X and Z. + * Tracking space center (0,0,0) is the center of the Play Area. **/ + virtual bool GetPlayAreaSize( float *pSizeX, float *pSizeZ ) = 0; + + /** Returns the 4 corner positions of the Play Area (formerly named Soft Bounds). + * Corners are in counter-clockwise order. + * Standing center (0,0,0) is the center of the Play Area. + * It's a rectangle. + * 2 sides are parallel to the X axis and 2 sides are parallel to the Z axis. + * Height of every corner is 0Y (on the floor). **/ + virtual bool GetPlayAreaRect( HmdQuad_t *rect ) = 0; + + /** Reload Chaperone data from the .vrchap file on disk. */ + virtual void ReloadInfo( void ) = 0; + + /** Optionally give the chaperone system a hint about the color and brightness in the scene **/ + virtual void SetSceneColor( HmdColor_t color ) = 0; + + /** Get the current chaperone bounds draw color and brightness **/ + virtual void GetBoundsColor( HmdColor_t *pOutputColorArray, int nNumOutputColors, float flCollisionBoundsFadeDistance, HmdColor_t *pOutputCameraColor ) = 0; + + /** Determine whether the bounds are showing right now **/ + virtual bool AreBoundsVisible() = 0; + + /** Force the bounds to show, mostly for utilities **/ + virtual void ForceBoundsVisible( bool bForce ) = 0; +}; + +static const char * const IVRChaperone_Version = "IVRChaperone_003"; + +#pragma pack( pop ) + +} + +// ivrchaperonesetup.h +namespace vr +{ + +enum EChaperoneConfigFile +{ + EChaperoneConfigFile_Live = 1, // The live chaperone config, used by most applications and games + EChaperoneConfigFile_Temp = 2, // The temporary chaperone config, used to live-preview collision bounds in room setup +}; + +enum EChaperoneImportFlags +{ + EChaperoneImport_BoundsOnly = 0x0001, +}; + +/** Manages the working copy of the chaperone info. By default this will be the same as the +* live copy.
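// --- Illustrative sketch, not part of the vendored header or this patch ---
// Querying the calibrated Play Area with the IVRChaperone methods above. The
// vr::VRChaperone() accessor and the HmdQuad_t/HmdVector3_t field names are
// taken from the earlier, unshown part of this header and are assumptions
// here; in the tracking space described above, X/Z span the floor and -Z is
// the forward direction.
#include <stdio.h>

void PrintPlayArea()
{
	vr::IVRChaperone *chap = vr::VRChaperone(); // assumed accessor
	if ( !chap || chap->GetCalibrationState() != vr::ChaperoneCalibrationState_OK )
		return; // not fully calibrated - skipped here for simplicity
	float sizeX = 0.f, sizeZ = 0.f;
	if ( chap->GetPlayAreaSize( &sizeX, &sizeZ ) )
		printf( "Play area: %.2fm x %.2fm\n", sizeX, sizeZ );
	vr::HmdQuad_t rect;
	if ( chap->GetPlayAreaRect( &rect ) )
	{
		// rect.vCorners[0..3] are the four floor-level corners, counter-clockwise.
		printf( "Corner 0: %.2f %.2f %.2f\n",
			rect.vCorners[0].v[0], rect.vCorners[0].v[1], rect.vCorners[0].v[2] );
	}
}
// --- end sketch ---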
Any changes made with this interface will stay in the working copy until +* CommitWorkingCopy() is called, at which point the working copy and the live copy will be +* the same again. */ +class IVRChaperoneSetup +{ +public: + + /** Saves the current working copy to disk */ + virtual bool CommitWorkingCopy( EChaperoneConfigFile configFile ) = 0; + + /** Reverts the working copy to match the live chaperone calibration. + * To modify existing data this MUST be done WHILE getting a non-error ChaperoneCalibrationState. + * Only after this should you do gets and sets on the existing data. */ + virtual void RevertWorkingCopy() = 0; + + /** Returns the width and depth of the Play Area (formerly named Soft Bounds) in X and Z from the working copy. + * Tracking space center (0,0,0) is the center of the Play Area. */ + virtual bool GetWorkingPlayAreaSize( float *pSizeX, float *pSizeZ ) = 0; + + /** Returns the 4 corner positions of the Play Area (formerly named Soft Bounds) from the working copy. + * Corners are in clockwise order. + * Tracking space center (0,0,0) is the center of the Play Area. + * It's a rectangle. + * 2 sides are parallel to the X axis and 2 sides are parallel to the Z axis. + * Height of every corner is 0Y (on the floor). **/ + virtual bool GetWorkingPlayAreaRect( HmdQuad_t *rect ) = 0; + + /** Returns the number of Quads if the buffer points to null. Otherwise it returns Quads + * into the buffer up to the max specified from the working copy. */ + virtual bool GetWorkingCollisionBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + /** Returns the number of Quads if the buffer points to null. Otherwise it returns Quads + * into the buffer up to the max specified. */ + virtual bool GetLiveCollisionBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + /** Returns the preferred seated position from the working copy. */ + virtual bool GetWorkingSeatedZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatSeatedZeroPoseToRawTrackingPose ) = 0; + + /** Returns the standing origin from the working copy. */ + virtual bool GetWorkingStandingZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatStandingZeroPoseToRawTrackingPose ) = 0; + + /** Sets the Play Area in the working copy. */ + virtual void SetWorkingPlayAreaSize( float sizeX, float sizeZ ) = 0; + + /** Sets the Collision Bounds in the working copy. */ + virtual void SetWorkingCollisionBoundsInfo( VR_ARRAY_COUNT(unQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t unQuadsCount ) = 0; + + /** Sets the preferred seated position in the working copy. */ + virtual void SetWorkingSeatedZeroPoseToRawTrackingPose( const HmdMatrix34_t *pMatSeatedZeroPoseToRawTrackingPose ) = 0; + + /** Sets the preferred standing position in the working copy. */ + virtual void SetWorkingStandingZeroPoseToRawTrackingPose( const HmdMatrix34_t *pMatStandingZeroPoseToRawTrackingPose ) = 0; + + /** Tear everything down and reload it from the file on disk */ + virtual void ReloadFromDisk( EChaperoneConfigFile configFile ) = 0; + + /** Returns the preferred seated position.
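// --- Illustrative sketch, not part of the vendored header or this patch ---
// The working-copy round trip described above: revert to the live data, read
// the collision bounds with the two-call count/fill pattern, then commit back
// to the live config. The vr::VRChaperoneSetup() accessor is an assumption;
// error handling is omitted.
#include <vector>

void TouchCollisionBounds()
{
	vr::IVRChaperoneSetup *setup = vr::VRChaperoneSetup(); // assumed accessor
	if ( !setup )
		return;
	setup->RevertWorkingCopy(); // required before reading or modifying existing data
	uint32_t quadCount = 0;
	setup->GetWorkingCollisionBoundsInfo( NULL, &quadCount ); // null buffer: query the quad count
	if ( quadCount == 0 )
		return;
	std::vector<vr::HmdQuad_t> quads( quadCount );
	setup->GetWorkingCollisionBoundsInfo( quads.data(), &quadCount );
	// ... inspect or edit quads here, then write back and commit:
	setup->SetWorkingCollisionBoundsInfo( quads.data(), quadCount );
	setup->CommitWorkingCopy( vr::EChaperoneConfigFile_Live );
}
// --- end sketch ---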
*/ + virtual bool GetLiveSeatedZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatSeatedZeroPoseToRawTrackingPose ) = 0; + + virtual void SetWorkingCollisionBoundsTagsInfo( VR_ARRAY_COUNT(unTagCount) uint8_t *pTagsBuffer, uint32_t unTagCount ) = 0; + virtual bool GetLiveCollisionBoundsTagsInfo( VR_OUT_ARRAY_COUNT(punTagCount) uint8_t *pTagsBuffer, uint32_t *punTagCount ) = 0; + + virtual bool SetWorkingPhysicalBoundsInfo( VR_ARRAY_COUNT(unQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t unQuadsCount ) = 0; + virtual bool GetLivePhysicalBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + virtual bool ExportLiveToBuffer( VR_OUT_STRING() char *pBuffer, uint32_t *pnBufferLength ) = 0; + virtual bool ImportFromBufferToWorking( const char *pBuffer, uint32_t nImportFlags ) = 0; +}; + +static const char * const IVRChaperoneSetup_Version = "IVRChaperoneSetup_005"; + + +} + +// ivrcompositor.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +/** Errors that can occur with the VR compositor */ +enum EVRCompositorError +{ + VRCompositorError_None = 0, + VRCompositorError_RequestFailed = 1, + VRCompositorError_IncompatibleVersion = 100, + VRCompositorError_DoNotHaveFocus = 101, + VRCompositorError_InvalidTexture = 102, + VRCompositorError_IsNotSceneApplication = 103, + VRCompositorError_TextureIsOnWrongDevice = 104, + VRCompositorError_TextureUsesUnsupportedFormat = 105, + VRCompositorError_SharedTexturesNotSupported = 106, + VRCompositorError_IndexOutOfRange = 107, +}; + +const uint32_t VRCompositor_ReprojectionReason_Cpu = 0x01; +const uint32_t VRCompositor_ReprojectionReason_Gpu = 0x02; + +/** Provides a single frame's timing information to the app */ +struct Compositor_FrameTiming +{ + uint32_t m_nSize; // Set to sizeof( Compositor_FrameTiming ) + uint32_t m_nFrameIndex; + uint32_t m_nNumFramePresents; // number of times this frame was presented + uint32_t m_nNumDroppedFrames; // number of additional times previous frame was scanned out + + /** Absolute time reference for comparing frames. This aligns with the vsync that running start is relative to. */ + double m_flSystemTimeInSeconds; + + /** These times may include work from other processes due to OS scheduling. + * The fewer packets of work these are broken up into, the less likely this will happen. + * GPU work can be broken up by calling Flush. This can sometimes be useful to get the GPU started + * processing that work earlier in the frame. */ + float m_flSceneRenderGpuMs; // time spent rendering the scene + float m_flTotalRenderGpuMs; // time between work submitted immediately after present (ideally vsync) until the end of compositor submitted work + float m_flCompositorRenderGpuMs; // time spend performing distortion correction, rendering chaperone, overlays, etc. 
+ float m_flCompositorRenderCpuMs; // time spent on cpu submitting the above work for this frame + float m_flCompositorIdleCpuMs; // time spent waiting for running start (application could have used this much more time) + + /** Miscellaneous measured intervals. */ + float m_flClientFrameIntervalMs; // time between calls to WaitGetPoses + float m_flPresentCallCpuMs; // time blocked on call to present (usually 0.0, but can go long) + float m_flWaitForPresentCpuMs; // time spent spin-waiting for frame index to change (not near-zero indicates wait object failure) + float m_flSubmitFrameMs; // time spent in IVRCompositor::Submit (not near-zero indicates driver issue) + + /** The following are all relative to this frame's SystemTimeInSeconds */ + float m_flWaitGetPosesCalledMs; + float m_flNewPosesReadyMs; + float m_flNewFrameReadyMs; // second call to IVRCompositor::Submit + float m_flCompositorUpdateStartMs; + float m_flCompositorUpdateEndMs; + float m_flCompositorRenderStartMs; + + vr::TrackedDevicePose_t m_HmdPose; // pose used by app to render this frame + int32_t m_nFidelityLevel; // app reported value + + uint32_t m_nReprojectionFlags; +}; + +/** Cumulative stats for current application. These are not cleared until a new app connects, +* but they do stop accumulating once the associated app disconnects. */ +struct Compositor_CumulativeStats +{ + uint32_t m_nPid; // Process id associated with these stats (may no longer be running). + uint32_t m_nNumFramePresents; // total number of times we called present (includes reprojected frames) + uint32_t m_nNumDroppedFrames; // total number of times an old frame was re-scanned out (without reprojection) + uint32_t m_nNumReprojectedFrames; // total number of times a frame was scanned out a second time with reprojection + + /** Values recorded at startup before application has fully faded in the first time. */ + uint32_t m_nNumFramePresentsOnStartup; + uint32_t m_nNumDroppedFramesOnStartup; + uint32_t m_nNumReprojectedFramesOnStartup; + + /** Applications may explicitly fade to the compositor. This is usually to handle level transitions, and loading often causes + * system wide hitches. The following stats are collected during this period. Does not include values recorded during startup. */ + uint32_t m_nNumLoading; + uint32_t m_nNumFramePresentsLoading; + uint32_t m_nNumDroppedFramesLoading; + uint32_t m_nNumReprojectedFramesLoading; + + /** If we don't get a new frame from the app in less than 2.5 frames, then we assume the app has hung and start + * fading back to the compositor. The following stats are a result of this, and are a subset of those recorded above. + * Does not include values recorded during start up or loading. */ + uint32_t m_nNumTimedOut; + uint32_t m_nNumFramePresentsTimedOut; + uint32_t m_nNumDroppedFramesTimedOut; + uint32_t m_nNumReprojectedFramesTimedOut; +}; + +#pragma pack( pop ) + +/** Allows the application to interact with the compositor */ +class IVRCompositor +{ +public: + /** Sets tracking space returned by WaitGetPoses */ + virtual void SetTrackingSpace( ETrackingUniverseOrigin eOrigin ) = 0; + + /** Gets current tracking space returned by WaitGetPoses */ + virtual ETrackingUniverseOrigin GetTrackingSpace() = 0; + + /** Returns pose(s) to use to render scene (and optionally poses predicted two frames out for gameplay). 
*/ + virtual EVRCompositorError WaitGetPoses( VR_ARRAY_COUNT(unRenderPoseArrayCount) TrackedDevicePose_t* pRenderPoseArray, uint32_t unRenderPoseArrayCount, + VR_ARRAY_COUNT(unGamePoseArrayCount) TrackedDevicePose_t* pGamePoseArray, uint32_t unGamePoseArrayCount ) = 0; + + /** Get the last set of poses returned by WaitGetPoses. */ + virtual EVRCompositorError GetLastPoses( VR_ARRAY_COUNT( unRenderPoseArrayCount ) TrackedDevicePose_t* pRenderPoseArray, uint32_t unRenderPoseArrayCount, + VR_ARRAY_COUNT( unGamePoseArrayCount ) TrackedDevicePose_t* pGamePoseArray, uint32_t unGamePoseArrayCount ) = 0; + + /** Interface for accessing last set of poses returned by WaitGetPoses one at a time. + * Returns VRCompositorError_IndexOutOfRange if unDeviceIndex not less than k_unMaxTrackedDeviceCount otherwise VRCompositorError_None. + * It is okay to pass NULL for either pose if you only want one of the values. */ + virtual EVRCompositorError GetLastPoseForTrackedDeviceIndex( TrackedDeviceIndex_t unDeviceIndex, TrackedDevicePose_t *pOutputPose, TrackedDevicePose_t *pOutputGamePose ) = 0; + + /** Updated scene texture to display. If pBounds is NULL the entire texture will be used. If called from an OpenGL app, consider adding a glFlush after + * Submitting both frames to signal the driver to start processing, otherwise it may wait until the command buffer fills up, causing the app to miss frames. + * + * OpenGL dirty state: + * glBindTexture + */ + virtual EVRCompositorError Submit( EVREye eEye, const Texture_t *pTexture, const VRTextureBounds_t* pBounds = 0, EVRSubmitFlags nSubmitFlags = Submit_Default ) = 0; + + /** Clears the frame that was sent with the last call to Submit. This will cause the + * compositor to show the grid until Submit is called again. */ + virtual void ClearLastSubmittedFrame() = 0; + + /** Call immediately after presenting your app's window (i.e. companion window) to unblock the compositor. + * This is an optional call, which only needs to be used if you can't instead call WaitGetPoses immediately after Present. + * For example, if your engine's render and game loop are not on separate threads, or blocking the render thread until 3ms before the next vsync would + * introduce a deadlock of some sort. This function tells the compositor that you have finished all rendering after having Submitted buffers for both + * eyes, and it is free to start its rendering work. This should only be called from the same thread you are rendering on. */ + virtual void PostPresentHandoff() = 0; + + /** Returns true if timing data is filled it. Sets oldest timing info if nFramesAgo is larger than the stored history. + * Be sure to set timing.size = sizeof(Compositor_FrameTiming) on struct passed in before calling this function. */ + virtual bool GetFrameTiming( Compositor_FrameTiming *pTiming, uint32_t unFramesAgo = 0 ) = 0; + + /** Returns the time in seconds left in the current (as identified by FrameTiming's frameIndex) frame. + * Due to "running start", this value may roll over to the next frame before ever reaching 0.0. */ + virtual float GetFrameTimeRemaining() = 0; + + /** Fills out stats accumulated for the last connected application. Pass in sizeof( Compositor_CumulativeStats ) as second parameter. */ + virtual void GetCumulativeStats( Compositor_CumulativeStats *pStats, uint32_t nStatsSizeInBytes ) = 0; + + /** Fades the view on the HMD to the specified color. The fade will take fSeconds, and the color values are between + * 0.0 and 1.0. 
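// --- Illustrative sketch, not part of the vendored header or this patch ---
// The per-frame compositor loop used by the hellovr-style example in this PR:
// block in WaitGetPoses, render both eyes, then Submit one OpenGL texture per
// eye. The Texture_t initializer (vr::API_OpenGL, vr::ColorSpace_Gamma) and
// the GLuint/glFlush usage follow the 2016-era header and GL setup vendored
// with this patch and are assumptions; the resolve-texture ids are placeholders.
#include <stdint.h>

void SubmitFrame( vr::IVRCompositor *compositor,
                  GLuint leftEyeResolveTex, GLuint rightEyeResolveTex )
{
	vr::TrackedDevicePose_t renderPoses[ vr::k_unMaxTrackedDeviceCount ];
	compositor->WaitGetPoses( renderPoses, vr::k_unMaxTrackedDeviceCount, NULL, 0 );

	// ... render the scene into the two eye framebuffers here ...

	vr::Texture_t leftEye = { (void*)(uintptr_t)leftEyeResolveTex, vr::API_OpenGL, vr::ColorSpace_Gamma };
	compositor->Submit( vr::Eye_Left, &leftEye );
	vr::Texture_t rightEye = { (void*)(uintptr_t)rightEyeResolveTex, vr::API_OpenGL, vr::ColorSpace_Gamma };
	compositor->Submit( vr::Eye_Right, &rightEye );
	glFlush(); // per the Submit() note above: signal the driver after submitting both eyes

	// Optional: read back timing for the most recent frame.
	vr::Compositor_FrameTiming timing;
	timing.m_nSize = sizeof( vr::Compositor_FrameTiming ); // required before calling GetFrameTiming
	if ( compositor->GetFrameTiming( &timing, 0 ) )
	{
		// e.g. inspect timing.m_flTotalRenderGpuMs here
	}
}
// --- end sketch ---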
This color is faded on top of the scene based on the alpha parameter. Removing the fade color instantly + * would be FadeToColor( 0.0, 0.0, 0.0, 0.0, 0.0 ). Values are in un-premultiplied alpha space. */ + virtual void FadeToColor( float fSeconds, float fRed, float fGreen, float fBlue, float fAlpha, bool bBackground = false ) = 0; + + /** Fading the Grid in or out in fSeconds */ + virtual void FadeGrid( float fSeconds, bool bFadeIn ) = 0; + + /** Override the skybox used in the compositor (e.g. for during level loads when the app can't feed scene images fast enough) + * Order is Front, Back, Left, Right, Top, Bottom. If only a single texture is passed, it is assumed in lat-long format. + * If two are passed, it is assumed a lat-long stereo pair. */ + virtual EVRCompositorError SetSkyboxOverride( VR_ARRAY_COUNT( unTextureCount ) const Texture_t *pTextures, uint32_t unTextureCount ) = 0; + + /** Resets compositor skybox back to defaults. */ + virtual void ClearSkyboxOverride() = 0; + + /** Brings the compositor window to the front. This is useful for covering any other window that may be on the HMD + * and is obscuring the compositor window. */ + virtual void CompositorBringToFront() = 0; + + /** Pushes the compositor window to the back. This is useful for allowing other applications to draw directly to the HMD. */ + virtual void CompositorGoToBack() = 0; + + /** Tells the compositor process to clean up and exit. You do not need to call this function at shutdown. Under normal + * circumstances the compositor will manage its own life cycle based on what applications are running. */ + virtual void CompositorQuit() = 0; + + /** Return whether the compositor is fullscreen */ + virtual bool IsFullscreen() = 0; + + /** Returns the process ID of the process that is currently rendering the scene */ + virtual uint32_t GetCurrentSceneFocusProcess() = 0; + + /** Returns the process ID of the process that rendered the last frame (or 0 if the compositor itself rendered the frame.) + * Returns 0 when fading out from an app and the app's process Id when fading into an app. */ + virtual uint32_t GetLastFrameRenderer() = 0; + + /** Returns true if the current process has the scene focus */ + virtual bool CanRenderScene() = 0; + + /** Creates a window on the primary monitor to display what is being shown in the headset. */ + virtual void ShowMirrorWindow() = 0; + + /** Closes the mirror window. */ + virtual void HideMirrorWindow() = 0; + + /** Returns true if the mirror window is shown. */ + virtual bool IsMirrorWindowVisible() = 0; + + /** Writes all images that the compositor knows about (including overlays) to a 'screenshots' folder in the SteamVR runtime root. */ + virtual void CompositorDumpImages() = 0; + + /** Let an app know it should be rendering with low resources. */ + virtual bool ShouldAppRenderWithLowResources() = 0; + + /** Override interleaved reprojection logic to force on. */ + virtual void ForceInterleavedReprojectionOn( bool bOverride ) = 0; + + /** Force reconnecting to the compositor process. */ + virtual void ForceReconnectProcess() = 0; + + /** Temporarily suspends rendering (useful for finer control over scene transitions). */ + virtual void SuspendRendering( bool bSuspend ) = 0; + + /** Screenshot support */ + + /** These functions are no longer used and will be removed in + * a future update. 
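// --- Illustrative sketch, not part of the vendored header or this patch ---
// Using the fade and skybox overrides above around a level load. The six
// textures are placeholders supplied by the caller; the Texture_t layout
// follows the same assumption as the Submit() sketch.
void BeginLevelLoad( vr::IVRCompositor *compositor, const vr::Texture_t skybox[6] )
{
	// Fade the HMD view to opaque black over half a second (un-premultiplied alpha).
	compositor->FadeToColor( 0.5f, 0.f, 0.f, 0.f, 1.f );
	// Six textures in the documented order: Front, Back, Left, Right, Top, Bottom.
	compositor->SetSkyboxOverride( skybox, 6 );
}

void EndLevelLoad( vr::IVRCompositor *compositor )
{
	compositor->ClearSkyboxOverride();
	compositor->FadeToColor( 0.5f, 0.f, 0.f, 0.f, 0.f ); // fade back in
}
// --- end sketch ---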
Use the functions via the + * IVRScreenshots interface */ + virtual vr::EVRCompositorError RequestScreenshot( vr::EVRScreenshotType type, const char *pchDestinationFileName, const char *pchVRDestinationFileName ) = 0; + virtual vr::EVRScreenshotType GetCurrentScreenshotType() = 0; + + /** Opens a shared D3D11 texture with the undistorted composited image for each eye. */ + virtual vr::EVRCompositorError GetMirrorTextureD3D11( vr::EVREye eEye, void *pD3D11DeviceOrResource, void **ppD3D11ShaderResourceView ) = 0; + + /** Access to mirror textures from OpenGL. */ + virtual vr::EVRCompositorError GetMirrorTextureGL( vr::EVREye eEye, vr::glUInt_t *pglTextureId, vr::glSharedTextureHandle_t *pglSharedTextureHandle ) = 0; + virtual bool ReleaseSharedGLTexture( vr::glUInt_t glTextureId, vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; + virtual void LockGLSharedTextureForAccess( vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; + virtual void UnlockGLSharedTextureForAccess( vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; +}; + +static const char * const IVRCompositor_Version = "IVRCompositor_015"; + +} // namespace vr + + + +// ivrnotifications.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +// Used for passing graphic data +struct NotificationBitmap_t +{ + NotificationBitmap_t() + : m_pImageData( nullptr ) + , m_nWidth( 0 ) + , m_nHeight( 0 ) + , m_nBytesPerPixel( 0 ) + { + }; + + void *m_pImageData; + int32_t m_nWidth; + int32_t m_nHeight; + int32_t m_nBytesPerPixel; +}; + + +/** Be aware that the notification type is used as 'priority' to pick the next notification */ +enum EVRNotificationType +{ + /** Transient notifications are automatically hidden after a period of time set by the user. + * They are used for things like information and chat messages that do not require user interaction. */ + EVRNotificationType_Transient = 0, + + /** Persistent notifications are shown to the user until they are hidden by calling RemoveNotification(). + * They are used for things like phone calls and alarms that require user interaction. */ + EVRNotificationType_Persistent = 1, + + /** System notifications are shown no matter what. It is expected, that the ulUserValue is used as ID. + * If there is already a system notification in the queue with that ID it is not accepted into the queue + * to prevent spamming with system notification */ + EVRNotificationType_Transient_SystemWithUserValue = 2, +}; + +enum EVRNotificationStyle +{ + /** Creates a notification with minimal external styling. */ + EVRNotificationStyle_None = 0, + + /** Used for notifications about overlay-level status. In Steam this is used for events like downloads completing. */ + EVRNotificationStyle_Application = 100, + + /** Used for notifications about contacts that are unknown or not available. In Steam this is used for friend invitations and offline friends. */ + EVRNotificationStyle_Contact_Disabled = 200, + + /** Used for notifications about contacts that are available but inactive. 
In Steam this is used for friends that are online but not playing a game. */ + EVRNotificationStyle_Contact_Enabled = 201, + + /** Used for notifications about contacts that are available and active. In Steam this is used for friends that are online and currently running a game. */ + EVRNotificationStyle_Contact_Active = 202, +}; + +static const uint32_t k_unNotificationTextMaxSize = 256; + +typedef uint32_t VRNotificationId; + + + +#pragma pack( pop ) + +/** Allows notification sources to interact with the VR system + This current interface is not yet implemented. Do not use yet. */ +class IVRNotifications +{ +public: + /** Create a notification and enqueue it to be shown to the user. + * An overlay handle is required to create a notification, as otherwise it would be impossible for a user to act on it. + * To create a two-line notification, use a line break ('\n') to split the text into two lines. + * The pImage argument may be NULL, in which case the specified overlay's icon will be used instead. */ + virtual EVRNotificationError CreateNotification( VROverlayHandle_t ulOverlayHandle, uint64_t ulUserValue, EVRNotificationType type, const char *pchText, EVRNotificationStyle style, const NotificationBitmap_t *pImage, /* out */ VRNotificationId *pNotificationId ) = 0; + + /** Destroy a notification, hiding it first if it currently shown to the user. */ + virtual EVRNotificationError RemoveNotification( VRNotificationId notificationId ) = 0; + +}; + +static const char * const IVRNotifications_Version = "IVRNotifications_002"; + +} // namespace vr + + + +// ivroverlay.h +namespace vr +{ + + /** The maximum length of an overlay key in bytes, counting the terminating null character. */ + static const uint32_t k_unVROverlayMaxKeyLength = 128; + + /** The maximum length of an overlay name in bytes, counting the terminating null character. */ + static const uint32_t k_unVROverlayMaxNameLength = 128; + + /** The maximum number of overlays that can exist in the system at one time. */ + static const uint32_t k_unMaxOverlayCount = 32; + + /** Types of input supported by VR Overlays */ + enum VROverlayInputMethod + { + VROverlayInputMethod_None = 0, // No input events will be generated automatically for this overlay + VROverlayInputMethod_Mouse = 1, // Tracked controllers will get mouse events automatically + }; + + /** Allows the caller to figure out which overlay transform getter to call. */ + enum VROverlayTransformType + { + VROverlayTransform_Absolute = 0, + VROverlayTransform_TrackedDeviceRelative = 1, + VROverlayTransform_SystemOverlay = 2, + VROverlayTransform_TrackedComponent = 3, + }; + + /** Overlay control settings */ + enum VROverlayFlags + { + VROverlayFlags_None = 0, + + // The following only take effect when rendered using the high quality render path (see SetHighQualityOverlay). 
+ VROverlayFlags_Curved = 1, + VROverlayFlags_RGSS4X = 2, + + // Set this flag on a dashboard overlay to prevent a tab from showing up for that overlay + VROverlayFlags_NoDashboardTab = 3, + + // Set this flag on a dashboard that is able to deal with gamepad focus events + VROverlayFlags_AcceptsGamepadEvents = 4, + + // Indicates that the overlay should dim/brighten to show gamepad focus + VROverlayFlags_ShowGamepadFocus = 5, + + // When in VROverlayInputMethod_Mouse you can optionally enable sending VRScroll_t + VROverlayFlags_SendVRScrollEvents = 6, + VROverlayFlags_SendVRTouchpadEvents = 7, + + // If set this will render a vertical scroll wheel on the primary controller, + // only needed if not using VROverlayFlags_SendVRScrollEvents but you still want to represent a scroll wheel + VROverlayFlags_ShowTouchPadScrollWheel = 8, + + // If this is set ownership and render access to the overlay are transferred + // to the new scene process on a call to IVRApplications::LaunchInternalProcess + VROverlayFlags_TransferOwnershipToInternalProcess = 9, + + // If set, renders 50% of the texture in each eye, side by side + VROverlayFlags_SideBySide_Parallel = 10, // Texture is left/right + VROverlayFlags_SideBySide_Crossed = 11, // Texture is crossed and right/left + + VROverlayFlags_Panorama = 12, // Texture is a panorama + VROverlayFlags_StereoPanorama = 13, // Texture is a stereo panorama + }; + + struct VROverlayIntersectionParams_t + { + HmdVector3_t vSource; + HmdVector3_t vDirection; + ETrackingUniverseOrigin eOrigin; + }; + + struct VROverlayIntersectionResults_t + { + HmdVector3_t vPoint; + HmdVector3_t vNormal; + HmdVector2_t vUVs; + float fDistance; + }; + + // Input modes for the Big Picture gamepad text entry + enum EGamepadTextInputMode + { + k_EGamepadTextInputModeNormal = 0, + k_EGamepadTextInputModePassword = 1, + k_EGamepadTextInputModeSubmit = 2, + }; + + // Controls number of allowed lines for the Big Picture gamepad text entry + enum EGamepadTextInputLineMode + { + k_EGamepadTextInputLineModeSingleLine = 0, + k_EGamepadTextInputLineModeMultipleLines = 1 + }; + + /** Directions for changing focus between overlays with the gamepad */ + enum EOverlayDirection + { + OverlayDirection_Up = 0, + OverlayDirection_Down = 1, + OverlayDirection_Left = 2, + OverlayDirection_Right = 3, + + OverlayDirection_Count = 4, + }; + + class IVROverlay + { + public: + + // --------------------------------------------- + // Overlay management methods + // --------------------------------------------- + + /** Finds an existing overlay with the specified key. */ + virtual EVROverlayError FindOverlay( const char *pchOverlayKey, VROverlayHandle_t * pOverlayHandle ) = 0; + + /** Creates a new named overlay. All overlays start hidden and with default settings. */ + virtual EVROverlayError CreateOverlay( const char *pchOverlayKey, const char *pchOverlayFriendlyName, VROverlayHandle_t * pOverlayHandle ) = 0; + + /** Destroys the specified overlay. When an application calls VR_Shutdown all overlays created by that app are + * automatically destroyed. */ + virtual EVROverlayError DestroyOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Specify which overlay to use the high quality render path. This overlay will be composited in during the distortion pass which + * results in it drawing on top of everything else, but also at a higher quality as it samples the source texture directly rather than + * rasterizing into each eye's render texture first. 
Because of this, only one of these is supported at any given time. It is most useful + * for overlays that are expected to take up most of the user's view (e.g. streaming video). + * This mode does not support mouse input to your overlay. */ + virtual EVROverlayError SetHighQualityOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns the overlay handle of the current overlay being rendered using the single high quality overlay render path. + * Otherwise it will return k_ulOverlayHandleInvalid. */ + virtual vr::VROverlayHandle_t GetHighQualityOverlay() = 0; + + /** Fills the provided buffer with the string key of the overlay. Returns the size of buffer required to store the key, including + * the terminating null character. k_unVROverlayMaxKeyLength will be enough bytes to fit the string. */ + virtual uint32_t GetOverlayKey( VROverlayHandle_t ulOverlayHandle, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, EVROverlayError *pError = 0L ) = 0; + + /** Fills the provided buffer with the friendly name of the overlay. Returns the size of buffer required to store the key, including + * the terminating null character. k_unVROverlayMaxNameLength will be enough bytes to fit the string. */ + virtual uint32_t GetOverlayName( VROverlayHandle_t ulOverlayHandle, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, EVROverlayError *pError = 0L ) = 0; + + /** Gets the raw image data from an overlay. Overlay image data is always returned as RGBA data, 4 bytes per pixel. If the buffer is not large enough, width and height + * will be set and VROverlayError_ArrayTooSmall is returned. */ + virtual EVROverlayError GetOverlayImageData( VROverlayHandle_t ulOverlayHandle, void *pvBuffer, uint32_t unBufferSize, uint32_t *punWidth, uint32_t *punHeight ) = 0; + + /** returns a string that corresponds with the specified overlay error. The string will be the name + * of the error enum value for all valid error codes */ + virtual const char *GetOverlayErrorNameFromEnum( EVROverlayError error ) = 0; + + + // --------------------------------------------- + // Overlay rendering methods + // --------------------------------------------- + + /** Sets the pid that is allowed to render to this overlay (the creator pid is always allowed to render), + * by default this is the pid of the process that made the overlay */ + virtual EVROverlayError SetOverlayRenderingPid( VROverlayHandle_t ulOverlayHandle, uint32_t unPID ) = 0; + + /** Gets the pid that is allowed to render to this overlay */ + virtual uint32_t GetOverlayRenderingPid( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Specify flag setting for a given overlay */ + virtual EVROverlayError SetOverlayFlag( VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool bEnabled ) = 0; + + /** Gets the flag setting for a given overlay */ + virtual EVROverlayError GetOverlayFlag( VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool *pbEnabled ) = 0; + + /** Sets the color tint of the overlay quad. Use 0.0 to 1.0 per channel. */ + virtual EVROverlayError SetOverlayColor( VROverlayHandle_t ulOverlayHandle, float fRed, float fGreen, float fBlue ) = 0; + + /** Gets the color tint of the overlay quad. */ + virtual EVROverlayError GetOverlayColor( VROverlayHandle_t ulOverlayHandle, float *pfRed, float *pfGreen, float *pfBlue ) = 0; + + /** Sets the alpha of the overlay quad. Use 1.0 for 100 percent opacity to 0.0 for 0 percent opacity.
*/ + virtual EVROverlayError SetOverlayAlpha( VROverlayHandle_t ulOverlayHandle, float fAlpha ) = 0; + + /** Gets the alpha of the overlay quad. By default overlays are rendering at 100 percent alpha (1.0). */ + virtual EVROverlayError GetOverlayAlpha( VROverlayHandle_t ulOverlayHandle, float *pfAlpha ) = 0; + + /** Sets the width of the overlay quad in meters. By default overlays are rendered on a quad that is 1 meter across */ + virtual EVROverlayError SetOverlayWidthInMeters( VROverlayHandle_t ulOverlayHandle, float fWidthInMeters ) = 0; + + /** Returns the width of the overlay quad in meters. By default overlays are rendered on a quad that is 1 meter across */ + virtual EVROverlayError GetOverlayWidthInMeters( VROverlayHandle_t ulOverlayHandle, float *pfWidthInMeters ) = 0; + + /** For high-quality curved overlays only, sets the distance range in meters from the overlay used to automatically curve + * the surface around the viewer. Min is distance is when the surface will be most curved. Max is when least curved. */ + virtual EVROverlayError SetOverlayAutoCurveDistanceRangeInMeters( VROverlayHandle_t ulOverlayHandle, float fMinDistanceInMeters, float fMaxDistanceInMeters ) = 0; + + /** For high-quality curved overlays only, gets the distance range in meters from the overlay used to automatically curve + * the surface around the viewer. Min is distance is when the surface will be most curved. Max is when least curved. */ + virtual EVROverlayError GetOverlayAutoCurveDistanceRangeInMeters( VROverlayHandle_t ulOverlayHandle, float *pfMinDistanceInMeters, float *pfMaxDistanceInMeters ) = 0; + + /** Sets the colorspace the overlay texture's data is in. Defaults to 'auto'. + * If the texture needs to be resolved, you should call SetOverlayTexture with the appropriate colorspace instead. */ + virtual EVROverlayError SetOverlayTextureColorSpace( VROverlayHandle_t ulOverlayHandle, EColorSpace eTextureColorSpace ) = 0; + + /** Gets the overlay's current colorspace setting. */ + virtual EVROverlayError GetOverlayTextureColorSpace( VROverlayHandle_t ulOverlayHandle, EColorSpace *peTextureColorSpace ) = 0; + + /** Sets the part of the texture to use for the overlay. UV Min is the upper left corner and UV Max is the lower right corner. */ + virtual EVROverlayError SetOverlayTextureBounds( VROverlayHandle_t ulOverlayHandle, const VRTextureBounds_t *pOverlayTextureBounds ) = 0; + + /** Gets the part of the texture to use for the overlay. UV Min is the upper left corner and UV Max is the lower right corner. */ + virtual EVROverlayError GetOverlayTextureBounds( VROverlayHandle_t ulOverlayHandle, VRTextureBounds_t *pOverlayTextureBounds ) = 0; + + /** Returns the transform type of this overlay. */ + virtual EVROverlayError GetOverlayTransformType( VROverlayHandle_t ulOverlayHandle, VROverlayTransformType *peTransformType ) = 0; + + /** Sets the transform to absolute tracking origin. */ + virtual EVROverlayError SetOverlayTransformAbsolute( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, const HmdMatrix34_t *pmatTrackingOriginToOverlayTransform ) = 0; + + /** Gets the transform if it is absolute. Returns an error if the transform is some other type. */ + virtual EVROverlayError GetOverlayTransformAbsolute( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin *peTrackingOrigin, HmdMatrix34_t *pmatTrackingOriginToOverlayTransform ) = 0; + + /** Sets the transform to relative to the transform of the specified tracked device. 
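	 * For illustration, a sketch that parents an existing overlay to a tracked controller
	 * half a meter in front of it (hOverlay and unControllerIndex are assumed to come from
	 * CreateOverlay and from tracked-device enumeration, respectively):
	 *
	 *     vr::HmdMatrix34_t offset = { { { 1.0f, 0.0f, 0.0f,  0.0f },
	 *                                    { 0.0f, 1.0f, 0.0f,  0.0f },
	 *                                    { 0.0f, 0.0f, 1.0f, -0.5f } } };  // identity rotation, -0.5 m along the device's Z
	 *     vr::VROverlay()->SetOverlayTransformTrackedDeviceRelative( hOverlay, unControllerIndex, &offset );
	 *     vr::VROverlay()->ShowOverlay( hOverlay );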
*/ + virtual EVROverlayError SetOverlayTransformTrackedDeviceRelative( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unTrackedDevice, const HmdMatrix34_t *pmatTrackedDeviceToOverlayTransform ) = 0; + + /** Gets the transform if it is relative to a tracked device. Returns an error if the transform is some other type. */ + virtual EVROverlayError GetOverlayTransformTrackedDeviceRelative( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t *punTrackedDevice, HmdMatrix34_t *pmatTrackedDeviceToOverlayTransform ) = 0; + + /** Sets the transform to draw the overlay on a rendermodel component mesh instead of a quad. This will only draw when the system is + * drawing the device. Overlays with this transform type cannot receive mouse events. */ + virtual EVROverlayError SetOverlayTransformTrackedDeviceComponent( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unDeviceIndex, const char *pchComponentName ) = 0; + + /** Gets the transform information when the overlay is rendering on a component. */ + virtual EVROverlayError GetOverlayTransformTrackedDeviceComponent( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t *punDeviceIndex, char *pchComponentName, uint32_t unComponentNameSize ) = 0; + + /** Shows the VR overlay. For dashboard overlays, only the Dashboard Manager is allowed to call this. */ + virtual EVROverlayError ShowOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Hides the VR overlay. For dashboard overlays, only the Dashboard Manager is allowed to call this. */ + virtual EVROverlayError HideOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns true if the overlay is visible. */ + virtual bool IsOverlayVisible( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Get the transform in 3d space associated with a specific 2d point in the overlay's coordinate space (where 0,0 is the lower left). -Z points out of the overlay */ + virtual EVROverlayError GetTransformForOverlayCoordinates( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, HmdVector2_t coordinatesInOverlay, HmdMatrix34_t *pmatTransform ) = 0; + + // --------------------------------------------- + // Overlay input methods + // --------------------------------------------- + + /** Returns true and fills the event with the next event on the overlay's event queue, if there is one. + * If there are no events this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextOverlayEvent( VROverlayHandle_t ulOverlayHandle, VREvent_t *pEvent, uint32_t uncbVREvent ) = 0; + + /** Returns the current input settings for the specified overlay. */ + virtual EVROverlayError GetOverlayInputMethod( VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod *peInputMethod ) = 0; + + /** Sets the input settings for the specified overlay. */ + virtual EVROverlayError SetOverlayInputMethod( VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod eInputMethod ) = 0; + + /** Gets the mouse scaling factor that is used for mouse events. The actual texture may be a different size, but this is + * typically the size of the underlying UI in pixels. */ + virtual EVROverlayError GetOverlayMouseScale( VROverlayHandle_t ulOverlayHandle, HmdVector2_t *pvecMouseScale ) = 0; + + /** Sets the mouse scaling factor that is used for mouse events. The actual texture may be a different size, but this is + * typically the size of the underlying UI in pixels (not in world space). 
*/ + virtual EVROverlayError SetOverlayMouseScale( VROverlayHandle_t ulOverlayHandle, const HmdVector2_t *pvecMouseScale ) = 0; + + /** Computes the overlay-space pixel coordinates of where the ray intersects the overlay with the + * specified settings. Returns false if there is no intersection. */ + virtual bool ComputeOverlayIntersection( VROverlayHandle_t ulOverlayHandle, const VROverlayIntersectionParams_t *pParams, VROverlayIntersectionResults_t *pResults ) = 0; + + /** Processes mouse input from the specified controller as though it were a mouse pointed at a compositor overlay with the + * specified settings. The controller is treated like a laser pointer on the -z axis. The point where the laser pointer would + * intersect with the overlay is the mouse position, the trigger is left mouse, and the track pad is right mouse. + * + * Return true if the controller is pointed at the overlay and an event was generated. */ + virtual bool HandleControllerOverlayInteractionAsMouse( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unControllerDeviceIndex ) = 0; + + /** Returns true if the specified overlay is the hover target. An overlay is the hover target when it is the last overlay "moused over" + * by the virtual mouse pointer */ + virtual bool IsHoverTargetOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns the current Gamepad focus overlay */ + virtual vr::VROverlayHandle_t GetGamepadFocusOverlay() = 0; + + /** Sets the current Gamepad focus overlay */ + virtual EVROverlayError SetGamepadFocusOverlay( VROverlayHandle_t ulNewFocusOverlay ) = 0; + + /** Sets an overlay's neighbor. This will also set the neighbor of the "to" overlay + * to point back to the "from" overlay. If an overlay's neighbor is set to invalid both + * ends will be cleared */ + virtual EVROverlayError SetOverlayNeighbor( EOverlayDirection eDirection, VROverlayHandle_t ulFrom, VROverlayHandle_t ulTo ) = 0; + + /** Changes the Gamepad focus from one overlay to one of its neighbors. Returns VROverlayError_NoNeighbor if there is no + * neighbor in that direction */ + virtual EVROverlayError MoveGamepadFocusToNeighbor( EOverlayDirection eDirection, VROverlayHandle_t ulFrom ) = 0; + + // --------------------------------------------- + // Overlay texture methods + // --------------------------------------------- + + /** Texture to draw for the overlay. This function can only be called by the overlay's creator or renderer process (see SetOverlayRenderingPid) . + * + * OpenGL dirty state: + * glBindTexture + */ + virtual EVROverlayError SetOverlayTexture( VROverlayHandle_t ulOverlayHandle, const Texture_t *pTexture ) = 0; + + /** Use this to tell the overlay system to release the texture set for this overlay. */ + virtual EVROverlayError ClearOverlayTexture( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Separate interface for providing the data as a stream of bytes, but there is an upper bound on data + * that can be sent. This function can only be called by the overlay's renderer process. */ + virtual EVROverlayError SetOverlayRaw( VROverlayHandle_t ulOverlayHandle, void *pvBuffer, uint32_t unWidth, uint32_t unHeight, uint32_t unDepth ) = 0; + + /** Separate interface for providing the image through a filename: can be png or jpg, and should not be bigger than 1920x1080. 
+ * This function can only be called by the overlay's renderer process */ + virtual EVROverlayError SetOverlayFromFile( VROverlayHandle_t ulOverlayHandle, const char *pchFilePath ) = 0; + + /** Get the native texture handle/device for an overlay you have created. + * On windows this handle will be a ID3D11ShaderResourceView with a ID3D11Texture2D bound. + * + * The texture will always be sized to match the backing texture you supplied in SetOverlayTexture above. + * + * You MUST call ReleaseNativeOverlayHandle() with pNativeTextureHandle once you are done with this texture. + * + * pNativeTextureHandle is an OUTPUT, it will be a pointer to a ID3D11ShaderResourceView *. + * pNativeTextureRef is an INPUT and should be a ID3D11Resource *. The device used by pNativeTextureRef will be used to bind pNativeTextureHandle. + */ + virtual EVROverlayError GetOverlayTexture( VROverlayHandle_t ulOverlayHandle, void **pNativeTextureHandle, void *pNativeTextureRef, uint32_t *pWidth, uint32_t *pHeight, uint32_t *pNativeFormat, EGraphicsAPIConvention *pAPI, EColorSpace *pColorSpace ) = 0; + + /** Release the pNativeTextureHandle provided from the GetOverlayTexture call, this allows the system to free the underlying GPU resources for this object, + * so only do it once you stop rendering this texture. + */ + virtual EVROverlayError ReleaseNativeOverlayHandle( VROverlayHandle_t ulOverlayHandle, void *pNativeTextureHandle ) = 0; + + /** Get the size of the overlay texture */ + virtual EVROverlayError GetOverlayTextureSize( VROverlayHandle_t ulOverlayHandle, uint32_t *pWidth, uint32_t *pHeight ) = 0; + + // ---------------------------------------------- + // Dashboard Overlay Methods + // ---------------------------------------------- + + /** Creates a dashboard overlay and returns its handle */ + virtual EVROverlayError CreateDashboardOverlay( const char *pchOverlayKey, const char *pchOverlayFriendlyName, VROverlayHandle_t * pMainHandle, VROverlayHandle_t *pThumbnailHandle ) = 0; + + /** Returns true if the dashboard is visible */ + virtual bool IsDashboardVisible() = 0; + + /** returns true if the dashboard is visible and the specified overlay is the active system Overlay */ + virtual bool IsActiveDashboardOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Sets the dashboard overlay to only appear when the specified process ID has scene focus */ + virtual EVROverlayError SetDashboardOverlaySceneProcess( VROverlayHandle_t ulOverlayHandle, uint32_t unProcessId ) = 0; + + /** Gets the process ID that this dashboard overlay requires to have scene focus */ + virtual EVROverlayError GetDashboardOverlaySceneProcess( VROverlayHandle_t ulOverlayHandle, uint32_t *punProcessId ) = 0; + + /** Shows the dashboard. 
*/ + virtual void ShowDashboard( const char *pchOverlayToShow ) = 0; + + /** Returns the tracked device that has the laser pointer in the dashboard */ + virtual vr::TrackedDeviceIndex_t GetPrimaryDashboardDevice() = 0; + + // --------------------------------------------- + // Keyboard methods + // --------------------------------------------- + + /** Show the virtual keyboard to accept input **/ + virtual EVROverlayError ShowKeyboard( EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, const char *pchDescription, uint32_t unCharMax, const char *pchExistingText, bool bUseMinimalMode, uint64_t uUserValue ) = 0; + + virtual EVROverlayError ShowKeyboardForOverlay( VROverlayHandle_t ulOverlayHandle, EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, const char *pchDescription, uint32_t unCharMax, const char *pchExistingText, bool bUseMinimalMode, uint64_t uUserValue ) = 0; + + /** Get the text that was entered into the text input **/ + virtual uint32_t GetKeyboardText( VR_OUT_STRING() char *pchText, uint32_t cchText ) = 0; + + /** Hide the virtual keyboard **/ + virtual void HideKeyboard() = 0; + + /** Set the position of the keyboard in world space **/ + virtual void SetKeyboardTransformAbsolute( ETrackingUniverseOrigin eTrackingOrigin, const HmdMatrix34_t *pmatTrackingOriginToKeyboardTransform ) = 0; + + /** Set the position of the keyboard in overlay space by telling it to avoid a rectangle in the overlay. Rectangle coords have (0,0) in the bottom left **/ + virtual void SetKeyboardPositionForOverlay( VROverlayHandle_t ulOverlayHandle, HmdRect2_t avoidRect ) = 0; + + }; + + static const char * const IVROverlay_Version = "IVROverlay_012"; + +} // namespace vr + +// ivrrendermodels.h +namespace vr +{ + +static const char * const k_pch_Controller_Component_GDC2015 = "gdc2015"; // Canonical coordinate system of the gdc 2015 wired controller, provided for backwards compatibility +static const char * const k_pch_Controller_Component_Base = "base"; // For controllers with an unambiguous 'base'. +static const char * const k_pch_Controller_Component_Tip = "tip"; // For controllers with an unambiguous 'tip' (used for 'laser-pointing') +static const char * const k_pch_Controller_Component_HandGrip = "handgrip"; // Neutral, ambidextrous hand-pose when holding controller. On plane between neutrally posed index finger and thumb +static const char * const k_pch_Controller_Component_Status = "status"; // 1:1 aspect ratio status area, with canonical [0,1] uv mapping + +#if defined(__linux__) || defined(__APPLE__) +// The 32-bit version of gcc has the alignment requirement for uint64 and double set to +// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. +// The 64-bit version of gcc has the alignment requirement for these types set to +// 8 meaning that unless we use #pragma pack(4) our structures will get bigger. +// The 64-bit structure packing has to match the 32-bit structure packing for each platform. 
+#pragma pack( push, 4 ) +#else +#pragma pack( push, 8 ) +#endif + +/** Errors that can occur with the VR compositor */ +enum EVRRenderModelError +{ + VRRenderModelError_None = 0, + VRRenderModelError_Loading = 100, + VRRenderModelError_NotSupported = 200, + VRRenderModelError_InvalidArg = 300, + VRRenderModelError_InvalidModel = 301, + VRRenderModelError_NoShapes = 302, + VRRenderModelError_MultipleShapes = 303, + VRRenderModelError_TooManyVertices = 304, + VRRenderModelError_MultipleTextures = 305, + VRRenderModelError_BufferTooSmall = 306, + VRRenderModelError_NotEnoughNormals = 307, + VRRenderModelError_NotEnoughTexCoords = 308, + + VRRenderModelError_InvalidTexture = 400, +}; + +typedef uint32_t VRComponentProperties; + +enum EVRComponentProperty +{ + VRComponentProperty_IsStatic = (1 << 0), + VRComponentProperty_IsVisible = (1 << 1), + VRComponentProperty_IsTouched = (1 << 2), + VRComponentProperty_IsPressed = (1 << 3), + VRComponentProperty_IsScrolled = (1 << 4), +}; + +/** Describes state information about a render-model component, including transforms and other dynamic properties */ +struct RenderModel_ComponentState_t +{ + HmdMatrix34_t mTrackingToComponentRenderModel; // Transform required when drawing the component render model + HmdMatrix34_t mTrackingToComponentLocal; // Transform available for attaching to a local component coordinate system (-Z out from surface ) + VRComponentProperties uProperties; +}; + +/** A single vertex in a render model */ +struct RenderModel_Vertex_t +{ + HmdVector3_t vPosition; // position in meters in device space + HmdVector3_t vNormal; + float rfTextureCoord[2]; +}; + +/** A texture map for use on a render model */ +struct RenderModel_TextureMap_t +{ + uint16_t unWidth, unHeight; // width and height of the texture map in pixels + const uint8_t *rubTextureMapData; // Map texture data. All textures are RGBA with 8 bits per channel per pixel. Data size is width * height * 4ub +}; + +/** Session unique texture identifier. Rendermodels which share the same texture will have the same id. +IDs <0 denote the texture is not present */ + +typedef int32_t TextureID_t; + +const TextureID_t INVALID_TEXTURE_ID = -1; + +struct RenderModel_t +{ + const RenderModel_Vertex_t *rVertexData; // Vertex data for the mesh + uint32_t unVertexCount; // Number of vertices in the vertex data + const uint16_t *rIndexData; // Indices into the vertex data for each triangle + uint32_t unTriangleCount; // Number of triangles in the mesh. Index count is 3 * TriangleCount + TextureID_t diffuseTextureId; // Session unique texture identifier. Rendermodels which share the same texture will have the same id. <0 == texture not present +}; + +struct RenderModel_ControllerMode_State_t +{ + bool bScrollWheelVisible; // is this controller currently set to be in a scroll wheel mode +}; + +#pragma pack( pop ) + +class IVRRenderModels +{ +public: + + /** Loads and returns a render model for use in the application. pchRenderModelName should be a render model name + * from the Prop_RenderModelName_String property or an absolute path name to a render model on disk. + * + * The resulting render model is valid until VR_Shutdown() is called or until FreeRenderModel() is called. When the + * application is finished with the render model it should call FreeRenderModel() to free the memory associated + * with the model. + * + * The method returns VRRenderModelError_Loading while the render model is still being loaded. 
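 * For example, a caller may simply poll until the asynchronous load completes (sketch;
 * pchRenderModelName is assumed to have been read from Prop_RenderModelName_String, and
 * the sleep call requires <thread> and <chrono>):
 *
 *     vr::RenderModel_t *pModel = nullptr;
 *     vr::EVRRenderModelError err;
 *     while ( ( err = vr::VRRenderModels()->LoadRenderModel_Async( pchRenderModelName, &pModel ) )
 *             == vr::VRRenderModelError_Loading )
 *     {
 *         std::this_thread::sleep_for( std::chrono::milliseconds( 1 ) );
 *     }
 *     if ( err == vr::VRRenderModelError_None )
 *     {
 *         // ... build GPU buffers from pModel ...
 *         vr::VRRenderModels()->FreeRenderModel( pModel );
 *     }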
+ * The method returns VRRenderModelError_None once loaded successfully, otherwise will return an error. */ + virtual EVRRenderModelError LoadRenderModel_Async( const char *pchRenderModelName, RenderModel_t **ppRenderModel ) = 0; + + /** Frees a previously returned render model + * It is safe to call this on a null ptr. */ + virtual void FreeRenderModel( RenderModel_t *pRenderModel ) = 0; + + /** Loads and returns a texture for use in the application. */ + virtual EVRRenderModelError LoadTexture_Async( TextureID_t textureId, RenderModel_TextureMap_t **ppTexture ) = 0; + + /** Frees a previously returned texture + * It is safe to call this on a null ptr. */ + virtual void FreeTexture( RenderModel_TextureMap_t *pTexture ) = 0; + + /** Creates a D3D11 texture and loads data into it. */ + virtual EVRRenderModelError LoadTextureD3D11_Async( TextureID_t textureId, void *pD3D11Device, void **ppD3D11Texture2D ) = 0; + + /** Helper function to copy the bits into an existing texture. */ + virtual EVRRenderModelError LoadIntoTextureD3D11_Async( TextureID_t textureId, void *pDstTexture ) = 0; + + /** Use this to free textures created with LoadTextureD3D11_Async instead of calling Release on them. */ + virtual void FreeTextureD3D11( void *pD3D11Texture2D ) = 0; + + /** Use this to get the names of available render models. Index does not correlate to a tracked device index, but + * is only used for iterating over all available render models. If the index is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetRenderModelName( uint32_t unRenderModelIndex, VR_OUT_STRING() char *pchRenderModelName, uint32_t unRenderModelNameLen ) = 0; + + /** Returns the number of available render models. */ + virtual uint32_t GetRenderModelCount() = 0; + + + /** Returns the number of components of the specified render model. + * Components are useful when client application wish to draw, label, or otherwise interact with components of tracked objects. + * Examples controller components: + * renderable things such as triggers, buttons + * non-renderable things which include coordinate systems such as 'tip', 'base', a neutral controller agnostic hand-pose + * If all controller components are enumerated and rendered, it will be equivalent to drawing the traditional render model + * Returns 0 if components not supported, >0 otherwise */ + virtual uint32_t GetComponentCount( const char *pchRenderModelName ) = 0; + + /** Use this to get the names of available components. Index does not correlate to a tracked device index, but + * is only used for iterating over all available components. If the index is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetComponentName( const char *pchRenderModelName, uint32_t unComponentIndex, VR_OUT_STRING( ) char *pchComponentName, uint32_t unComponentNameLen ) = 0; + + /** Get the button mask for all buttons associated with this component + * If no buttons (or axes) are associated with this component, return 0 + * Note: multiple components may be associated with the same button. Ex: two grip buttons on a single controller. + * Note: A single component may be associated with multiple buttons. 
Ex: A trackpad which also provides "D-pad" functionality */ + virtual uint64_t GetComponentButtonMask( const char *pchRenderModelName, const char *pchComponentName ) = 0; + + /** Use this to get the render model name for the specified rendermode/component combination, to be passed to LoadRenderModel. + * If the component name is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetComponentRenderModelName( const char *pchRenderModelName, const char *pchComponentName, VR_OUT_STRING( ) char *pchComponentRenderModelName, uint32_t unComponentRenderModelNameLen ) = 0; + + /** Use this to query information about the component, as a function of the controller state. + * + * For dynamic controller components (ex: trigger) values will reflect component motions + * For static components this will return a consistent value independent of the VRControllerState_t + * + * If the pchRenderModelName or pchComponentName is invalid, this will return false (and transforms will be set to identity). + * Otherwise, return true + * Note: For dynamic objects, visibility may be dynamic. (I.e., true/false will be returned based on controller state and controller mode state ) */ + virtual bool GetComponentState( const char *pchRenderModelName, const char *pchComponentName, const vr::VRControllerState_t *pControllerState, const RenderModel_ControllerMode_State_t *pState, RenderModel_ComponentState_t *pComponentState ) = 0; + + /** Returns true if the render model has a component with the specified name */ + virtual bool RenderModelHasComponent( const char *pchRenderModelName, const char *pchComponentName ) = 0; + + /** Returns the URL of the thumbnail image for this rendermodel */ + virtual uint32_t GetRenderModelThumbnailURL( const char *pchRenderModelName, VR_OUT_STRING() char *pchThumbnailURL, uint32_t unThumbnailURLLen, vr::EVRRenderModelError *peError ) = 0; + + /** Provides a render model path that will load the unskinned model if the model name provided has been replace by the user. If the model + * hasn't been replaced the path value will still be a valid path to load the model. Pass this to LoadRenderModel_Async, etc. to load the + * model. */ + virtual uint32_t GetRenderModelOriginalPath( const char *pchRenderModelName, VR_OUT_STRING() char *pchOriginalPath, uint32_t unOriginalPathLen, vr::EVRRenderModelError *peError ) = 0; + + /** Returns a string for a render model error */ + virtual const char *GetRenderModelErrorNameFromEnum( vr::EVRRenderModelError error ) = 0; +}; + +static const char * const IVRRenderModels_Version = "IVRRenderModels_005"; + +} + + +// ivrextendeddisplay.h +namespace vr +{ + + /** NOTE: Use of this interface is not recommended in production applications. It will not work for displays which use + * direct-to-display mode. It is also incompatible with the VR compositor and is not available when the compositor is running. */ + class IVRExtendedDisplay + { + public: + + /** Size and position that the window needs to be on the VR display. 
*/ + virtual void GetWindowBounds( int32_t *pnX, int32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** Gets the viewport in the frame buffer to draw the output of the distortion into */ + virtual void GetEyeOutputViewport( EVREye eEye, uint32_t *pnX, uint32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** [D3D10/11 Only] + * Returns the adapter index and output index that the user should pass into EnumAdapters and EnumOutputs + * to create the device and swap chain in DX10 and DX11. If an error occurs both indices will be set to -1. + */ + virtual void GetDXGIOutputInfo( int32_t *pnAdapterIndex, int32_t *pnAdapterOutputIndex ) = 0; + + }; + + static const char * const IVRExtendedDisplay_Version = "IVRExtendedDisplay_001"; + +} + + +// ivrtrackedcamera.h +namespace vr +{ + +class IVRTrackedCamera +{ +public: + /** Returns a string for an error */ + virtual const char *GetCameraErrorNameFromEnum( vr::EVRTrackedCameraError eCameraError ) = 0; + + /** For convenience, same as tracked property request Prop_HasCamera_Bool */ + virtual vr::EVRTrackedCameraError HasCamera( vr::TrackedDeviceIndex_t nDeviceIndex, bool *pHasCamera ) = 0; + + /** Gets size of the image frame. */ + virtual vr::EVRTrackedCameraError GetCameraFrameSize( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, uint32_t *pnWidth, uint32_t *pnHeight, uint32_t *pnFrameBufferSize ) = 0; + + virtual vr::EVRTrackedCameraError GetCameraIntrinisics( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, vr::HmdVector2_t *pFocalLength, vr::HmdVector2_t *pCenter ) = 0; + + virtual vr::EVRTrackedCameraError GetCameraProjection( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, float flZNear, float flZFar, vr::HmdMatrix44_t *pProjection ) = 0; + + /** Acquiring streaming service permits video streaming for the caller. Releasing hints the system that video services do not need to be maintained for this client. + * If the camera has not already been activated, a one time spin up may incur some auto exposure as well as initial streaming frame delays. + * The camera should be considered a global resource accessible for shared consumption but not exclusive to any caller. + * The camera may go inactive due to lack of active consumers or headset idleness. */ + virtual vr::EVRTrackedCameraError AcquireVideoStreamingService( vr::TrackedDeviceIndex_t nDeviceIndex, vr::TrackedCameraHandle_t *pHandle ) = 0; + virtual vr::EVRTrackedCameraError ReleaseVideoStreamingService( vr::TrackedCameraHandle_t hTrackedCamera ) = 0; + + /** Copies the image frame into a caller's provided buffer. The image data is currently provided as RGBA data, 4 bytes per pixel. + * A caller can provide null for the framebuffer or frameheader if not desired. Requesting the frame header first, followed by the frame buffer allows + * the caller to determine if the frame as advanced per the frame header sequence. + * If there is no frame available yet, due to initial camera spinup or re-activation, the error will be VRTrackedCameraError_NoFrameAvailable. 
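 * A rough acquisition sketch (error handling trimmed; the HMD device index, the
 * undistorted frame type and the std::vector buffer are illustrative choices):
 *
 *     uint32_t nWidth = 0, nHeight = 0, nBufferSize = 0;
 *     vr::VRTrackedCamera()->GetCameraFrameSize( vr::k_unTrackedDeviceIndex_Hmd,
 *         vr::VRTrackedCameraFrameType_Undistorted, &nWidth, &nHeight, &nBufferSize );
 *     std::vector<uint8_t> frame( nBufferSize );                       // requires <vector>
 *     vr::TrackedCameraHandle_t hCamera = 0;
 *     vr::VRTrackedCamera()->AcquireVideoStreamingService( vr::k_unTrackedDeviceIndex_Hmd, &hCamera );
 *     vr::CameraVideoStreamFrameHeader_t header;
 *     vr::EVRTrackedCameraError e = vr::VRTrackedCamera()->GetVideoStreamFrameBuffer( hCamera,
 *         vr::VRTrackedCameraFrameType_Undistorted, frame.data(), nBufferSize, &header, sizeof( header ) );
 *     // e == vr::VRTrackedCameraError_NoFrameAvailable just means: try again on the next poll
 *     vr::VRTrackedCamera()->ReleaseVideoStreamingService( hCamera );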
+ * Ideally a caller should be polling at ~16ms intervals */ + virtual vr::EVRTrackedCameraError GetVideoStreamFrameBuffer( vr::TrackedCameraHandle_t hTrackedCamera, vr::EVRTrackedCameraFrameType eFrameType, void *pFrameBuffer, uint32_t nFrameBufferSize, vr::CameraVideoStreamFrameHeader_t *pFrameHeader, uint32_t nFrameHeaderSize ) = 0; +}; + +static const char * const IVRTrackedCamera_Version = "IVRTrackedCamera_003"; + +} // namespace vr + + +// ivrscreenshots.h +namespace vr +{ + +/** Errors that can occur with the VR compositor */ +enum EVRScreenshotError +{ + VRScreenshotError_None = 0, + VRScreenshotError_RequestFailed = 1, + VRScreenshotError_IncompatibleVersion = 100, + VRScreenshotError_NotFound = 101, + VRScreenshotError_BufferTooSmall = 102, + VRScreenshotError_ScreenshotAlreadyInProgress = 108, +}; + +/** Allows the application to generate screenshots */ +class IVRScreenshots +{ +public: + /** Request a screenshot of the requested type. + * A request of the VRScreenshotType_Stereo type will always + * work. Other types will depend on the underlying application + * support. + * The first file name is for the preview image and should be a + * regular screenshot (ideally from the left eye). The second + * is the VR screenshot in the correct format. They should be + * in the same aspect ratio. Formats per type: + * VRScreenshotType_Mono: the VR filename is ignored (can be + * nullptr), this is a normal flat single shot. + * VRScreenshotType_Stereo: The VR image should be a + * side-by-side with the left eye image on the left. + * VRScreenshotType_Cubemap: The VR image should be six square + * images composited horizontally. + * VRScreenshotType_StereoPanorama: above/below with left eye + * panorama being the above image. Image is typically square + * with the panorama being 2x horizontal. + * + * Note that the VR dashboard will call this function when + * the user presses the screenshot binding (currently System + * Button + Trigger). If Steam is running, the destination + * file names will be in %TEMP% and will be copied into + * Steam's screenshot library for the running application + * once SubmitScreenshot() is called. + * If Steam is not running, the paths will be in the user's + * documents folder under Documents\SteamVR\Screenshots. + * Other VR applications can call this to initate a + * screenshot outside of user control. + * The destination file names do not need an extension, + * will be replaced with the correct one for the format + * which is currently .png. */ + virtual vr::EVRScreenshotError RequestScreenshot( vr::ScreenshotHandle_t *pOutScreenshotHandle, vr::EVRScreenshotType type, const char *pchPreviewFilename, const char *pchVRFilename ) = 0; + + /** Called by the running VR application to indicate that it + * wishes to be in charge of screenshots. If the + * application does not call this, the Compositor will only + * support VRScreenshotType_Stereo screenshots that will be + * captured without notification to the running app. + * Once hooked your application will receive a + * VREvent_RequestScreenshot event when the user presses the + * buttons to take a screenshot. */ + virtual vr::EVRScreenshotError HookScreenshot( VR_ARRAY_COUNT( numTypes ) const vr::EVRScreenshotType *pSupportedTypes, int numTypes ) = 0; + + /** When your application receives a + * VREvent_RequestScreenshot event, call these functions to get + * the details of the screenshot request. 
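	 * A sketch of that flow (screenshotHandle is assumed to have been read out of the
	 * VREvent_RequestScreenshot event; the file paths are placeholders):
	 *
	 *     vr::EVRScreenshotType supported[] = { vr::VRScreenshotType_Stereo };
	 *     vr::VRScreenshots()->HookScreenshot( supported, 1 );         // once, at startup
	 *
	 *     // later, when VREvent_RequestScreenshot arrives:
	 *     vr::EVRScreenshotError err = vr::VRScreenshotError_None;
	 *     vr::EVRScreenshotType type = vr::VRScreenshots()->GetScreenshotPropertyType( screenshotHandle, &err );
	 *     if ( err == vr::VRScreenshotError_None && type == vr::VRScreenshotType_Stereo )
	 *     {
	 *         // ... write the preview and side-by-side VR images, updating progress while working ...
	 *         vr::VRScreenshots()->UpdateScreenshotProgress( screenshotHandle, 1.0f );
	 *         vr::VRScreenshots()->SubmitScreenshot( screenshotHandle, type,
	 *             "C:/screenshots/preview.png", "C:/screenshots/vr.png" );
	 *     }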
*/ + virtual vr::EVRScreenshotType GetScreenshotPropertyType( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotError *pError ) = 0; + + /** Get the filename for the preview or vr image (see + * vr::EScreenshotPropertyFilenames). The return value is + * the size of the string. */ + virtual uint32_t GetScreenshotPropertyFilename( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotPropertyFilenames filenameType, VR_OUT_STRING() char *pchFilename, uint32_t cchFilename, vr::EVRScreenshotError *pError ) = 0; + + /** Call this if the application is taking the screen shot + * will take more than a few ms processing. This will result + * in an overlay being presented that shows a completion + * bar. */ + virtual vr::EVRScreenshotError UpdateScreenshotProgress( vr::ScreenshotHandle_t screenshotHandle, float flProgress ) = 0; + + /** Tells the compositor to take an internal screenshot of + * type VRScreenshotType_Stereo. It will take the current + * submitted scene textures of the running application and + * write them into the preview image and a side-by-side file + * for the VR image. + * This is similiar to request screenshot, but doesn't ever + * talk to the application, just takes the shot and submits. */ + virtual vr::EVRScreenshotError TakeStereoScreenshot( vr::ScreenshotHandle_t *pOutScreenshotHandle, const char *pchPreviewFilename, const char *pchVRFilename ) = 0; + + /** Submit the completed screenshot. If Steam is running + * this will call into the Steam client and upload the + * screenshot to the screenshots section of the library for + * the running application. If Steam is not running, this + * function will display a notification to the user that the + * screenshot was taken. The paths should be full paths with + * extensions. + * File paths should be absolute including + * exntensions. + * screenshotHandle can be k_unScreenshotHandleInvalid if this + * was a new shot taking by the app to be saved and not + * initiated by a user (achievement earned or something) */ + virtual vr::EVRScreenshotError SubmitScreenshot( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotType type, const char *pchSourcePreviewFilename, const char *pchSourceVRFilename ) = 0; +}; + +static const char * const IVRScreenshots_Version = "IVRScreenshots_001"; + +} // namespace vr + + +// End + +#endif // _OPENVR_API + + +namespace vr +{ + /** Finds the active installation of the VR API and initializes it. The provided path must be absolute + * or relative to the current working directory. These are the local install versions of the equivalent + * functions in steamvr.h and will work without a local Steam install. + * + * This path is to the "root" of the VR API install. That's the directory with + * the "drivers" directory and a platform (i.e. "win32") directory in it, not the directory with the DLL itself. + */ + inline IVRSystem *VR_Init( EVRInitError *peError, EVRApplicationType eApplicationType ); + + /** unloads vrclient.dll. Any interface pointers from the interface are + * invalid after this point */ + inline void VR_Shutdown(); + + /** Returns true if there is an HMD attached. This check is as lightweight as possible and + * can be called outside of VR_Init/VR_Shutdown. It should be used when an application wants + * to know if initializing VR is a possibility but isn't ready to take that step yet. + */ + VR_INTERFACE bool VR_CALLTYPE VR_IsHmdPresent(); + + /** Returns true if the OpenVR runtime is installed. 
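	 * A typical pre-flight check built on these entry points (sketch, assumed to live in
	 * a bool-returning init helper of the application; a scene application is used as the
	 * example application type):
	 *
	 *     if ( !vr::VR_IsRuntimeInstalled() || !vr::VR_IsHmdPresent() )
	 *         return false;                                            // fall back to a non-VR path
	 *     vr::EVRInitError initError = vr::VRInitError_None;
	 *     vr::IVRSystem *pHMD = vr::VR_Init( &initError, vr::VRApplication_Scene );
	 *     if ( initError != vr::VRInitError_None )
	 *     {
	 *         printf( "VR_Init failed: %s\n", vr::VR_GetVRInitErrorAsEnglishDescription( initError ) );
	 *         return false;
	 *     }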
*/ + VR_INTERFACE bool VR_CALLTYPE VR_IsRuntimeInstalled(); + + /** Returns where the OpenVR runtime is installed. */ + VR_INTERFACE const char *VR_CALLTYPE VR_RuntimePath(); + + /** Returns the name of the enum value for an EVRInitError. This function may be called outside of VR_Init()/VR_Shutdown(). */ + VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsSymbol( EVRInitError error ); + + /** Returns an english string for an EVRInitError. Applications should call VR_GetVRInitErrorAsSymbol instead and + * use that as a key to look up their own localized error message. This function may be called outside of VR_Init()/VR_Shutdown(). */ + VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsEnglishDescription( EVRInitError error ); + + /** Returns the interface of the specified version. This method must be called after VR_Init. The + * pointer returned is valid until VR_Shutdown is called. + */ + VR_INTERFACE void *VR_CALLTYPE VR_GetGenericInterface( const char *pchInterfaceVersion, EVRInitError *peError ); + + /** Returns whether the interface of the specified version exists. + */ + VR_INTERFACE bool VR_CALLTYPE VR_IsInterfaceVersionValid( const char *pchInterfaceVersion ); + + /** Returns a token that represents whether the VR interface handles need to be reloaded */ + VR_INTERFACE uint32_t VR_CALLTYPE VR_GetInitToken(); + + // These typedefs allow old enum names from SDK 0.9.11 to be used in applications. + // They will go away in the future. + typedef EVRInitError HmdError; + typedef EVREye Hmd_Eye; + typedef EGraphicsAPIConvention GraphicsAPIConvention; + typedef EColorSpace ColorSpace; + typedef ETrackingResult HmdTrackingResult; + typedef ETrackedDeviceClass TrackedDeviceClass; + typedef ETrackingUniverseOrigin TrackingUniverseOrigin; + typedef ETrackedDeviceProperty TrackedDeviceProperty; + typedef ETrackedPropertyError TrackedPropertyError; + typedef EVRSubmitFlags VRSubmitFlags_t; + typedef EVRState VRState_t; + typedef ECollisionBoundsStyle CollisionBoundsStyle_t; + typedef EVROverlayError VROverlayError; + typedef EVRFirmwareError VRFirmwareError; + typedef EVRCompositorError VRCompositorError; + typedef EVRScreenshotError VRScreenshotsError; + + inline uint32_t &VRToken() + { + static uint32_t token; + return token; + } + + class COpenVRContext + { + public: + COpenVRContext() { Clear(); } + void Clear(); + + inline void CheckClear() + { + if ( VRToken() != VR_GetInitToken() ) + { + Clear(); + VRToken() = VR_GetInitToken(); + } + } + + IVRSystem *VRSystem() + { + CheckClear(); + if ( m_pVRSystem == nullptr ) + { + EVRInitError eError; + m_pVRSystem = ( IVRSystem * )VR_GetGenericInterface( IVRSystem_Version, &eError ); + } + return m_pVRSystem; + } + IVRChaperone *VRChaperone() + { + CheckClear(); + if ( m_pVRChaperone == nullptr ) + { + EVRInitError eError; + m_pVRChaperone = ( IVRChaperone * )VR_GetGenericInterface( IVRChaperone_Version, &eError ); + } + return m_pVRChaperone; + } + + IVRChaperoneSetup *VRChaperoneSetup() + { + CheckClear(); + if ( m_pVRChaperoneSetup == nullptr ) + { + EVRInitError eError; + m_pVRChaperoneSetup = ( IVRChaperoneSetup * )VR_GetGenericInterface( IVRChaperoneSetup_Version, &eError ); + } + return m_pVRChaperoneSetup; + } + + IVRCompositor *VRCompositor() + { + CheckClear(); + if ( m_pVRCompositor == nullptr ) + { + EVRInitError eError; + m_pVRCompositor = ( IVRCompositor * )VR_GetGenericInterface( IVRCompositor_Version, &eError ); + } + return m_pVRCompositor; + } + + IVROverlay *VROverlay() + { + CheckClear(); + if ( m_pVROverlay == 
nullptr ) + { + EVRInitError eError; + m_pVROverlay = ( IVROverlay * )VR_GetGenericInterface( IVROverlay_Version, &eError ); + } + return m_pVROverlay; + } + + IVRScreenshots *VRScreenshots() + { + CheckClear(); + if ( m_pVRScreenshots == nullptr ) + { + EVRInitError eError; + m_pVRScreenshots = ( IVRScreenshots * )VR_GetGenericInterface( IVRScreenshots_Version, &eError ); + } + return m_pVRScreenshots; + } + + IVRRenderModels *VRRenderModels() + { + CheckClear(); + if ( m_pVRRenderModels == nullptr ) + { + EVRInitError eError; + m_pVRRenderModels = ( IVRRenderModels * )VR_GetGenericInterface( IVRRenderModels_Version, &eError ); + } + return m_pVRRenderModels; + } + + IVRExtendedDisplay *VRExtendedDisplay() + { + CheckClear(); + if ( m_pVRExtendedDisplay == nullptr ) + { + EVRInitError eError; + m_pVRExtendedDisplay = ( IVRExtendedDisplay * )VR_GetGenericInterface( IVRExtendedDisplay_Version, &eError ); + } + return m_pVRExtendedDisplay; + } + + IVRSettings *VRSettings() + { + CheckClear(); + if ( m_pVRSettings == nullptr ) + { + EVRInitError eError; + m_pVRSettings = ( IVRSettings * )VR_GetGenericInterface( IVRSettings_Version, &eError ); + } + return m_pVRSettings; + } + + IVRApplications *VRApplications() + { + CheckClear(); + if ( m_pVRApplications == nullptr ) + { + EVRInitError eError; + m_pVRApplications = ( IVRApplications * )VR_GetGenericInterface( IVRApplications_Version, &eError ); + } + return m_pVRApplications; + } + + IVRTrackedCamera *VRTrackedCamera() + { + CheckClear(); + if ( m_pVRTrackedCamera == nullptr ) + { + EVRInitError eError; + m_pVRTrackedCamera = ( IVRTrackedCamera * )VR_GetGenericInterface( IVRTrackedCamera_Version, &eError ); + } + return m_pVRTrackedCamera; + } + + private: + IVRSystem *m_pVRSystem; + IVRChaperone *m_pVRChaperone; + IVRChaperoneSetup *m_pVRChaperoneSetup; + IVRCompositor *m_pVRCompositor; + IVROverlay *m_pVROverlay; + IVRRenderModels *m_pVRRenderModels; + IVRExtendedDisplay *m_pVRExtendedDisplay; + IVRSettings *m_pVRSettings; + IVRApplications *m_pVRApplications; + IVRTrackedCamera *m_pVRTrackedCamera; + IVRScreenshots *m_pVRScreenshots; + }; + + inline COpenVRContext &OpenVRInternal_ModuleContext() + { + static void *ctx[ sizeof( COpenVRContext ) / sizeof( void * ) ]; + return *( COpenVRContext * )ctx; // bypass zero-init constructor + } + + inline IVRSystem *VR_CALLTYPE VRSystem() { return OpenVRInternal_ModuleContext().VRSystem(); } + inline IVRChaperone *VR_CALLTYPE VRChaperone() { return OpenVRInternal_ModuleContext().VRChaperone(); } + inline IVRChaperoneSetup *VR_CALLTYPE VRChaperoneSetup() { return OpenVRInternal_ModuleContext().VRChaperoneSetup(); } + inline IVRCompositor *VR_CALLTYPE VRCompositor() { return OpenVRInternal_ModuleContext().VRCompositor(); } + inline IVROverlay *VR_CALLTYPE VROverlay() { return OpenVRInternal_ModuleContext().VROverlay(); } + inline IVRScreenshots *VR_CALLTYPE VRScreenshots() { return OpenVRInternal_ModuleContext().VRScreenshots(); } + inline IVRRenderModels *VR_CALLTYPE VRRenderModels() { return OpenVRInternal_ModuleContext().VRRenderModels(); } + inline IVRApplications *VR_CALLTYPE VRApplications() { return OpenVRInternal_ModuleContext().VRApplications(); } + inline IVRSettings *VR_CALLTYPE VRSettings() { return OpenVRInternal_ModuleContext().VRSettings(); } + inline IVRExtendedDisplay *VR_CALLTYPE VRExtendedDisplay() { return OpenVRInternal_ModuleContext().VRExtendedDisplay(); } + inline IVRTrackedCamera *VR_CALLTYPE VRTrackedCamera() { return OpenVRInternal_ModuleContext().VRTrackedCamera(); } + + 
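	/*
	   The accessors above are the usual way to reach each interface once VR_Init has
	   succeeded; a minimal sketch (local variable names are illustrative only):

	       uint32_t renderWidth = 0, renderHeight = 0;
	       vr::VRSystem()->GetRecommendedRenderTargetSize( &renderWidth, &renderHeight );

	       if ( !vr::VRCompositor() )
	       {
	           // the compositor interface could not be obtained; no frames can be submitted
	       }

	       // ... per-frame work: pose updates, rendering and submission via vr::VRCompositor() ...

	       vr::VR_Shutdown();   // invalidates every pointer returned by the accessors above
	*/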
inline void COpenVRContext::Clear() + { + m_pVRSystem = nullptr; + m_pVRChaperone = nullptr; + m_pVRChaperoneSetup = nullptr; + m_pVRCompositor = nullptr; + m_pVROverlay = nullptr; + m_pVRRenderModels = nullptr; + m_pVRExtendedDisplay = nullptr; + m_pVRSettings = nullptr; + m_pVRApplications = nullptr; + m_pVRTrackedCamera = nullptr; + m_pVRScreenshots = nullptr; + } + + VR_INTERFACE uint32_t VR_CALLTYPE VR_InitInternal( EVRInitError *peError, EVRApplicationType eApplicationType ); + VR_INTERFACE void VR_CALLTYPE VR_ShutdownInternal(); + + /** Finds the active installation of vrclient.dll and initializes it */ + inline IVRSystem *VR_Init( EVRInitError *peError, EVRApplicationType eApplicationType ) + { + IVRSystem *pVRSystem = nullptr; + + EVRInitError eError; + VRToken() = VR_InitInternal( &eError, eApplicationType ); + COpenVRContext &ctx = OpenVRInternal_ModuleContext(); + ctx.Clear(); + + if ( eError == VRInitError_None ) + { + if ( VR_IsInterfaceVersionValid( IVRSystem_Version ) ) + { + pVRSystem = VRSystem(); + } + else + { + VR_ShutdownInternal(); + eError = VRInitError_Init_InterfaceNotFound; + } + } + + if ( peError ) + *peError = eError; + return pVRSystem; + } + + /** unloads vrclient.dll. Any interface pointers from the interface are + * invalid after this point */ + inline void VR_Shutdown() + { + VR_ShutdownInternal(); + } +} diff --git a/examples/ThirdPartyLibs/openvr/headers/openvr_api.cs b/examples/ThirdPartyLibs/openvr/headers/openvr_api.cs new file mode 100644 index 000000000..a4d71f826 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/headers/openvr_api.cs @@ -0,0 +1,4187 @@ +//======= Copyright (c) Valve Corporation, All rights reserved. =============== +// +// Purpose: This file contains C#/managed code bindings for the OpenVR interfaces +// This file is auto-generated, do not edit it. 
+// +//============================================================================= + +using System; +using System.Runtime.InteropServices; +using Valve.VR; + +namespace Valve.VR +{ + +[StructLayout(LayoutKind.Sequential)] +public struct IVRSystem +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetRecommendedRenderTargetSize(ref uint pnWidth, ref uint pnHeight); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRecommendedRenderTargetSize GetRecommendedRenderTargetSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HmdMatrix44_t _GetProjectionMatrix(EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetProjectionMatrix GetProjectionMatrix; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetProjectionRaw(EVREye eEye, ref float pfLeft, ref float pfRight, ref float pfTop, ref float pfBottom); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetProjectionRaw GetProjectionRaw; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate DistortionCoordinates_t _ComputeDistortion(EVREye eEye, float fU, float fV); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ComputeDistortion ComputeDistortion; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HmdMatrix34_t _GetEyeToHeadTransform(EVREye eEye); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetEyeToHeadTransform GetEyeToHeadTransform; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetTimeSinceLastVsync(ref float pfSecondsSinceLastVsync, ref ulong pulFrameCounter); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTimeSinceLastVsync GetTimeSinceLastVsync; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate int _GetD3D9AdapterIndex(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetD3D9AdapterIndex GetD3D9AdapterIndex; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetDXGIOutputInfo(ref int pnAdapterIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetDXGIOutputInfo GetDXGIOutputInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsDisplayOnDesktop(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsDisplayOnDesktop IsDisplayOnDesktop; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _SetDisplayVisibility(bool bIsVisibleOnDesktop); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetDisplayVisibility SetDisplayVisibility; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetDeviceToAbsoluteTrackingPose(ETrackingUniverseOrigin eOrigin, float fPredictedSecondsToPhotonsFromNow, [In, Out] TrackedDevicePose_t[] pTrackedDevicePoseArray, uint unTrackedDevicePoseArrayCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetDeviceToAbsoluteTrackingPose GetDeviceToAbsoluteTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ResetSeatedZeroPose(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ResetSeatedZeroPose ResetSeatedZeroPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HmdMatrix34_t _GetSeatedZeroPoseToStandingAbsoluteTrackingPose(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetSeatedZeroPoseToStandingAbsoluteTrackingPose 
GetSeatedZeroPoseToStandingAbsoluteTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HmdMatrix34_t _GetRawZeroPoseToStandingAbsoluteTrackingPose(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRawZeroPoseToStandingAbsoluteTrackingPose GetRawZeroPoseToStandingAbsoluteTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetSortedTrackedDeviceIndicesOfClass(ETrackedDeviceClass eTrackedDeviceClass, [In, Out] uint[] punTrackedDeviceIndexArray, uint unTrackedDeviceIndexArrayCount, uint unRelativeToTrackedDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetSortedTrackedDeviceIndicesOfClass GetSortedTrackedDeviceIndicesOfClass; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EDeviceActivityLevel _GetTrackedDeviceActivityLevel(uint unDeviceId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTrackedDeviceActivityLevel GetTrackedDeviceActivityLevel; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ApplyTransform(ref TrackedDevicePose_t pOutputPose, ref TrackedDevicePose_t pTrackedDevicePose, ref HmdMatrix34_t pTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ApplyTransform ApplyTransform; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetTrackedDeviceIndexForControllerRole(ETrackedControllerRole unDeviceType); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTrackedDeviceIndexForControllerRole GetTrackedDeviceIndexForControllerRole; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ETrackedControllerRole _GetControllerRoleForTrackedDeviceIndex(uint unDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetControllerRoleForTrackedDeviceIndex GetControllerRoleForTrackedDeviceIndex; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ETrackedDeviceClass _GetTrackedDeviceClass(uint unDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTrackedDeviceClass GetTrackedDeviceClass; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsTrackedDeviceConnected(uint unDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsTrackedDeviceConnected IsTrackedDeviceConnected; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetBoolTrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetBoolTrackedDeviceProperty GetBoolTrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate float _GetFloatTrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetFloatTrackedDeviceProperty GetFloatTrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate int _GetInt32TrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetInt32TrackedDeviceProperty GetInt32TrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ulong _GetUint64TrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal 
_GetUint64TrackedDeviceProperty GetUint64TrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HmdMatrix34_t _GetMatrix34TrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetMatrix34TrackedDeviceProperty GetMatrix34TrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetStringTrackedDeviceProperty(uint unDeviceIndex, ETrackedDeviceProperty prop, System.Text.StringBuilder pchValue, uint unBufferSize, ref ETrackedPropertyError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetStringTrackedDeviceProperty GetStringTrackedDeviceProperty; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetPropErrorNameFromEnum(ETrackedPropertyError error); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetPropErrorNameFromEnum GetPropErrorNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _PollNextEvent(ref VREvent_t pEvent, uint uncbVREvent); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PollNextEvent PollNextEvent; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _PollNextEventWithPose(ETrackingUniverseOrigin eOrigin, ref VREvent_t pEvent, uint uncbVREvent, ref TrackedDevicePose_t pTrackedDevicePose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PollNextEventWithPose PollNextEventWithPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetEventTypeNameFromEnum(EVREventType eType); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetEventTypeNameFromEnum GetEventTypeNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate HiddenAreaMesh_t _GetHiddenAreaMesh(EVREye eEye); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetHiddenAreaMesh GetHiddenAreaMesh; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetControllerState(uint unControllerDeviceIndex, ref VRControllerState_t pControllerState); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetControllerState GetControllerState; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetControllerStateWithPose(ETrackingUniverseOrigin eOrigin, uint unControllerDeviceIndex, ref VRControllerState_t pControllerState, ref TrackedDevicePose_t pTrackedDevicePose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetControllerStateWithPose GetControllerStateWithPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _TriggerHapticPulse(uint unControllerDeviceIndex, uint unAxisId, char usDurationMicroSec); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _TriggerHapticPulse TriggerHapticPulse; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetButtonIdNameFromEnum(EVRButtonId eButtonId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetButtonIdNameFromEnum GetButtonIdNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetControllerAxisTypeNameFromEnum(EVRControllerAxisType eAxisType); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetControllerAxisTypeNameFromEnum GetControllerAxisTypeNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _CaptureInputFocus(); + 
[MarshalAs(UnmanagedType.FunctionPtr)] + internal _CaptureInputFocus CaptureInputFocus; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ReleaseInputFocus(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReleaseInputFocus ReleaseInputFocus; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsInputFocusCapturedByAnotherProcess(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsInputFocusCapturedByAnotherProcess IsInputFocusCapturedByAnotherProcess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _DriverDebugRequest(uint unDeviceIndex, string pchRequest, string pchResponseBuffer, uint unResponseBufferSize); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _DriverDebugRequest DriverDebugRequest; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRFirmwareError _PerformFirmwareUpdate(uint unDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PerformFirmwareUpdate PerformFirmwareUpdate; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _AcknowledgeQuit_Exiting(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _AcknowledgeQuit_Exiting AcknowledgeQuit_Exiting; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _AcknowledgeQuit_UserPrompt(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _AcknowledgeQuit_UserPrompt AcknowledgeQuit_UserPrompt; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRExtendedDisplay +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetWindowBounds(ref int pnX, ref int pnY, ref uint pnWidth, ref uint pnHeight); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWindowBounds GetWindowBounds; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetEyeOutputViewport(EVREye eEye, ref uint pnX, ref uint pnY, ref uint pnWidth, ref uint pnHeight); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetEyeOutputViewport GetEyeOutputViewport; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetDXGIOutputInfo(ref int pnAdapterIndex, ref int pnAdapterOutputIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetDXGIOutputInfo GetDXGIOutputInfo; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRTrackedCamera +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetCameraErrorNameFromEnum(EVRTrackedCameraError eCameraError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCameraErrorNameFromEnum GetCameraErrorNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _HasCamera(uint nDeviceIndex, ref bool pHasCamera); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HasCamera HasCamera; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _GetCameraFrameSize(uint nDeviceIndex, EVRTrackedCameraFrameType eFrameType, ref uint pnWidth, ref uint pnHeight, ref uint pnFrameBufferSize); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCameraFrameSize GetCameraFrameSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _GetCameraIntrinisics(uint nDeviceIndex, EVRTrackedCameraFrameType eFrameType, ref HmdVector2_t pFocalLength, ref HmdVector2_t pCenter); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal 
_GetCameraIntrinisics GetCameraIntrinisics; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _GetCameraProjection(uint nDeviceIndex, EVRTrackedCameraFrameType eFrameType, float flZNear, float flZFar, ref HmdMatrix44_t pProjection); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCameraProjection GetCameraProjection; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _AcquireVideoStreamingService(uint nDeviceIndex, ref ulong pHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _AcquireVideoStreamingService AcquireVideoStreamingService; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _ReleaseVideoStreamingService(ulong hTrackedCamera); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReleaseVideoStreamingService ReleaseVideoStreamingService; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRTrackedCameraError _GetVideoStreamFrameBuffer(ulong hTrackedCamera, EVRTrackedCameraFrameType eFrameType, IntPtr pFrameBuffer, uint nFrameBufferSize, ref CameraVideoStreamFrameHeader_t pFrameHeader, uint nFrameHeaderSize); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetVideoStreamFrameBuffer GetVideoStreamFrameBuffer; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRApplications +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _AddApplicationManifest(string pchApplicationManifestFullPath, bool bTemporary); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _AddApplicationManifest AddApplicationManifest; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _RemoveApplicationManifest(string pchApplicationManifestFullPath); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RemoveApplicationManifest RemoveApplicationManifest; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsApplicationInstalled(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsApplicationInstalled IsApplicationInstalled; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetApplicationCount(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationCount GetApplicationCount; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _GetApplicationKeyByIndex(uint unApplicationIndex, string pchAppKeyBuffer, uint unAppKeyBufferLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationKeyByIndex GetApplicationKeyByIndex; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _GetApplicationKeyByProcessId(uint unProcessId, string pchAppKeyBuffer, uint unAppKeyBufferLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationKeyByProcessId GetApplicationKeyByProcessId; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _LaunchApplication(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LaunchApplication LaunchApplication; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _LaunchTemplateApplication(string pchTemplateAppKey, string pchNewAppKey, [In, Out] AppOverrideKeys_t[] pKeys, uint unKeys); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LaunchTemplateApplication LaunchTemplateApplication; + 
+ [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _LaunchDashboardOverlay(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LaunchDashboardOverlay LaunchDashboardOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _CancelApplicationLaunch(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CancelApplicationLaunch CancelApplicationLaunch; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _IdentifyApplication(uint unProcessId, string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IdentifyApplication IdentifyApplication; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetApplicationProcessId(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationProcessId GetApplicationProcessId; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetApplicationsErrorNameFromEnum(EVRApplicationError error); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationsErrorNameFromEnum GetApplicationsErrorNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetApplicationPropertyString(string pchAppKey, EVRApplicationProperty eProperty, string pchPropertyValueBuffer, uint unPropertyValueBufferLen, ref EVRApplicationError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationPropertyString GetApplicationPropertyString; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetApplicationPropertyBool(string pchAppKey, EVRApplicationProperty eProperty, ref EVRApplicationError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationPropertyBool GetApplicationPropertyBool; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ulong _GetApplicationPropertyUint64(string pchAppKey, EVRApplicationProperty eProperty, ref EVRApplicationError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationPropertyUint64 GetApplicationPropertyUint64; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _SetApplicationAutoLaunch(string pchAppKey, bool bAutoLaunch); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetApplicationAutoLaunch SetApplicationAutoLaunch; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetApplicationAutoLaunch(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationAutoLaunch GetApplicationAutoLaunch; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _GetStartingApplication(string pchAppKeyBuffer, uint unAppKeyBufferLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetStartingApplication GetStartingApplication; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationTransitionState _GetTransitionState(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTransitionState GetTransitionState; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _PerformApplicationPrelaunchCheck(string pchAppKey); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PerformApplicationPrelaunchCheck PerformApplicationPrelaunchCheck; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr 
_GetApplicationsTransitionStateNameFromEnum(EVRApplicationTransitionState state); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetApplicationsTransitionStateNameFromEnum GetApplicationsTransitionStateNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsQuitUserPromptRequested(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsQuitUserPromptRequested IsQuitUserPromptRequested; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRApplicationError _LaunchInternalProcess(string pchBinaryPath, string pchArguments, string pchWorkingDirectory); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LaunchInternalProcess LaunchInternalProcess; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRChaperone +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ChaperoneCalibrationState _GetCalibrationState(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCalibrationState GetCalibrationState; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetPlayAreaSize(ref float pSizeX, ref float pSizeZ); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetPlayAreaSize GetPlayAreaSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetPlayAreaRect(ref HmdQuad_t rect); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetPlayAreaRect GetPlayAreaRect; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ReloadInfo(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReloadInfo ReloadInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetSceneColor(HmdColor_t color); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetSceneColor SetSceneColor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetBoundsColor(ref HmdColor_t pOutputColorArray, int nNumOutputColors, float flCollisionBoundsFadeDistance, ref HmdColor_t pOutputCameraColor); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetBoundsColor GetBoundsColor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _AreBoundsVisible(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _AreBoundsVisible AreBoundsVisible; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ForceBoundsVisible(bool bForce); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ForceBoundsVisible ForceBoundsVisible; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRChaperoneSetup +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _CommitWorkingCopy(EChaperoneConfigFile configFile); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CommitWorkingCopy CommitWorkingCopy; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _RevertWorkingCopy(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RevertWorkingCopy RevertWorkingCopy; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetWorkingPlayAreaSize(ref float pSizeX, ref float pSizeZ); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWorkingPlayAreaSize GetWorkingPlayAreaSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetWorkingPlayAreaRect(ref HmdQuad_t rect); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWorkingPlayAreaRect GetWorkingPlayAreaRect; + + 
[UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetWorkingCollisionBoundsInfo([In, Out] HmdQuad_t[] pQuadsBuffer, ref uint punQuadsCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWorkingCollisionBoundsInfo GetWorkingCollisionBoundsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetLiveCollisionBoundsInfo([In, Out] HmdQuad_t[] pQuadsBuffer, ref uint punQuadsCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLiveCollisionBoundsInfo GetLiveCollisionBoundsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetWorkingSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatSeatedZeroPoseToRawTrackingPose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWorkingSeatedZeroPoseToRawTrackingPose GetWorkingSeatedZeroPoseToRawTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatStandingZeroPoseToRawTrackingPose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetWorkingStandingZeroPoseToRawTrackingPose GetWorkingStandingZeroPoseToRawTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetWorkingPlayAreaSize(float sizeX, float sizeZ); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingPlayAreaSize SetWorkingPlayAreaSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetWorkingCollisionBoundsInfo([In, Out] HmdQuad_t[] pQuadsBuffer, uint unQuadsCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingCollisionBoundsInfo SetWorkingCollisionBoundsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetWorkingSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatSeatedZeroPoseToRawTrackingPose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingSeatedZeroPoseToRawTrackingPose SetWorkingSeatedZeroPoseToRawTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatStandingZeroPoseToRawTrackingPose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingStandingZeroPoseToRawTrackingPose SetWorkingStandingZeroPoseToRawTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ReloadFromDisk(EChaperoneConfigFile configFile); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReloadFromDisk ReloadFromDisk; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetLiveSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatSeatedZeroPoseToRawTrackingPose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLiveSeatedZeroPoseToRawTrackingPose GetLiveSeatedZeroPoseToRawTrackingPose; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetWorkingCollisionBoundsTagsInfo([In, Out] byte[] pTagsBuffer, uint unTagCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingCollisionBoundsTagsInfo SetWorkingCollisionBoundsTagsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetLiveCollisionBoundsTagsInfo([In, Out] byte[] pTagsBuffer, ref uint punTagCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLiveCollisionBoundsTagsInfo GetLiveCollisionBoundsTagsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal 
delegate bool _SetWorkingPhysicalBoundsInfo([In, Out] HmdQuad_t[] pQuadsBuffer, uint unQuadsCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetWorkingPhysicalBoundsInfo SetWorkingPhysicalBoundsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetLivePhysicalBoundsInfo([In, Out] HmdQuad_t[] pQuadsBuffer, ref uint punQuadsCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLivePhysicalBoundsInfo GetLivePhysicalBoundsInfo; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _ExportLiveToBuffer(System.Text.StringBuilder pBuffer, ref uint pnBufferLength); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ExportLiveToBuffer ExportLiveToBuffer; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _ImportFromBufferToWorking(string pBuffer, uint nImportFlags); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ImportFromBufferToWorking ImportFromBufferToWorking; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRCompositor +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetTrackingSpace(ETrackingUniverseOrigin eOrigin); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetTrackingSpace SetTrackingSpace; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ETrackingUniverseOrigin _GetTrackingSpace(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTrackingSpace GetTrackingSpace; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _WaitGetPoses([In, Out] TrackedDevicePose_t[] pRenderPoseArray, uint unRenderPoseArrayCount, [In, Out] TrackedDevicePose_t[] pGamePoseArray, uint unGamePoseArrayCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _WaitGetPoses WaitGetPoses; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _GetLastPoses([In, Out] TrackedDevicePose_t[] pRenderPoseArray, uint unRenderPoseArrayCount, [In, Out] TrackedDevicePose_t[] pGamePoseArray, uint unGamePoseArrayCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLastPoses GetLastPoses; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _GetLastPoseForTrackedDeviceIndex(uint unDeviceIndex, ref TrackedDevicePose_t pOutputPose, ref TrackedDevicePose_t pOutputGamePose); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLastPoseForTrackedDeviceIndex GetLastPoseForTrackedDeviceIndex; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _Submit Submit; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ClearLastSubmittedFrame(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ClearLastSubmittedFrame ClearLastSubmittedFrame; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _PostPresentHandoff(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PostPresentHandoff PostPresentHandoff; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetFrameTiming(ref Compositor_FrameTiming pTiming, uint unFramesAgo); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetFrameTiming GetFrameTiming; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate float 
_GetFrameTimeRemaining(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetFrameTimeRemaining GetFrameTimeRemaining; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetCumulativeStats(ref Compositor_CumulativeStats pStats, uint nStatsSizeInBytes); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCumulativeStats GetCumulativeStats; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _FadeToColor(float fSeconds, float fRed, float fGreen, float fBlue, float fAlpha, bool bBackground); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FadeToColor FadeToColor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _FadeGrid(float fSeconds, bool bFadeIn); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FadeGrid FadeGrid; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _SetSkyboxOverride([In, Out] Texture_t[] pTextures, uint unTextureCount); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetSkyboxOverride SetSkyboxOverride; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ClearSkyboxOverride(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ClearSkyboxOverride ClearSkyboxOverride; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _CompositorBringToFront(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CompositorBringToFront CompositorBringToFront; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _CompositorGoToBack(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CompositorGoToBack CompositorGoToBack; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _CompositorQuit(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CompositorQuit CompositorQuit; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsFullscreen(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsFullscreen IsFullscreen; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetCurrentSceneFocusProcess(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCurrentSceneFocusProcess GetCurrentSceneFocusProcess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetLastFrameRenderer(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetLastFrameRenderer GetLastFrameRenderer; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _CanRenderScene(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CanRenderScene CanRenderScene; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ShowMirrorWindow(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShowMirrorWindow ShowMirrorWindow; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _HideMirrorWindow(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HideMirrorWindow HideMirrorWindow; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsMirrorWindowVisible(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsMirrorWindowVisible IsMirrorWindowVisible; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _CompositorDumpImages(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CompositorDumpImages CompositorDumpImages; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + 
internal delegate bool _ShouldAppRenderWithLowResources(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShouldAppRenderWithLowResources ShouldAppRenderWithLowResources; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ForceInterleavedReprojectionOn(bool bOverride); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ForceInterleavedReprojectionOn ForceInterleavedReprojectionOn; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ForceReconnectProcess(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ForceReconnectProcess ForceReconnectProcess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SuspendRendering(bool bSuspend); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SuspendRendering SuspendRendering; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _RequestScreenshot(EVRScreenshotType type, string pchDestinationFileName, string pchVRDestinationFileName); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RequestScreenshot RequestScreenshot; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotType _GetCurrentScreenshotType(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetCurrentScreenshotType GetCurrentScreenshotType; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _GetMirrorTextureD3D11(EVREye eEye, IntPtr pD3D11DeviceOrResource, ref IntPtr ppD3D11ShaderResourceView); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetMirrorTextureD3D11 GetMirrorTextureD3D11; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRCompositorError _GetMirrorTextureGL(EVREye eEye, ref uint pglTextureId, IntPtr pglSharedTextureHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetMirrorTextureGL GetMirrorTextureGL; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _ReleaseSharedGLTexture(uint glTextureId, IntPtr glSharedTextureHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReleaseSharedGLTexture ReleaseSharedGLTexture; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _LockGLSharedTextureForAccess(IntPtr glSharedTextureHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LockGLSharedTextureForAccess LockGLSharedTextureForAccess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _UnlockGLSharedTextureForAccess(IntPtr glSharedTextureHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _UnlockGLSharedTextureForAccess UnlockGLSharedTextureForAccess; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVROverlay +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _FindOverlay(string pchOverlayKey, ref ulong pOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FindOverlay FindOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _CreateOverlay(string pchOverlayKey, string pchOverlayFriendlyName, ref ulong pOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CreateOverlay CreateOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _DestroyOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _DestroyOverlay DestroyOverlay; + + 
[UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetHighQualityOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetHighQualityOverlay SetHighQualityOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ulong _GetHighQualityOverlay(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetHighQualityOverlay GetHighQualityOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetOverlayKey(ulong ulOverlayHandle, System.Text.StringBuilder pchValue, uint unBufferSize, ref EVROverlayError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayKey GetOverlayKey; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetOverlayName(ulong ulOverlayHandle, System.Text.StringBuilder pchValue, uint unBufferSize, ref EVROverlayError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayName GetOverlayName; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayImageData(ulong ulOverlayHandle, IntPtr pvBuffer, uint unBufferSize, ref uint punWidth, ref uint punHeight); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayImageData GetOverlayImageData; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetOverlayErrorNameFromEnum(EVROverlayError error); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayErrorNameFromEnum GetOverlayErrorNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayRenderingPid(ulong ulOverlayHandle, uint unPID); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayRenderingPid SetOverlayRenderingPid; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetOverlayRenderingPid(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayRenderingPid GetOverlayRenderingPid; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayFlag(ulong ulOverlayHandle, VROverlayFlags eOverlayFlag, bool bEnabled); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayFlag SetOverlayFlag; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayFlag(ulong ulOverlayHandle, VROverlayFlags eOverlayFlag, ref bool pbEnabled); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayFlag GetOverlayFlag; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayColor(ulong ulOverlayHandle, float fRed, float fGreen, float fBlue); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayColor SetOverlayColor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayColor(ulong ulOverlayHandle, ref float pfRed, ref float pfGreen, ref float pfBlue); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayColor GetOverlayColor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayAlpha(ulong ulOverlayHandle, float fAlpha); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayAlpha SetOverlayAlpha; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayAlpha(ulong ulOverlayHandle, ref float pfAlpha); + [MarshalAs(UnmanagedType.FunctionPtr)] 
+ internal _GetOverlayAlpha GetOverlayAlpha; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayWidthInMeters(ulong ulOverlayHandle, float fWidthInMeters); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayWidthInMeters SetOverlayWidthInMeters; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayWidthInMeters(ulong ulOverlayHandle, ref float pfWidthInMeters); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayWidthInMeters GetOverlayWidthInMeters; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayAutoCurveDistanceRangeInMeters(ulong ulOverlayHandle, float fMinDistanceInMeters, float fMaxDistanceInMeters); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayAutoCurveDistanceRangeInMeters SetOverlayAutoCurveDistanceRangeInMeters; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayAutoCurveDistanceRangeInMeters(ulong ulOverlayHandle, ref float pfMinDistanceInMeters, ref float pfMaxDistanceInMeters); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayAutoCurveDistanceRangeInMeters GetOverlayAutoCurveDistanceRangeInMeters; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayTextureColorSpace(ulong ulOverlayHandle, EColorSpace eTextureColorSpace); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTextureColorSpace SetOverlayTextureColorSpace; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTextureColorSpace(ulong ulOverlayHandle, ref EColorSpace peTextureColorSpace); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTextureColorSpace GetOverlayTextureColorSpace; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayTextureBounds(ulong ulOverlayHandle, ref VRTextureBounds_t pOverlayTextureBounds); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTextureBounds SetOverlayTextureBounds; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTextureBounds(ulong ulOverlayHandle, ref VRTextureBounds_t pOverlayTextureBounds); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTextureBounds GetOverlayTextureBounds; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTransformType(ulong ulOverlayHandle, ref VROverlayTransformType peTransformType); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTransformType GetOverlayTransformType; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayTransformAbsolute(ulong ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTransformAbsolute SetOverlayTransformAbsolute; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTransformAbsolute(ulong ulOverlayHandle, ref ETrackingUniverseOrigin peTrackingOrigin, ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTransformAbsolute GetOverlayTransformAbsolute; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate 
EVROverlayError _SetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle, uint unTrackedDevice, ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTransformTrackedDeviceRelative SetOverlayTransformTrackedDeviceRelative; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle, ref uint punTrackedDevice, ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTransformTrackedDeviceRelative GetOverlayTransformTrackedDeviceRelative; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayTransformTrackedDeviceComponent(ulong ulOverlayHandle, uint unDeviceIndex, string pchComponentName); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTransformTrackedDeviceComponent SetOverlayTransformTrackedDeviceComponent; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTransformTrackedDeviceComponent(ulong ulOverlayHandle, ref uint punDeviceIndex, string pchComponentName, uint unComponentNameSize); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTransformTrackedDeviceComponent GetOverlayTransformTrackedDeviceComponent; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _ShowOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShowOverlay ShowOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _HideOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HideOverlay HideOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsOverlayVisible(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsOverlayVisible IsOverlayVisible; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetTransformForOverlayCoordinates(ulong ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, HmdVector2_t coordinatesInOverlay, ref HmdMatrix34_t pmatTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetTransformForOverlayCoordinates GetTransformForOverlayCoordinates; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _PollNextOverlayEvent(ulong ulOverlayHandle, ref VREvent_t pEvent, uint uncbVREvent); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _PollNextOverlayEvent PollNextOverlayEvent; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayInputMethod(ulong ulOverlayHandle, ref VROverlayInputMethod peInputMethod); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayInputMethod GetOverlayInputMethod; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayInputMethod(ulong ulOverlayHandle, VROverlayInputMethod eInputMethod); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayInputMethod SetOverlayInputMethod; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayMouseScale(ulong ulOverlayHandle, ref HmdVector2_t pvecMouseScale); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayMouseScale GetOverlayMouseScale; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + 
internal delegate EVROverlayError _SetOverlayMouseScale(ulong ulOverlayHandle, ref HmdVector2_t pvecMouseScale); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayMouseScale SetOverlayMouseScale; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _ComputeOverlayIntersection(ulong ulOverlayHandle, ref VROverlayIntersectionParams_t pParams, ref VROverlayIntersectionResults_t pResults); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ComputeOverlayIntersection ComputeOverlayIntersection; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _HandleControllerOverlayInteractionAsMouse(ulong ulOverlayHandle, uint unControllerDeviceIndex); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HandleControllerOverlayInteractionAsMouse HandleControllerOverlayInteractionAsMouse; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsHoverTargetOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsHoverTargetOverlay IsHoverTargetOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ulong _GetGamepadFocusOverlay(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetGamepadFocusOverlay GetGamepadFocusOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetGamepadFocusOverlay(ulong ulNewFocusOverlay); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetGamepadFocusOverlay SetGamepadFocusOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayNeighbor(EOverlayDirection eDirection, ulong ulFrom, ulong ulTo); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayNeighbor SetOverlayNeighbor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _MoveGamepadFocusToNeighbor(EOverlayDirection eDirection, ulong ulFrom); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _MoveGamepadFocusToNeighbor MoveGamepadFocusToNeighbor; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayTexture(ulong ulOverlayHandle, ref Texture_t pTexture); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayTexture SetOverlayTexture; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _ClearOverlayTexture(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ClearOverlayTexture ClearOverlayTexture; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayRaw(ulong ulOverlayHandle, IntPtr pvBuffer, uint unWidth, uint unHeight, uint unDepth); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayRaw SetOverlayRaw; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetOverlayFromFile(ulong ulOverlayHandle, string pchFilePath); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetOverlayFromFile SetOverlayFromFile; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTexture(ulong ulOverlayHandle, ref IntPtr pNativeTextureHandle, IntPtr pNativeTextureRef, ref uint pWidth, ref uint pHeight, ref uint pNativeFormat, ref EGraphicsAPIConvention pAPI, ref EColorSpace pColorSpace); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTexture GetOverlayTexture; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + 
internal delegate EVROverlayError _ReleaseNativeOverlayHandle(ulong ulOverlayHandle, IntPtr pNativeTextureHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ReleaseNativeOverlayHandle ReleaseNativeOverlayHandle; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetOverlayTextureSize(ulong ulOverlayHandle, ref uint pWidth, ref uint pHeight); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetOverlayTextureSize GetOverlayTextureSize; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _CreateDashboardOverlay(string pchOverlayKey, string pchOverlayFriendlyName, ref ulong pMainHandle, ref ulong pThumbnailHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CreateDashboardOverlay CreateDashboardOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsDashboardVisible(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsDashboardVisible IsDashboardVisible; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _IsActiveDashboardOverlay(ulong ulOverlayHandle); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _IsActiveDashboardOverlay IsActiveDashboardOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _SetDashboardOverlaySceneProcess(ulong ulOverlayHandle, uint unProcessId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetDashboardOverlaySceneProcess SetDashboardOverlaySceneProcess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _GetDashboardOverlaySceneProcess(ulong ulOverlayHandle, ref uint punProcessId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetDashboardOverlaySceneProcess GetDashboardOverlaySceneProcess; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _ShowDashboard(string pchOverlayToShow); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShowDashboard ShowDashboard; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetPrimaryDashboardDevice(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetPrimaryDashboardDevice GetPrimaryDashboardDevice; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _ShowKeyboard(int eInputMode, int eLineInputMode, string pchDescription, uint unCharMax, string pchExistingText, bool bUseMinimalMode, ulong uUserValue); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShowKeyboard ShowKeyboard; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVROverlayError _ShowKeyboardForOverlay(ulong ulOverlayHandle, int eInputMode, int eLineInputMode, string pchDescription, uint unCharMax, string pchExistingText, bool bUseMinimalMode, ulong uUserValue); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _ShowKeyboardForOverlay ShowKeyboardForOverlay; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetKeyboardText(System.Text.StringBuilder pchText, uint cchText); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetKeyboardText GetKeyboardText; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _HideKeyboard(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HideKeyboard HideKeyboard; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetKeyboardTransformAbsolute(ETrackingUniverseOrigin eTrackingOrigin, ref 
HmdMatrix34_t pmatTrackingOriginToKeyboardTransform); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetKeyboardTransformAbsolute SetKeyboardTransformAbsolute; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetKeyboardPositionForOverlay(ulong ulOverlayHandle, HmdRect2_t avoidRect); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetKeyboardPositionForOverlay SetKeyboardPositionForOverlay; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRRenderModels +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRRenderModelError _LoadRenderModel_Async(string pchRenderModelName, ref IntPtr ppRenderModel); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LoadRenderModel_Async LoadRenderModel_Async; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _FreeRenderModel(IntPtr pRenderModel); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FreeRenderModel FreeRenderModel; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRRenderModelError _LoadTexture_Async(int textureId, ref IntPtr ppTexture); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LoadTexture_Async LoadTexture_Async; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _FreeTexture(IntPtr pTexture); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FreeTexture FreeTexture; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRRenderModelError _LoadTextureD3D11_Async(int textureId, IntPtr pD3D11Device, ref IntPtr ppD3D11Texture2D); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LoadTextureD3D11_Async LoadTextureD3D11_Async; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRRenderModelError _LoadIntoTextureD3D11_Async(int textureId, IntPtr pDstTexture); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _LoadIntoTextureD3D11_Async LoadIntoTextureD3D11_Async; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _FreeTextureD3D11(IntPtr pD3D11Texture2D); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _FreeTextureD3D11 FreeTextureD3D11; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetRenderModelName(uint unRenderModelIndex, System.Text.StringBuilder pchRenderModelName, uint unRenderModelNameLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRenderModelName GetRenderModelName; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetRenderModelCount(); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRenderModelCount GetRenderModelCount; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetComponentCount(string pchRenderModelName); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetComponentCount GetComponentCount; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetComponentName(string pchRenderModelName, uint unComponentIndex, System.Text.StringBuilder pchComponentName, uint unComponentNameLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetComponentName GetComponentName; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate ulong _GetComponentButtonMask(string pchRenderModelName, string pchComponentName); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetComponentButtonMask GetComponentButtonMask; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + 
internal delegate uint _GetComponentRenderModelName(string pchRenderModelName, string pchComponentName, System.Text.StringBuilder pchComponentRenderModelName, uint unComponentRenderModelNameLen); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetComponentRenderModelName GetComponentRenderModelName; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetComponentState(string pchRenderModelName, string pchComponentName, ref VRControllerState_t pControllerState, ref RenderModel_ControllerMode_State_t pState, ref RenderModel_ComponentState_t pComponentState); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetComponentState GetComponentState; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _RenderModelHasComponent(string pchRenderModelName, string pchComponentName); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RenderModelHasComponent RenderModelHasComponent; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetRenderModelThumbnailURL(string pchRenderModelName, System.Text.StringBuilder pchThumbnailURL, uint unThumbnailURLLen, ref EVRRenderModelError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRenderModelThumbnailURL GetRenderModelThumbnailURL; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetRenderModelOriginalPath(string pchRenderModelName, System.Text.StringBuilder pchOriginalPath, uint unOriginalPathLen, ref EVRRenderModelError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRenderModelOriginalPath GetRenderModelOriginalPath; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetRenderModelErrorNameFromEnum(EVRRenderModelError error); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetRenderModelErrorNameFromEnum GetRenderModelErrorNameFromEnum; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRNotifications +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRNotificationError _CreateNotification(ulong ulOverlayHandle, ulong ulUserValue, EVRNotificationType type, string pchText, EVRNotificationStyle style, ref NotificationBitmap_t pImage, ref uint pNotificationId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _CreateNotification CreateNotification; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRNotificationError _RemoveNotification(uint notificationId); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RemoveNotification RemoveNotification; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRSettings +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate IntPtr _GetSettingsErrorNameFromEnum(EVRSettingsError eError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetSettingsErrorNameFromEnum GetSettingsErrorNameFromEnum; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _Sync(bool bForce, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _Sync Sync; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate bool _GetBool(string pchSection, string pchSettingsKey, bool bDefaultValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetBool GetBool; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetBool(string pchSection, string pchSettingsKey, bool bValue, ref EVRSettingsError 
peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetBool SetBool; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate int _GetInt32(string pchSection, string pchSettingsKey, int nDefaultValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetInt32 GetInt32; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetInt32(string pchSection, string pchSettingsKey, int nValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetInt32 SetInt32; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate float _GetFloat(string pchSection, string pchSettingsKey, float flDefaultValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetFloat GetFloat; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetFloat(string pchSection, string pchSettingsKey, float flValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetFloat SetFloat; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _GetString(string pchSection, string pchSettingsKey, string pchValue, uint unValueLen, string pchDefaultValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetString GetString; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _SetString(string pchSection, string pchSettingsKey, string pchValue, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SetString SetString; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _RemoveSection(string pchSection, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RemoveSection RemoveSection; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate void _RemoveKeyInSection(string pchSection, string pchSettingsKey, ref EVRSettingsError peError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RemoveKeyInSection RemoveKeyInSection; + +} + +[StructLayout(LayoutKind.Sequential)] +public struct IVRScreenshots +{ + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotError _RequestScreenshot(ref uint pOutScreenshotHandle, EVRScreenshotType type, string pchPreviewFilename, string pchVRFilename); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _RequestScreenshot RequestScreenshot; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotError _HookScreenshot([In, Out] EVRScreenshotType[] pSupportedTypes, int numTypes); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _HookScreenshot HookScreenshot; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotType _GetScreenshotPropertyType(uint screenshotHandle, ref EVRScreenshotError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetScreenshotPropertyType GetScreenshotPropertyType; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate uint _GetScreenshotPropertyFilename(uint screenshotHandle, EVRScreenshotPropertyFilenames filenameType, System.Text.StringBuilder pchFilename, uint cchFilename, ref EVRScreenshotError pError); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _GetScreenshotPropertyFilename GetScreenshotPropertyFilename; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate 
EVRScreenshotError _UpdateScreenshotProgress(uint screenshotHandle, float flProgress); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _UpdateScreenshotProgress UpdateScreenshotProgress; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotError _TakeStereoScreenshot(ref uint pOutScreenshotHandle, string pchPreviewFilename, string pchVRFilename); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _TakeStereoScreenshot TakeStereoScreenshot; + + [UnmanagedFunctionPointer(CallingConvention.StdCall)] + internal delegate EVRScreenshotError _SubmitScreenshot(uint screenshotHandle, EVRScreenshotType type, string pchSourcePreviewFilename, string pchSourceVRFilename); + [MarshalAs(UnmanagedType.FunctionPtr)] + internal _SubmitScreenshot SubmitScreenshot; + +} + + +public class CVRSystem +{ + IVRSystem FnTable; + internal CVRSystem(IntPtr pInterface) + { + FnTable = (IVRSystem)Marshal.PtrToStructure(pInterface, typeof(IVRSystem)); + } + public void GetRecommendedRenderTargetSize(ref uint pnWidth,ref uint pnHeight) + { + pnWidth = 0; + pnHeight = 0; + FnTable.GetRecommendedRenderTargetSize(ref pnWidth,ref pnHeight); + } + public HmdMatrix44_t GetProjectionMatrix(EVREye eEye,float fNearZ,float fFarZ,EGraphicsAPIConvention eProjType) + { + HmdMatrix44_t result = FnTable.GetProjectionMatrix(eEye,fNearZ,fFarZ,eProjType); + return result; + } + public void GetProjectionRaw(EVREye eEye,ref float pfLeft,ref float pfRight,ref float pfTop,ref float pfBottom) + { + pfLeft = 0; + pfRight = 0; + pfTop = 0; + pfBottom = 0; + FnTable.GetProjectionRaw(eEye,ref pfLeft,ref pfRight,ref pfTop,ref pfBottom); + } + public DistortionCoordinates_t ComputeDistortion(EVREye eEye,float fU,float fV) + { + DistortionCoordinates_t result = FnTable.ComputeDistortion(eEye,fU,fV); + return result; + } + public HmdMatrix34_t GetEyeToHeadTransform(EVREye eEye) + { + HmdMatrix34_t result = FnTable.GetEyeToHeadTransform(eEye); + return result; + } + public bool GetTimeSinceLastVsync(ref float pfSecondsSinceLastVsync,ref ulong pulFrameCounter) + { + pfSecondsSinceLastVsync = 0; + pulFrameCounter = 0; + bool result = FnTable.GetTimeSinceLastVsync(ref pfSecondsSinceLastVsync,ref pulFrameCounter); + return result; + } + public int GetD3D9AdapterIndex() + { + int result = FnTable.GetD3D9AdapterIndex(); + return result; + } + public void GetDXGIOutputInfo(ref int pnAdapterIndex) + { + pnAdapterIndex = 0; + FnTable.GetDXGIOutputInfo(ref pnAdapterIndex); + } + public bool IsDisplayOnDesktop() + { + bool result = FnTable.IsDisplayOnDesktop(); + return result; + } + public bool SetDisplayVisibility(bool bIsVisibleOnDesktop) + { + bool result = FnTable.SetDisplayVisibility(bIsVisibleOnDesktop); + return result; + } + public void GetDeviceToAbsoluteTrackingPose(ETrackingUniverseOrigin eOrigin,float fPredictedSecondsToPhotonsFromNow,TrackedDevicePose_t [] pTrackedDevicePoseArray) + { + FnTable.GetDeviceToAbsoluteTrackingPose(eOrigin,fPredictedSecondsToPhotonsFromNow,pTrackedDevicePoseArray,(uint) pTrackedDevicePoseArray.Length); + } + public void ResetSeatedZeroPose() + { + FnTable.ResetSeatedZeroPose(); + } + public HmdMatrix34_t GetSeatedZeroPoseToStandingAbsoluteTrackingPose() + { + HmdMatrix34_t result = FnTable.GetSeatedZeroPoseToStandingAbsoluteTrackingPose(); + return result; + } + public HmdMatrix34_t GetRawZeroPoseToStandingAbsoluteTrackingPose() + { + HmdMatrix34_t result = FnTable.GetRawZeroPoseToStandingAbsoluteTrackingPose(); + return result; + } + public uint 
GetSortedTrackedDeviceIndicesOfClass(ETrackedDeviceClass eTrackedDeviceClass,uint [] punTrackedDeviceIndexArray,uint unRelativeToTrackedDeviceIndex) + { + uint result = FnTable.GetSortedTrackedDeviceIndicesOfClass(eTrackedDeviceClass,punTrackedDeviceIndexArray,(uint) punTrackedDeviceIndexArray.Length,unRelativeToTrackedDeviceIndex); + return result; + } + public EDeviceActivityLevel GetTrackedDeviceActivityLevel(uint unDeviceId) + { + EDeviceActivityLevel result = FnTable.GetTrackedDeviceActivityLevel(unDeviceId); + return result; + } + public void ApplyTransform(ref TrackedDevicePose_t pOutputPose,ref TrackedDevicePose_t pTrackedDevicePose,ref HmdMatrix34_t pTransform) + { + FnTable.ApplyTransform(ref pOutputPose,ref pTrackedDevicePose,ref pTransform); + } + public uint GetTrackedDeviceIndexForControllerRole(ETrackedControllerRole unDeviceType) + { + uint result = FnTable.GetTrackedDeviceIndexForControllerRole(unDeviceType); + return result; + } + public ETrackedControllerRole GetControllerRoleForTrackedDeviceIndex(uint unDeviceIndex) + { + ETrackedControllerRole result = FnTable.GetControllerRoleForTrackedDeviceIndex(unDeviceIndex); + return result; + } + public ETrackedDeviceClass GetTrackedDeviceClass(uint unDeviceIndex) + { + ETrackedDeviceClass result = FnTable.GetTrackedDeviceClass(unDeviceIndex); + return result; + } + public bool IsTrackedDeviceConnected(uint unDeviceIndex) + { + bool result = FnTable.IsTrackedDeviceConnected(unDeviceIndex); + return result; + } + public bool GetBoolTrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,ref ETrackedPropertyError pError) + { + bool result = FnTable.GetBoolTrackedDeviceProperty(unDeviceIndex,prop,ref pError); + return result; + } + public float GetFloatTrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,ref ETrackedPropertyError pError) + { + float result = FnTable.GetFloatTrackedDeviceProperty(unDeviceIndex,prop,ref pError); + return result; + } + public int GetInt32TrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,ref ETrackedPropertyError pError) + { + int result = FnTable.GetInt32TrackedDeviceProperty(unDeviceIndex,prop,ref pError); + return result; + } + public ulong GetUint64TrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,ref ETrackedPropertyError pError) + { + ulong result = FnTable.GetUint64TrackedDeviceProperty(unDeviceIndex,prop,ref pError); + return result; + } + public HmdMatrix34_t GetMatrix34TrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,ref ETrackedPropertyError pError) + { + HmdMatrix34_t result = FnTable.GetMatrix34TrackedDeviceProperty(unDeviceIndex,prop,ref pError); + return result; + } + public uint GetStringTrackedDeviceProperty(uint unDeviceIndex,ETrackedDeviceProperty prop,System.Text.StringBuilder pchValue,uint unBufferSize,ref ETrackedPropertyError pError) + { + uint result = FnTable.GetStringTrackedDeviceProperty(unDeviceIndex,prop,pchValue,unBufferSize,ref pError); + return result; + } + public string GetPropErrorNameFromEnum(ETrackedPropertyError error) + { + IntPtr result = FnTable.GetPropErrorNameFromEnum(error); + return Marshal.PtrToStringAnsi(result); + } + public bool PollNextEvent(ref VREvent_t pEvent,uint uncbVREvent) + { + bool result = FnTable.PollNextEvent(ref pEvent,uncbVREvent); + return result; + } + public bool PollNextEventWithPose(ETrackingUniverseOrigin eOrigin,ref VREvent_t pEvent,uint uncbVREvent,ref TrackedDevicePose_t pTrackedDevicePose) + { + bool result = 
FnTable.PollNextEventWithPose(eOrigin,ref pEvent,uncbVREvent,ref pTrackedDevicePose); + return result; + } + public string GetEventTypeNameFromEnum(EVREventType eType) + { + IntPtr result = FnTable.GetEventTypeNameFromEnum(eType); + return Marshal.PtrToStringAnsi(result); + } + public HiddenAreaMesh_t GetHiddenAreaMesh(EVREye eEye) + { + HiddenAreaMesh_t result = FnTable.GetHiddenAreaMesh(eEye); + return result; + } + public bool GetControllerState(uint unControllerDeviceIndex,ref VRControllerState_t pControllerState) + { + bool result = FnTable.GetControllerState(unControllerDeviceIndex,ref pControllerState); + return result; + } + public bool GetControllerStateWithPose(ETrackingUniverseOrigin eOrigin,uint unControllerDeviceIndex,ref VRControllerState_t pControllerState,ref TrackedDevicePose_t pTrackedDevicePose) + { + bool result = FnTable.GetControllerStateWithPose(eOrigin,unControllerDeviceIndex,ref pControllerState,ref pTrackedDevicePose); + return result; + } + public void TriggerHapticPulse(uint unControllerDeviceIndex,uint unAxisId,char usDurationMicroSec) + { + FnTable.TriggerHapticPulse(unControllerDeviceIndex,unAxisId,usDurationMicroSec); + } + public string GetButtonIdNameFromEnum(EVRButtonId eButtonId) + { + IntPtr result = FnTable.GetButtonIdNameFromEnum(eButtonId); + return Marshal.PtrToStringAnsi(result); + } + public string GetControllerAxisTypeNameFromEnum(EVRControllerAxisType eAxisType) + { + IntPtr result = FnTable.GetControllerAxisTypeNameFromEnum(eAxisType); + return Marshal.PtrToStringAnsi(result); + } + public bool CaptureInputFocus() + { + bool result = FnTable.CaptureInputFocus(); + return result; + } + public void ReleaseInputFocus() + { + FnTable.ReleaseInputFocus(); + } + public bool IsInputFocusCapturedByAnotherProcess() + { + bool result = FnTable.IsInputFocusCapturedByAnotherProcess(); + return result; + } + public uint DriverDebugRequest(uint unDeviceIndex,string pchRequest,string pchResponseBuffer,uint unResponseBufferSize) + { + uint result = FnTable.DriverDebugRequest(unDeviceIndex,pchRequest,pchResponseBuffer,unResponseBufferSize); + return result; + } + public EVRFirmwareError PerformFirmwareUpdate(uint unDeviceIndex) + { + EVRFirmwareError result = FnTable.PerformFirmwareUpdate(unDeviceIndex); + return result; + } + public void AcknowledgeQuit_Exiting() + { + FnTable.AcknowledgeQuit_Exiting(); + } + public void AcknowledgeQuit_UserPrompt() + { + FnTable.AcknowledgeQuit_UserPrompt(); + } +} + + +public class CVRExtendedDisplay +{ + IVRExtendedDisplay FnTable; + internal CVRExtendedDisplay(IntPtr pInterface) + { + FnTable = (IVRExtendedDisplay)Marshal.PtrToStructure(pInterface, typeof(IVRExtendedDisplay)); + } + public void GetWindowBounds(ref int pnX,ref int pnY,ref uint pnWidth,ref uint pnHeight) + { + pnX = 0; + pnY = 0; + pnWidth = 0; + pnHeight = 0; + FnTable.GetWindowBounds(ref pnX,ref pnY,ref pnWidth,ref pnHeight); + } + public void GetEyeOutputViewport(EVREye eEye,ref uint pnX,ref uint pnY,ref uint pnWidth,ref uint pnHeight) + { + pnX = 0; + pnY = 0; + pnWidth = 0; + pnHeight = 0; + FnTable.GetEyeOutputViewport(eEye,ref pnX,ref pnY,ref pnWidth,ref pnHeight); + } + public void GetDXGIOutputInfo(ref int pnAdapterIndex,ref int pnAdapterOutputIndex) + { + pnAdapterIndex = 0; + pnAdapterOutputIndex = 0; + FnTable.GetDXGIOutputInfo(ref pnAdapterIndex,ref pnAdapterOutputIndex); + } +} + + +public class CVRTrackedCamera +{ + IVRTrackedCamera FnTable; + internal CVRTrackedCamera(IntPtr pInterface) + { + FnTable = 
(IVRTrackedCamera)Marshal.PtrToStructure(pInterface, typeof(IVRTrackedCamera)); + } + public string GetCameraErrorNameFromEnum(EVRTrackedCameraError eCameraError) + { + IntPtr result = FnTable.GetCameraErrorNameFromEnum(eCameraError); + return Marshal.PtrToStringAnsi(result); + } + public EVRTrackedCameraError HasCamera(uint nDeviceIndex,ref bool pHasCamera) + { + pHasCamera = false; + EVRTrackedCameraError result = FnTable.HasCamera(nDeviceIndex,ref pHasCamera); + return result; + } + public EVRTrackedCameraError GetCameraFrameSize(uint nDeviceIndex,EVRTrackedCameraFrameType eFrameType,ref uint pnWidth,ref uint pnHeight,ref uint pnFrameBufferSize) + { + pnWidth = 0; + pnHeight = 0; + pnFrameBufferSize = 0; + EVRTrackedCameraError result = FnTable.GetCameraFrameSize(nDeviceIndex,eFrameType,ref pnWidth,ref pnHeight,ref pnFrameBufferSize); + return result; + } + public EVRTrackedCameraError GetCameraIntrinisics(uint nDeviceIndex,EVRTrackedCameraFrameType eFrameType,ref HmdVector2_t pFocalLength,ref HmdVector2_t pCenter) + { + EVRTrackedCameraError result = FnTable.GetCameraIntrinisics(nDeviceIndex,eFrameType,ref pFocalLength,ref pCenter); + return result; + } + public EVRTrackedCameraError GetCameraProjection(uint nDeviceIndex,EVRTrackedCameraFrameType eFrameType,float flZNear,float flZFar,ref HmdMatrix44_t pProjection) + { + EVRTrackedCameraError result = FnTable.GetCameraProjection(nDeviceIndex,eFrameType,flZNear,flZFar,ref pProjection); + return result; + } + public EVRTrackedCameraError AcquireVideoStreamingService(uint nDeviceIndex,ref ulong pHandle) + { + pHandle = 0; + EVRTrackedCameraError result = FnTable.AcquireVideoStreamingService(nDeviceIndex,ref pHandle); + return result; + } + public EVRTrackedCameraError ReleaseVideoStreamingService(ulong hTrackedCamera) + { + EVRTrackedCameraError result = FnTable.ReleaseVideoStreamingService(hTrackedCamera); + return result; + } + public EVRTrackedCameraError GetVideoStreamFrameBuffer(ulong hTrackedCamera,EVRTrackedCameraFrameType eFrameType,IntPtr pFrameBuffer,uint nFrameBufferSize,ref CameraVideoStreamFrameHeader_t pFrameHeader,uint nFrameHeaderSize) + { + EVRTrackedCameraError result = FnTable.GetVideoStreamFrameBuffer(hTrackedCamera,eFrameType,pFrameBuffer,nFrameBufferSize,ref pFrameHeader,nFrameHeaderSize); + return result; + } +} + + +public class CVRApplications +{ + IVRApplications FnTable; + internal CVRApplications(IntPtr pInterface) + { + FnTable = (IVRApplications)Marshal.PtrToStructure(pInterface, typeof(IVRApplications)); + } + public EVRApplicationError AddApplicationManifest(string pchApplicationManifestFullPath,bool bTemporary) + { + EVRApplicationError result = FnTable.AddApplicationManifest(pchApplicationManifestFullPath,bTemporary); + return result; + } + public EVRApplicationError RemoveApplicationManifest(string pchApplicationManifestFullPath) + { + EVRApplicationError result = FnTable.RemoveApplicationManifest(pchApplicationManifestFullPath); + return result; + } + public bool IsApplicationInstalled(string pchAppKey) + { + bool result = FnTable.IsApplicationInstalled(pchAppKey); + return result; + } + public uint GetApplicationCount() + { + uint result = FnTable.GetApplicationCount(); + return result; + } + public EVRApplicationError GetApplicationKeyByIndex(uint unApplicationIndex,string pchAppKeyBuffer,uint unAppKeyBufferLen) + { + EVRApplicationError result = FnTable.GetApplicationKeyByIndex(unApplicationIndex,pchAppKeyBuffer,unAppKeyBufferLen); + return result; + } + public EVRApplicationError 
GetApplicationKeyByProcessId(uint unProcessId,string pchAppKeyBuffer,uint unAppKeyBufferLen) + { + EVRApplicationError result = FnTable.GetApplicationKeyByProcessId(unProcessId,pchAppKeyBuffer,unAppKeyBufferLen); + return result; + } + public EVRApplicationError LaunchApplication(string pchAppKey) + { + EVRApplicationError result = FnTable.LaunchApplication(pchAppKey); + return result; + } + public EVRApplicationError LaunchTemplateApplication(string pchTemplateAppKey,string pchNewAppKey,AppOverrideKeys_t [] pKeys) + { + EVRApplicationError result = FnTable.LaunchTemplateApplication(pchTemplateAppKey,pchNewAppKey,pKeys,(uint) pKeys.Length); + return result; + } + public EVRApplicationError LaunchDashboardOverlay(string pchAppKey) + { + EVRApplicationError result = FnTable.LaunchDashboardOverlay(pchAppKey); + return result; + } + public bool CancelApplicationLaunch(string pchAppKey) + { + bool result = FnTable.CancelApplicationLaunch(pchAppKey); + return result; + } + public EVRApplicationError IdentifyApplication(uint unProcessId,string pchAppKey) + { + EVRApplicationError result = FnTable.IdentifyApplication(unProcessId,pchAppKey); + return result; + } + public uint GetApplicationProcessId(string pchAppKey) + { + uint result = FnTable.GetApplicationProcessId(pchAppKey); + return result; + } + public string GetApplicationsErrorNameFromEnum(EVRApplicationError error) + { + IntPtr result = FnTable.GetApplicationsErrorNameFromEnum(error); + return Marshal.PtrToStringAnsi(result); + } + public uint GetApplicationPropertyString(string pchAppKey,EVRApplicationProperty eProperty,string pchPropertyValueBuffer,uint unPropertyValueBufferLen,ref EVRApplicationError peError) + { + uint result = FnTable.GetApplicationPropertyString(pchAppKey,eProperty,pchPropertyValueBuffer,unPropertyValueBufferLen,ref peError); + return result; + } + public bool GetApplicationPropertyBool(string pchAppKey,EVRApplicationProperty eProperty,ref EVRApplicationError peError) + { + bool result = FnTable.GetApplicationPropertyBool(pchAppKey,eProperty,ref peError); + return result; + } + public ulong GetApplicationPropertyUint64(string pchAppKey,EVRApplicationProperty eProperty,ref EVRApplicationError peError) + { + ulong result = FnTable.GetApplicationPropertyUint64(pchAppKey,eProperty,ref peError); + return result; + } + public EVRApplicationError SetApplicationAutoLaunch(string pchAppKey,bool bAutoLaunch) + { + EVRApplicationError result = FnTable.SetApplicationAutoLaunch(pchAppKey,bAutoLaunch); + return result; + } + public bool GetApplicationAutoLaunch(string pchAppKey) + { + bool result = FnTable.GetApplicationAutoLaunch(pchAppKey); + return result; + } + public EVRApplicationError GetStartingApplication(string pchAppKeyBuffer,uint unAppKeyBufferLen) + { + EVRApplicationError result = FnTable.GetStartingApplication(pchAppKeyBuffer,unAppKeyBufferLen); + return result; + } + public EVRApplicationTransitionState GetTransitionState() + { + EVRApplicationTransitionState result = FnTable.GetTransitionState(); + return result; + } + public EVRApplicationError PerformApplicationPrelaunchCheck(string pchAppKey) + { + EVRApplicationError result = FnTable.PerformApplicationPrelaunchCheck(pchAppKey); + return result; + } + public string GetApplicationsTransitionStateNameFromEnum(EVRApplicationTransitionState state) + { + IntPtr result = FnTable.GetApplicationsTransitionStateNameFromEnum(state); + return Marshal.PtrToStringAnsi(result); + } + public bool IsQuitUserPromptRequested() + { + bool result = 
FnTable.IsQuitUserPromptRequested(); + return result; + } + public EVRApplicationError LaunchInternalProcess(string pchBinaryPath,string pchArguments,string pchWorkingDirectory) + { + EVRApplicationError result = FnTable.LaunchInternalProcess(pchBinaryPath,pchArguments,pchWorkingDirectory); + return result; + } +} + + +public class CVRChaperone +{ + IVRChaperone FnTable; + internal CVRChaperone(IntPtr pInterface) + { + FnTable = (IVRChaperone)Marshal.PtrToStructure(pInterface, typeof(IVRChaperone)); + } + public ChaperoneCalibrationState GetCalibrationState() + { + ChaperoneCalibrationState result = FnTable.GetCalibrationState(); + return result; + } + public bool GetPlayAreaSize(ref float pSizeX,ref float pSizeZ) + { + pSizeX = 0; + pSizeZ = 0; + bool result = FnTable.GetPlayAreaSize(ref pSizeX,ref pSizeZ); + return result; + } + public bool GetPlayAreaRect(ref HmdQuad_t rect) + { + bool result = FnTable.GetPlayAreaRect(ref rect); + return result; + } + public void ReloadInfo() + { + FnTable.ReloadInfo(); + } + public void SetSceneColor(HmdColor_t color) + { + FnTable.SetSceneColor(color); + } + public void GetBoundsColor(ref HmdColor_t pOutputColorArray,int nNumOutputColors,float flCollisionBoundsFadeDistance,ref HmdColor_t pOutputCameraColor) + { + FnTable.GetBoundsColor(ref pOutputColorArray,nNumOutputColors,flCollisionBoundsFadeDistance,ref pOutputCameraColor); + } + public bool AreBoundsVisible() + { + bool result = FnTable.AreBoundsVisible(); + return result; + } + public void ForceBoundsVisible(bool bForce) + { + FnTable.ForceBoundsVisible(bForce); + } +} + + +public class CVRChaperoneSetup +{ + IVRChaperoneSetup FnTable; + internal CVRChaperoneSetup(IntPtr pInterface) + { + FnTable = (IVRChaperoneSetup)Marshal.PtrToStructure(pInterface, typeof(IVRChaperoneSetup)); + } + public bool CommitWorkingCopy(EChaperoneConfigFile configFile) + { + bool result = FnTable.CommitWorkingCopy(configFile); + return result; + } + public void RevertWorkingCopy() + { + FnTable.RevertWorkingCopy(); + } + public bool GetWorkingPlayAreaSize(ref float pSizeX,ref float pSizeZ) + { + pSizeX = 0; + pSizeZ = 0; + bool result = FnTable.GetWorkingPlayAreaSize(ref pSizeX,ref pSizeZ); + return result; + } + public bool GetWorkingPlayAreaRect(ref HmdQuad_t rect) + { + bool result = FnTable.GetWorkingPlayAreaRect(ref rect); + return result; + } + public bool GetWorkingCollisionBoundsInfo(out HmdQuad_t [] pQuadsBuffer) + { + uint punQuadsCount = 0; + bool result = FnTable.GetWorkingCollisionBoundsInfo(null,ref punQuadsCount); + pQuadsBuffer= new HmdQuad_t[punQuadsCount]; + result = FnTable.GetWorkingCollisionBoundsInfo(pQuadsBuffer,ref punQuadsCount); + return result; + } + public bool GetLiveCollisionBoundsInfo(out HmdQuad_t [] pQuadsBuffer) + { + uint punQuadsCount = 0; + bool result = FnTable.GetLiveCollisionBoundsInfo(null,ref punQuadsCount); + pQuadsBuffer= new HmdQuad_t[punQuadsCount]; + result = FnTable.GetLiveCollisionBoundsInfo(pQuadsBuffer,ref punQuadsCount); + return result; + } + public bool GetWorkingSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatSeatedZeroPoseToRawTrackingPose) + { + bool result = FnTable.GetWorkingSeatedZeroPoseToRawTrackingPose(ref pmatSeatedZeroPoseToRawTrackingPose); + return result; + } + public bool GetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatStandingZeroPoseToRawTrackingPose) + { + bool result = FnTable.GetWorkingStandingZeroPoseToRawTrackingPose(ref pmatStandingZeroPoseToRawTrackingPose); + return result; + } + public void 
SetWorkingPlayAreaSize(float sizeX,float sizeZ) + { + FnTable.SetWorkingPlayAreaSize(sizeX,sizeZ); + } + public void SetWorkingCollisionBoundsInfo(HmdQuad_t [] pQuadsBuffer) + { + FnTable.SetWorkingCollisionBoundsInfo(pQuadsBuffer,(uint) pQuadsBuffer.Length); + } + public void SetWorkingSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatSeatedZeroPoseToRawTrackingPose) + { + FnTable.SetWorkingSeatedZeroPoseToRawTrackingPose(ref pMatSeatedZeroPoseToRawTrackingPose); + } + public void SetWorkingStandingZeroPoseToRawTrackingPose(ref HmdMatrix34_t pMatStandingZeroPoseToRawTrackingPose) + { + FnTable.SetWorkingStandingZeroPoseToRawTrackingPose(ref pMatStandingZeroPoseToRawTrackingPose); + } + public void ReloadFromDisk(EChaperoneConfigFile configFile) + { + FnTable.ReloadFromDisk(configFile); + } + public bool GetLiveSeatedZeroPoseToRawTrackingPose(ref HmdMatrix34_t pmatSeatedZeroPoseToRawTrackingPose) + { + bool result = FnTable.GetLiveSeatedZeroPoseToRawTrackingPose(ref pmatSeatedZeroPoseToRawTrackingPose); + return result; + } + public void SetWorkingCollisionBoundsTagsInfo(byte [] pTagsBuffer) + { + FnTable.SetWorkingCollisionBoundsTagsInfo(pTagsBuffer,(uint) pTagsBuffer.Length); + } + public bool GetLiveCollisionBoundsTagsInfo(out byte [] pTagsBuffer) + { + uint punTagCount = 0; + bool result = FnTable.GetLiveCollisionBoundsTagsInfo(null,ref punTagCount); + pTagsBuffer= new byte[punTagCount]; + result = FnTable.GetLiveCollisionBoundsTagsInfo(pTagsBuffer,ref punTagCount); + return result; + } + public bool SetWorkingPhysicalBoundsInfo(HmdQuad_t [] pQuadsBuffer) + { + bool result = FnTable.SetWorkingPhysicalBoundsInfo(pQuadsBuffer,(uint) pQuadsBuffer.Length); + return result; + } + public bool GetLivePhysicalBoundsInfo(out HmdQuad_t [] pQuadsBuffer) + { + uint punQuadsCount = 0; + bool result = FnTable.GetLivePhysicalBoundsInfo(null,ref punQuadsCount); + pQuadsBuffer= new HmdQuad_t[punQuadsCount]; + result = FnTable.GetLivePhysicalBoundsInfo(pQuadsBuffer,ref punQuadsCount); + return result; + } + public bool ExportLiveToBuffer(System.Text.StringBuilder pBuffer,ref uint pnBufferLength) + { + pnBufferLength = 0; + bool result = FnTable.ExportLiveToBuffer(pBuffer,ref pnBufferLength); + return result; + } + public bool ImportFromBufferToWorking(string pBuffer,uint nImportFlags) + { + bool result = FnTable.ImportFromBufferToWorking(pBuffer,nImportFlags); + return result; + } +} + + +public class CVRCompositor +{ + IVRCompositor FnTable; + internal CVRCompositor(IntPtr pInterface) + { + FnTable = (IVRCompositor)Marshal.PtrToStructure(pInterface, typeof(IVRCompositor)); + } + public void SetTrackingSpace(ETrackingUniverseOrigin eOrigin) + { + FnTable.SetTrackingSpace(eOrigin); + } + public ETrackingUniverseOrigin GetTrackingSpace() + { + ETrackingUniverseOrigin result = FnTable.GetTrackingSpace(); + return result; + } + public EVRCompositorError WaitGetPoses(TrackedDevicePose_t [] pRenderPoseArray,TrackedDevicePose_t [] pGamePoseArray) + { + EVRCompositorError result = FnTable.WaitGetPoses(pRenderPoseArray,(uint) pRenderPoseArray.Length,pGamePoseArray,(uint) pGamePoseArray.Length); + return result; + } + public EVRCompositorError GetLastPoses(TrackedDevicePose_t [] pRenderPoseArray,TrackedDevicePose_t [] pGamePoseArray) + { + EVRCompositorError result = FnTable.GetLastPoses(pRenderPoseArray,(uint) pRenderPoseArray.Length,pGamePoseArray,(uint) pGamePoseArray.Length); + return result; + } + public EVRCompositorError GetLastPoseForTrackedDeviceIndex(uint unDeviceIndex,ref TrackedDevicePose_t 
pOutputPose,ref TrackedDevicePose_t pOutputGamePose) + { + EVRCompositorError result = FnTable.GetLastPoseForTrackedDeviceIndex(unDeviceIndex,ref pOutputPose,ref pOutputGamePose); + return result; + } + public EVRCompositorError Submit(EVREye eEye,ref Texture_t pTexture,ref VRTextureBounds_t pBounds,EVRSubmitFlags nSubmitFlags) + { + EVRCompositorError result = FnTable.Submit(eEye,ref pTexture,ref pBounds,nSubmitFlags); + return result; + } + public void ClearLastSubmittedFrame() + { + FnTable.ClearLastSubmittedFrame(); + } + public void PostPresentHandoff() + { + FnTable.PostPresentHandoff(); + } + public bool GetFrameTiming(ref Compositor_FrameTiming pTiming,uint unFramesAgo) + { + bool result = FnTable.GetFrameTiming(ref pTiming,unFramesAgo); + return result; + } + public float GetFrameTimeRemaining() + { + float result = FnTable.GetFrameTimeRemaining(); + return result; + } + public void GetCumulativeStats(ref Compositor_CumulativeStats pStats,uint nStatsSizeInBytes) + { + FnTable.GetCumulativeStats(ref pStats,nStatsSizeInBytes); + } + public void FadeToColor(float fSeconds,float fRed,float fGreen,float fBlue,float fAlpha,bool bBackground) + { + FnTable.FadeToColor(fSeconds,fRed,fGreen,fBlue,fAlpha,bBackground); + } + public void FadeGrid(float fSeconds,bool bFadeIn) + { + FnTable.FadeGrid(fSeconds,bFadeIn); + } + public EVRCompositorError SetSkyboxOverride(Texture_t [] pTextures) + { + EVRCompositorError result = FnTable.SetSkyboxOverride(pTextures,(uint) pTextures.Length); + return result; + } + public void ClearSkyboxOverride() + { + FnTable.ClearSkyboxOverride(); + } + public void CompositorBringToFront() + { + FnTable.CompositorBringToFront(); + } + public void CompositorGoToBack() + { + FnTable.CompositorGoToBack(); + } + public void CompositorQuit() + { + FnTable.CompositorQuit(); + } + public bool IsFullscreen() + { + bool result = FnTable.IsFullscreen(); + return result; + } + public uint GetCurrentSceneFocusProcess() + { + uint result = FnTable.GetCurrentSceneFocusProcess(); + return result; + } + public uint GetLastFrameRenderer() + { + uint result = FnTable.GetLastFrameRenderer(); + return result; + } + public bool CanRenderScene() + { + bool result = FnTable.CanRenderScene(); + return result; + } + public void ShowMirrorWindow() + { + FnTable.ShowMirrorWindow(); + } + public void HideMirrorWindow() + { + FnTable.HideMirrorWindow(); + } + public bool IsMirrorWindowVisible() + { + bool result = FnTable.IsMirrorWindowVisible(); + return result; + } + public void CompositorDumpImages() + { + FnTable.CompositorDumpImages(); + } + public bool ShouldAppRenderWithLowResources() + { + bool result = FnTable.ShouldAppRenderWithLowResources(); + return result; + } + public void ForceInterleavedReprojectionOn(bool bOverride) + { + FnTable.ForceInterleavedReprojectionOn(bOverride); + } + public void ForceReconnectProcess() + { + FnTable.ForceReconnectProcess(); + } + public void SuspendRendering(bool bSuspend) + { + FnTable.SuspendRendering(bSuspend); + } + public EVRCompositorError RequestScreenshot(EVRScreenshotType type,string pchDestinationFileName,string pchVRDestinationFileName) + { + EVRCompositorError result = FnTable.RequestScreenshot(type,pchDestinationFileName,pchVRDestinationFileName); + return result; + } + public EVRScreenshotType GetCurrentScreenshotType() + { + EVRScreenshotType result = FnTable.GetCurrentScreenshotType(); + return result; + } + public EVRCompositorError GetMirrorTextureD3D11(EVREye eEye,IntPtr pD3D11DeviceOrResource,ref IntPtr ppD3D11ShaderResourceView) 
+ { + EVRCompositorError result = FnTable.GetMirrorTextureD3D11(eEye,pD3D11DeviceOrResource,ref ppD3D11ShaderResourceView); + return result; + } + public EVRCompositorError GetMirrorTextureGL(EVREye eEye,ref uint pglTextureId,IntPtr pglSharedTextureHandle) + { + pglTextureId = 0; + EVRCompositorError result = FnTable.GetMirrorTextureGL(eEye,ref pglTextureId,pglSharedTextureHandle); + return result; + } + public bool ReleaseSharedGLTexture(uint glTextureId,IntPtr glSharedTextureHandle) + { + bool result = FnTable.ReleaseSharedGLTexture(glTextureId,glSharedTextureHandle); + return result; + } + public void LockGLSharedTextureForAccess(IntPtr glSharedTextureHandle) + { + FnTable.LockGLSharedTextureForAccess(glSharedTextureHandle); + } + public void UnlockGLSharedTextureForAccess(IntPtr glSharedTextureHandle) + { + FnTable.UnlockGLSharedTextureForAccess(glSharedTextureHandle); + } +} + + +public class CVROverlay +{ + IVROverlay FnTable; + internal CVROverlay(IntPtr pInterface) + { + FnTable = (IVROverlay)Marshal.PtrToStructure(pInterface, typeof(IVROverlay)); + } + public EVROverlayError FindOverlay(string pchOverlayKey,ref ulong pOverlayHandle) + { + pOverlayHandle = 0; + EVROverlayError result = FnTable.FindOverlay(pchOverlayKey,ref pOverlayHandle); + return result; + } + public EVROverlayError CreateOverlay(string pchOverlayKey,string pchOverlayFriendlyName,ref ulong pOverlayHandle) + { + pOverlayHandle = 0; + EVROverlayError result = FnTable.CreateOverlay(pchOverlayKey,pchOverlayFriendlyName,ref pOverlayHandle); + return result; + } + public EVROverlayError DestroyOverlay(ulong ulOverlayHandle) + { + EVROverlayError result = FnTable.DestroyOverlay(ulOverlayHandle); + return result; + } + public EVROverlayError SetHighQualityOverlay(ulong ulOverlayHandle) + { + EVROverlayError result = FnTable.SetHighQualityOverlay(ulOverlayHandle); + return result; + } + public ulong GetHighQualityOverlay() + { + ulong result = FnTable.GetHighQualityOverlay(); + return result; + } + public uint GetOverlayKey(ulong ulOverlayHandle,System.Text.StringBuilder pchValue,uint unBufferSize,ref EVROverlayError pError) + { + uint result = FnTable.GetOverlayKey(ulOverlayHandle,pchValue,unBufferSize,ref pError); + return result; + } + public uint GetOverlayName(ulong ulOverlayHandle,System.Text.StringBuilder pchValue,uint unBufferSize,ref EVROverlayError pError) + { + uint result = FnTable.GetOverlayName(ulOverlayHandle,pchValue,unBufferSize,ref pError); + return result; + } + public EVROverlayError GetOverlayImageData(ulong ulOverlayHandle,IntPtr pvBuffer,uint unBufferSize,ref uint punWidth,ref uint punHeight) + { + punWidth = 0; + punHeight = 0; + EVROverlayError result = FnTable.GetOverlayImageData(ulOverlayHandle,pvBuffer,unBufferSize,ref punWidth,ref punHeight); + return result; + } + public string GetOverlayErrorNameFromEnum(EVROverlayError error) + { + IntPtr result = FnTable.GetOverlayErrorNameFromEnum(error); + return Marshal.PtrToStringAnsi(result); + } + public EVROverlayError SetOverlayRenderingPid(ulong ulOverlayHandle,uint unPID) + { + EVROverlayError result = FnTable.SetOverlayRenderingPid(ulOverlayHandle,unPID); + return result; + } + public uint GetOverlayRenderingPid(ulong ulOverlayHandle) + { + uint result = FnTable.GetOverlayRenderingPid(ulOverlayHandle); + return result; + } + public EVROverlayError SetOverlayFlag(ulong ulOverlayHandle,VROverlayFlags eOverlayFlag,bool bEnabled) + { + EVROverlayError result = FnTable.SetOverlayFlag(ulOverlayHandle,eOverlayFlag,bEnabled); + return result; + } + 
public EVROverlayError GetOverlayFlag(ulong ulOverlayHandle,VROverlayFlags eOverlayFlag,ref bool pbEnabled) + { + pbEnabled = false; + EVROverlayError result = FnTable.GetOverlayFlag(ulOverlayHandle,eOverlayFlag,ref pbEnabled); + return result; + } + public EVROverlayError SetOverlayColor(ulong ulOverlayHandle,float fRed,float fGreen,float fBlue) + { + EVROverlayError result = FnTable.SetOverlayColor(ulOverlayHandle,fRed,fGreen,fBlue); + return result; + } + public EVROverlayError GetOverlayColor(ulong ulOverlayHandle,ref float pfRed,ref float pfGreen,ref float pfBlue) + { + pfRed = 0; + pfGreen = 0; + pfBlue = 0; + EVROverlayError result = FnTable.GetOverlayColor(ulOverlayHandle,ref pfRed,ref pfGreen,ref pfBlue); + return result; + } + public EVROverlayError SetOverlayAlpha(ulong ulOverlayHandle,float fAlpha) + { + EVROverlayError result = FnTable.SetOverlayAlpha(ulOverlayHandle,fAlpha); + return result; + } + public EVROverlayError GetOverlayAlpha(ulong ulOverlayHandle,ref float pfAlpha) + { + pfAlpha = 0; + EVROverlayError result = FnTable.GetOverlayAlpha(ulOverlayHandle,ref pfAlpha); + return result; + } + public EVROverlayError SetOverlayWidthInMeters(ulong ulOverlayHandle,float fWidthInMeters) + { + EVROverlayError result = FnTable.SetOverlayWidthInMeters(ulOverlayHandle,fWidthInMeters); + return result; + } + public EVROverlayError GetOverlayWidthInMeters(ulong ulOverlayHandle,ref float pfWidthInMeters) + { + pfWidthInMeters = 0; + EVROverlayError result = FnTable.GetOverlayWidthInMeters(ulOverlayHandle,ref pfWidthInMeters); + return result; + } + public EVROverlayError SetOverlayAutoCurveDistanceRangeInMeters(ulong ulOverlayHandle,float fMinDistanceInMeters,float fMaxDistanceInMeters) + { + EVROverlayError result = FnTable.SetOverlayAutoCurveDistanceRangeInMeters(ulOverlayHandle,fMinDistanceInMeters,fMaxDistanceInMeters); + return result; + } + public EVROverlayError GetOverlayAutoCurveDistanceRangeInMeters(ulong ulOverlayHandle,ref float pfMinDistanceInMeters,ref float pfMaxDistanceInMeters) + { + pfMinDistanceInMeters = 0; + pfMaxDistanceInMeters = 0; + EVROverlayError result = FnTable.GetOverlayAutoCurveDistanceRangeInMeters(ulOverlayHandle,ref pfMinDistanceInMeters,ref pfMaxDistanceInMeters); + return result; + } + public EVROverlayError SetOverlayTextureColorSpace(ulong ulOverlayHandle,EColorSpace eTextureColorSpace) + { + EVROverlayError result = FnTable.SetOverlayTextureColorSpace(ulOverlayHandle,eTextureColorSpace); + return result; + } + public EVROverlayError GetOverlayTextureColorSpace(ulong ulOverlayHandle,ref EColorSpace peTextureColorSpace) + { + EVROverlayError result = FnTable.GetOverlayTextureColorSpace(ulOverlayHandle,ref peTextureColorSpace); + return result; + } + public EVROverlayError SetOverlayTextureBounds(ulong ulOverlayHandle,ref VRTextureBounds_t pOverlayTextureBounds) + { + EVROverlayError result = FnTable.SetOverlayTextureBounds(ulOverlayHandle,ref pOverlayTextureBounds); + return result; + } + public EVROverlayError GetOverlayTextureBounds(ulong ulOverlayHandle,ref VRTextureBounds_t pOverlayTextureBounds) + { + EVROverlayError result = FnTable.GetOverlayTextureBounds(ulOverlayHandle,ref pOverlayTextureBounds); + return result; + } + public EVROverlayError GetOverlayTransformType(ulong ulOverlayHandle,ref VROverlayTransformType peTransformType) + { + EVROverlayError result = FnTable.GetOverlayTransformType(ulOverlayHandle,ref peTransformType); + return result; + } + public EVROverlayError SetOverlayTransformAbsolute(ulong 
ulOverlayHandle,ETrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform) + { + EVROverlayError result = FnTable.SetOverlayTransformAbsolute(ulOverlayHandle,eTrackingOrigin,ref pmatTrackingOriginToOverlayTransform); + return result; + } + public EVROverlayError GetOverlayTransformAbsolute(ulong ulOverlayHandle,ref ETrackingUniverseOrigin peTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToOverlayTransform) + { + EVROverlayError result = FnTable.GetOverlayTransformAbsolute(ulOverlayHandle,ref peTrackingOrigin,ref pmatTrackingOriginToOverlayTransform); + return result; + } + public EVROverlayError SetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle,uint unTrackedDevice,ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform) + { + EVROverlayError result = FnTable.SetOverlayTransformTrackedDeviceRelative(ulOverlayHandle,unTrackedDevice,ref pmatTrackedDeviceToOverlayTransform); + return result; + } + public EVROverlayError GetOverlayTransformTrackedDeviceRelative(ulong ulOverlayHandle,ref uint punTrackedDevice,ref HmdMatrix34_t pmatTrackedDeviceToOverlayTransform) + { + punTrackedDevice = 0; + EVROverlayError result = FnTable.GetOverlayTransformTrackedDeviceRelative(ulOverlayHandle,ref punTrackedDevice,ref pmatTrackedDeviceToOverlayTransform); + return result; + } + public EVROverlayError SetOverlayTransformTrackedDeviceComponent(ulong ulOverlayHandle,uint unDeviceIndex,string pchComponentName) + { + EVROverlayError result = FnTable.SetOverlayTransformTrackedDeviceComponent(ulOverlayHandle,unDeviceIndex,pchComponentName); + return result; + } + public EVROverlayError GetOverlayTransformTrackedDeviceComponent(ulong ulOverlayHandle,ref uint punDeviceIndex,string pchComponentName,uint unComponentNameSize) + { + punDeviceIndex = 0; + EVROverlayError result = FnTable.GetOverlayTransformTrackedDeviceComponent(ulOverlayHandle,ref punDeviceIndex,pchComponentName,unComponentNameSize); + return result; + } + public EVROverlayError ShowOverlay(ulong ulOverlayHandle) + { + EVROverlayError result = FnTable.ShowOverlay(ulOverlayHandle); + return result; + } + public EVROverlayError HideOverlay(ulong ulOverlayHandle) + { + EVROverlayError result = FnTable.HideOverlay(ulOverlayHandle); + return result; + } + public bool IsOverlayVisible(ulong ulOverlayHandle) + { + bool result = FnTable.IsOverlayVisible(ulOverlayHandle); + return result; + } + public EVROverlayError GetTransformForOverlayCoordinates(ulong ulOverlayHandle,ETrackingUniverseOrigin eTrackingOrigin,HmdVector2_t coordinatesInOverlay,ref HmdMatrix34_t pmatTransform) + { + EVROverlayError result = FnTable.GetTransformForOverlayCoordinates(ulOverlayHandle,eTrackingOrigin,coordinatesInOverlay,ref pmatTransform); + return result; + } + public bool PollNextOverlayEvent(ulong ulOverlayHandle,ref VREvent_t pEvent,uint uncbVREvent) + { + bool result = FnTable.PollNextOverlayEvent(ulOverlayHandle,ref pEvent,uncbVREvent); + return result; + } + public EVROverlayError GetOverlayInputMethod(ulong ulOverlayHandle,ref VROverlayInputMethod peInputMethod) + { + EVROverlayError result = FnTable.GetOverlayInputMethod(ulOverlayHandle,ref peInputMethod); + return result; + } + public EVROverlayError SetOverlayInputMethod(ulong ulOverlayHandle,VROverlayInputMethod eInputMethod) + { + EVROverlayError result = FnTable.SetOverlayInputMethod(ulOverlayHandle,eInputMethod); + return result; + } + public EVROverlayError GetOverlayMouseScale(ulong ulOverlayHandle,ref HmdVector2_t pvecMouseScale) + { + EVROverlayError result = 
FnTable.GetOverlayMouseScale(ulOverlayHandle,ref pvecMouseScale); + return result; + } + public EVROverlayError SetOverlayMouseScale(ulong ulOverlayHandle,ref HmdVector2_t pvecMouseScale) + { + EVROverlayError result = FnTable.SetOverlayMouseScale(ulOverlayHandle,ref pvecMouseScale); + return result; + } + public bool ComputeOverlayIntersection(ulong ulOverlayHandle,ref VROverlayIntersectionParams_t pParams,ref VROverlayIntersectionResults_t pResults) + { + bool result = FnTable.ComputeOverlayIntersection(ulOverlayHandle,ref pParams,ref pResults); + return result; + } + public bool HandleControllerOverlayInteractionAsMouse(ulong ulOverlayHandle,uint unControllerDeviceIndex) + { + bool result = FnTable.HandleControllerOverlayInteractionAsMouse(ulOverlayHandle,unControllerDeviceIndex); + return result; + } + public bool IsHoverTargetOverlay(ulong ulOverlayHandle) + { + bool result = FnTable.IsHoverTargetOverlay(ulOverlayHandle); + return result; + } + public ulong GetGamepadFocusOverlay() + { + ulong result = FnTable.GetGamepadFocusOverlay(); + return result; + } + public EVROverlayError SetGamepadFocusOverlay(ulong ulNewFocusOverlay) + { + EVROverlayError result = FnTable.SetGamepadFocusOverlay(ulNewFocusOverlay); + return result; + } + public EVROverlayError SetOverlayNeighbor(EOverlayDirection eDirection,ulong ulFrom,ulong ulTo) + { + EVROverlayError result = FnTable.SetOverlayNeighbor(eDirection,ulFrom,ulTo); + return result; + } + public EVROverlayError MoveGamepadFocusToNeighbor(EOverlayDirection eDirection,ulong ulFrom) + { + EVROverlayError result = FnTable.MoveGamepadFocusToNeighbor(eDirection,ulFrom); + return result; + } + public EVROverlayError SetOverlayTexture(ulong ulOverlayHandle,ref Texture_t pTexture) + { + EVROverlayError result = FnTable.SetOverlayTexture(ulOverlayHandle,ref pTexture); + return result; + } + public EVROverlayError ClearOverlayTexture(ulong ulOverlayHandle) + { + EVROverlayError result = FnTable.ClearOverlayTexture(ulOverlayHandle); + return result; + } + public EVROverlayError SetOverlayRaw(ulong ulOverlayHandle,IntPtr pvBuffer,uint unWidth,uint unHeight,uint unDepth) + { + EVROverlayError result = FnTable.SetOverlayRaw(ulOverlayHandle,pvBuffer,unWidth,unHeight,unDepth); + return result; + } + public EVROverlayError SetOverlayFromFile(ulong ulOverlayHandle,string pchFilePath) + { + EVROverlayError result = FnTable.SetOverlayFromFile(ulOverlayHandle,pchFilePath); + return result; + } + public EVROverlayError GetOverlayTexture(ulong ulOverlayHandle,ref IntPtr pNativeTextureHandle,IntPtr pNativeTextureRef,ref uint pWidth,ref uint pHeight,ref uint pNativeFormat,ref EGraphicsAPIConvention pAPI,ref EColorSpace pColorSpace) + { + pWidth = 0; + pHeight = 0; + pNativeFormat = 0; + EVROverlayError result = FnTable.GetOverlayTexture(ulOverlayHandle,ref pNativeTextureHandle,pNativeTextureRef,ref pWidth,ref pHeight,ref pNativeFormat,ref pAPI,ref pColorSpace); + return result; + } + public EVROverlayError ReleaseNativeOverlayHandle(ulong ulOverlayHandle,IntPtr pNativeTextureHandle) + { + EVROverlayError result = FnTable.ReleaseNativeOverlayHandle(ulOverlayHandle,pNativeTextureHandle); + return result; + } + public EVROverlayError GetOverlayTextureSize(ulong ulOverlayHandle,ref uint pWidth,ref uint pHeight) + { + pWidth = 0; + pHeight = 0; + EVROverlayError result = FnTable.GetOverlayTextureSize(ulOverlayHandle,ref pWidth,ref pHeight); + return result; + } + public EVROverlayError CreateDashboardOverlay(string pchOverlayKey,string pchOverlayFriendlyName,ref ulong 
pMainHandle,ref ulong pThumbnailHandle) + { + pMainHandle = 0; + pThumbnailHandle = 0; + EVROverlayError result = FnTable.CreateDashboardOverlay(pchOverlayKey,pchOverlayFriendlyName,ref pMainHandle,ref pThumbnailHandle); + return result; + } + public bool IsDashboardVisible() + { + bool result = FnTable.IsDashboardVisible(); + return result; + } + public bool IsActiveDashboardOverlay(ulong ulOverlayHandle) + { + bool result = FnTable.IsActiveDashboardOverlay(ulOverlayHandle); + return result; + } + public EVROverlayError SetDashboardOverlaySceneProcess(ulong ulOverlayHandle,uint unProcessId) + { + EVROverlayError result = FnTable.SetDashboardOverlaySceneProcess(ulOverlayHandle,unProcessId); + return result; + } + public EVROverlayError GetDashboardOverlaySceneProcess(ulong ulOverlayHandle,ref uint punProcessId) + { + punProcessId = 0; + EVROverlayError result = FnTable.GetDashboardOverlaySceneProcess(ulOverlayHandle,ref punProcessId); + return result; + } + public void ShowDashboard(string pchOverlayToShow) + { + FnTable.ShowDashboard(pchOverlayToShow); + } + public uint GetPrimaryDashboardDevice() + { + uint result = FnTable.GetPrimaryDashboardDevice(); + return result; + } + public EVROverlayError ShowKeyboard(int eInputMode,int eLineInputMode,string pchDescription,uint unCharMax,string pchExistingText,bool bUseMinimalMode,ulong uUserValue) + { + EVROverlayError result = FnTable.ShowKeyboard(eInputMode,eLineInputMode,pchDescription,unCharMax,pchExistingText,bUseMinimalMode,uUserValue); + return result; + } + public EVROverlayError ShowKeyboardForOverlay(ulong ulOverlayHandle,int eInputMode,int eLineInputMode,string pchDescription,uint unCharMax,string pchExistingText,bool bUseMinimalMode,ulong uUserValue) + { + EVROverlayError result = FnTable.ShowKeyboardForOverlay(ulOverlayHandle,eInputMode,eLineInputMode,pchDescription,unCharMax,pchExistingText,bUseMinimalMode,uUserValue); + return result; + } + public uint GetKeyboardText(System.Text.StringBuilder pchText,uint cchText) + { + uint result = FnTable.GetKeyboardText(pchText,cchText); + return result; + } + public void HideKeyboard() + { + FnTable.HideKeyboard(); + } + public void SetKeyboardTransformAbsolute(ETrackingUniverseOrigin eTrackingOrigin,ref HmdMatrix34_t pmatTrackingOriginToKeyboardTransform) + { + FnTable.SetKeyboardTransformAbsolute(eTrackingOrigin,ref pmatTrackingOriginToKeyboardTransform); + } + public void SetKeyboardPositionForOverlay(ulong ulOverlayHandle,HmdRect2_t avoidRect) + { + FnTable.SetKeyboardPositionForOverlay(ulOverlayHandle,avoidRect); + } +} + + +public class CVRRenderModels +{ + IVRRenderModels FnTable; + internal CVRRenderModels(IntPtr pInterface) + { + FnTable = (IVRRenderModels)Marshal.PtrToStructure(pInterface, typeof(IVRRenderModels)); + } + public EVRRenderModelError LoadRenderModel_Async(string pchRenderModelName,ref IntPtr ppRenderModel) + { + EVRRenderModelError result = FnTable.LoadRenderModel_Async(pchRenderModelName,ref ppRenderModel); + return result; + } + public void FreeRenderModel(IntPtr pRenderModel) + { + FnTable.FreeRenderModel(pRenderModel); + } + public EVRRenderModelError LoadTexture_Async(int textureId,ref IntPtr ppTexture) + { + EVRRenderModelError result = FnTable.LoadTexture_Async(textureId,ref ppTexture); + return result; + } + public void FreeTexture(IntPtr pTexture) + { + FnTable.FreeTexture(pTexture); + } + public EVRRenderModelError LoadTextureD3D11_Async(int textureId,IntPtr pD3D11Device,ref IntPtr ppD3D11Texture2D) + { + EVRRenderModelError result = 
FnTable.LoadTextureD3D11_Async(textureId,pD3D11Device,ref ppD3D11Texture2D); + return result; + } + public EVRRenderModelError LoadIntoTextureD3D11_Async(int textureId,IntPtr pDstTexture) + { + EVRRenderModelError result = FnTable.LoadIntoTextureD3D11_Async(textureId,pDstTexture); + return result; + } + public void FreeTextureD3D11(IntPtr pD3D11Texture2D) + { + FnTable.FreeTextureD3D11(pD3D11Texture2D); + } + public uint GetRenderModelName(uint unRenderModelIndex,System.Text.StringBuilder pchRenderModelName,uint unRenderModelNameLen) + { + uint result = FnTable.GetRenderModelName(unRenderModelIndex,pchRenderModelName,unRenderModelNameLen); + return result; + } + public uint GetRenderModelCount() + { + uint result = FnTable.GetRenderModelCount(); + return result; + } + public uint GetComponentCount(string pchRenderModelName) + { + uint result = FnTable.GetComponentCount(pchRenderModelName); + return result; + } + public uint GetComponentName(string pchRenderModelName,uint unComponentIndex,System.Text.StringBuilder pchComponentName,uint unComponentNameLen) + { + uint result = FnTable.GetComponentName(pchRenderModelName,unComponentIndex,pchComponentName,unComponentNameLen); + return result; + } + public ulong GetComponentButtonMask(string pchRenderModelName,string pchComponentName) + { + ulong result = FnTable.GetComponentButtonMask(pchRenderModelName,pchComponentName); + return result; + } + public uint GetComponentRenderModelName(string pchRenderModelName,string pchComponentName,System.Text.StringBuilder pchComponentRenderModelName,uint unComponentRenderModelNameLen) + { + uint result = FnTable.GetComponentRenderModelName(pchRenderModelName,pchComponentName,pchComponentRenderModelName,unComponentRenderModelNameLen); + return result; + } + public bool GetComponentState(string pchRenderModelName,string pchComponentName,ref VRControllerState_t pControllerState,ref RenderModel_ControllerMode_State_t pState,ref RenderModel_ComponentState_t pComponentState) + { + bool result = FnTable.GetComponentState(pchRenderModelName,pchComponentName,ref pControllerState,ref pState,ref pComponentState); + return result; + } + public bool RenderModelHasComponent(string pchRenderModelName,string pchComponentName) + { + bool result = FnTable.RenderModelHasComponent(pchRenderModelName,pchComponentName); + return result; + } + public uint GetRenderModelThumbnailURL(string pchRenderModelName,System.Text.StringBuilder pchThumbnailURL,uint unThumbnailURLLen,ref EVRRenderModelError peError) + { + uint result = FnTable.GetRenderModelThumbnailURL(pchRenderModelName,pchThumbnailURL,unThumbnailURLLen,ref peError); + return result; + } + public uint GetRenderModelOriginalPath(string pchRenderModelName,System.Text.StringBuilder pchOriginalPath,uint unOriginalPathLen,ref EVRRenderModelError peError) + { + uint result = FnTable.GetRenderModelOriginalPath(pchRenderModelName,pchOriginalPath,unOriginalPathLen,ref peError); + return result; + } + public string GetRenderModelErrorNameFromEnum(EVRRenderModelError error) + { + IntPtr result = FnTable.GetRenderModelErrorNameFromEnum(error); + return Marshal.PtrToStringAnsi(result); + } +} + + +public class CVRNotifications +{ + IVRNotifications FnTable; + internal CVRNotifications(IntPtr pInterface) + { + FnTable = (IVRNotifications)Marshal.PtrToStructure(pInterface, typeof(IVRNotifications)); + } + public EVRNotificationError CreateNotification(ulong ulOverlayHandle,ulong ulUserValue,EVRNotificationType type,string pchText,EVRNotificationStyle style,ref NotificationBitmap_t 
pImage,ref uint pNotificationId) + { + pNotificationId = 0; + EVRNotificationError result = FnTable.CreateNotification(ulOverlayHandle,ulUserValue,type,pchText,style,ref pImage,ref pNotificationId); + return result; + } + public EVRNotificationError RemoveNotification(uint notificationId) + { + EVRNotificationError result = FnTable.RemoveNotification(notificationId); + return result; + } +} + + +public class CVRSettings +{ + IVRSettings FnTable; + internal CVRSettings(IntPtr pInterface) + { + FnTable = (IVRSettings)Marshal.PtrToStructure(pInterface, typeof(IVRSettings)); + } + public string GetSettingsErrorNameFromEnum(EVRSettingsError eError) + { + IntPtr result = FnTable.GetSettingsErrorNameFromEnum(eError); + return Marshal.PtrToStringAnsi(result); + } + public bool Sync(bool bForce,ref EVRSettingsError peError) + { + bool result = FnTable.Sync(bForce,ref peError); + return result; + } + public bool GetBool(string pchSection,string pchSettingsKey,bool bDefaultValue,ref EVRSettingsError peError) + { + bool result = FnTable.GetBool(pchSection,pchSettingsKey,bDefaultValue,ref peError); + return result; + } + public void SetBool(string pchSection,string pchSettingsKey,bool bValue,ref EVRSettingsError peError) + { + FnTable.SetBool(pchSection,pchSettingsKey,bValue,ref peError); + } + public int GetInt32(string pchSection,string pchSettingsKey,int nDefaultValue,ref EVRSettingsError peError) + { + int result = FnTable.GetInt32(pchSection,pchSettingsKey,nDefaultValue,ref peError); + return result; + } + public void SetInt32(string pchSection,string pchSettingsKey,int nValue,ref EVRSettingsError peError) + { + FnTable.SetInt32(pchSection,pchSettingsKey,nValue,ref peError); + } + public float GetFloat(string pchSection,string pchSettingsKey,float flDefaultValue,ref EVRSettingsError peError) + { + float result = FnTable.GetFloat(pchSection,pchSettingsKey,flDefaultValue,ref peError); + return result; + } + public void SetFloat(string pchSection,string pchSettingsKey,float flValue,ref EVRSettingsError peError) + { + FnTable.SetFloat(pchSection,pchSettingsKey,flValue,ref peError); + } + public void GetString(string pchSection,string pchSettingsKey,string pchValue,uint unValueLen,string pchDefaultValue,ref EVRSettingsError peError) + { + FnTable.GetString(pchSection,pchSettingsKey,pchValue,unValueLen,pchDefaultValue,ref peError); + } + public void SetString(string pchSection,string pchSettingsKey,string pchValue,ref EVRSettingsError peError) + { + FnTable.SetString(pchSection,pchSettingsKey,pchValue,ref peError); + } + public void RemoveSection(string pchSection,ref EVRSettingsError peError) + { + FnTable.RemoveSection(pchSection,ref peError); + } + public void RemoveKeyInSection(string pchSection,string pchSettingsKey,ref EVRSettingsError peError) + { + FnTable.RemoveKeyInSection(pchSection,pchSettingsKey,ref peError); + } +} + + +public class CVRScreenshots +{ + IVRScreenshots FnTable; + internal CVRScreenshots(IntPtr pInterface) + { + FnTable = (IVRScreenshots)Marshal.PtrToStructure(pInterface, typeof(IVRScreenshots)); + } + public EVRScreenshotError RequestScreenshot(ref uint pOutScreenshotHandle,EVRScreenshotType type,string pchPreviewFilename,string pchVRFilename) + { + pOutScreenshotHandle = 0; + EVRScreenshotError result = FnTable.RequestScreenshot(ref pOutScreenshotHandle,type,pchPreviewFilename,pchVRFilename); + return result; + } + public EVRScreenshotError HookScreenshot(EVRScreenshotType [] pSupportedTypes) + { + EVRScreenshotError result = FnTable.HookScreenshot(pSupportedTypes,(int) 
pSupportedTypes.Length); + return result; + } + public EVRScreenshotType GetScreenshotPropertyType(uint screenshotHandle,ref EVRScreenshotError pError) + { + EVRScreenshotType result = FnTable.GetScreenshotPropertyType(screenshotHandle,ref pError); + return result; + } + public uint GetScreenshotPropertyFilename(uint screenshotHandle,EVRScreenshotPropertyFilenames filenameType,System.Text.StringBuilder pchFilename,uint cchFilename,ref EVRScreenshotError pError) + { + uint result = FnTable.GetScreenshotPropertyFilename(screenshotHandle,filenameType,pchFilename,cchFilename,ref pError); + return result; + } + public EVRScreenshotError UpdateScreenshotProgress(uint screenshotHandle,float flProgress) + { + EVRScreenshotError result = FnTable.UpdateScreenshotProgress(screenshotHandle,flProgress); + return result; + } + public EVRScreenshotError TakeStereoScreenshot(ref uint pOutScreenshotHandle,string pchPreviewFilename,string pchVRFilename) + { + pOutScreenshotHandle = 0; + EVRScreenshotError result = FnTable.TakeStereoScreenshot(ref pOutScreenshotHandle,pchPreviewFilename,pchVRFilename); + return result; + } + public EVRScreenshotError SubmitScreenshot(uint screenshotHandle,EVRScreenshotType type,string pchSourcePreviewFilename,string pchSourceVRFilename) + { + EVRScreenshotError result = FnTable.SubmitScreenshot(screenshotHandle,type,pchSourcePreviewFilename,pchSourceVRFilename); + return result; + } +} + + +public class OpenVRInterop +{ + [DllImportAttribute("openvr_api", EntryPoint = "VR_InitInternal")] + internal static extern uint InitInternal(ref EVRInitError peError, EVRApplicationType eApplicationType); + [DllImportAttribute("openvr_api", EntryPoint = "VR_ShutdownInternal")] + internal static extern void ShutdownInternal(); + [DllImportAttribute("openvr_api", EntryPoint = "VR_IsHmdPresent")] + internal static extern bool IsHmdPresent(); + [DllImportAttribute("openvr_api", EntryPoint = "VR_IsRuntimeInstalled")] + internal static extern bool IsRuntimeInstalled(); + [DllImportAttribute("openvr_api", EntryPoint = "VR_GetStringForHmdError")] + internal static extern IntPtr GetStringForHmdError(EVRInitError error); + [DllImportAttribute("openvr_api", EntryPoint = "VR_GetGenericInterface")] + internal static extern IntPtr GetGenericInterface([In, MarshalAs(UnmanagedType.LPStr)] string pchInterfaceVersion, ref EVRInitError peError); + [DllImportAttribute("openvr_api", EntryPoint = "VR_IsInterfaceVersionValid")] + internal static extern bool IsInterfaceVersionValid([In, MarshalAs(UnmanagedType.LPStr)] string pchInterfaceVersion); + [DllImportAttribute("openvr_api", EntryPoint = "VR_GetInitToken")] + internal static extern uint GetInitToken(); +} + + +public enum EVREye +{ + Eye_Left = 0, + Eye_Right = 1, +} +public enum EGraphicsAPIConvention +{ + API_DirectX = 0, + API_OpenGL = 1, +} +public enum EColorSpace +{ + Auto = 0, + Gamma = 1, + Linear = 2, +} +public enum ETrackingResult +{ + Uninitialized = 1, + Calibrating_InProgress = 100, + Calibrating_OutOfRange = 101, + Running_OK = 200, + Running_OutOfRange = 201, +} +public enum ETrackedDeviceClass +{ + Invalid = 0, + HMD = 1, + Controller = 2, + TrackingReference = 4, + Other = 1000, +} +public enum ETrackedControllerRole +{ + Invalid = 0, + LeftHand = 1, + RightHand = 2, +} +public enum ETrackingUniverseOrigin +{ + TrackingUniverseSeated = 0, + TrackingUniverseStanding = 1, + TrackingUniverseRawAndUncalibrated = 2, +} +public enum ETrackedDeviceProperty +{ + Prop_TrackingSystemName_String = 1000, + Prop_ModelNumber_String = 1001, + 
Prop_SerialNumber_String = 1002, + Prop_RenderModelName_String = 1003, + Prop_WillDriftInYaw_Bool = 1004, + Prop_ManufacturerName_String = 1005, + Prop_TrackingFirmwareVersion_String = 1006, + Prop_HardwareRevision_String = 1007, + Prop_AllWirelessDongleDescriptions_String = 1008, + Prop_ConnectedWirelessDongle_String = 1009, + Prop_DeviceIsWireless_Bool = 1010, + Prop_DeviceIsCharging_Bool = 1011, + Prop_DeviceBatteryPercentage_Float = 1012, + Prop_StatusDisplayTransform_Matrix34 = 1013, + Prop_Firmware_UpdateAvailable_Bool = 1014, + Prop_Firmware_ManualUpdate_Bool = 1015, + Prop_Firmware_ManualUpdateURL_String = 1016, + Prop_HardwareRevision_Uint64 = 1017, + Prop_FirmwareVersion_Uint64 = 1018, + Prop_FPGAVersion_Uint64 = 1019, + Prop_VRCVersion_Uint64 = 1020, + Prop_RadioVersion_Uint64 = 1021, + Prop_DongleVersion_Uint64 = 1022, + Prop_BlockServerShutdown_Bool = 1023, + Prop_CanUnifyCoordinateSystemWithHmd_Bool = 1024, + Prop_ContainsProximitySensor_Bool = 1025, + Prop_DeviceProvidesBatteryStatus_Bool = 1026, + Prop_DeviceCanPowerOff_Bool = 1027, + Prop_Firmware_ProgrammingTarget_String = 1028, + Prop_DeviceClass_Int32 = 1029, + Prop_HasCamera_Bool = 1030, + Prop_DriverVersion_String = 1031, + Prop_Firmware_ForceUpdateRequired_Bool = 1032, + Prop_ReportsTimeSinceVSync_Bool = 2000, + Prop_SecondsFromVsyncToPhotons_Float = 2001, + Prop_DisplayFrequency_Float = 2002, + Prop_UserIpdMeters_Float = 2003, + Prop_CurrentUniverseId_Uint64 = 2004, + Prop_PreviousUniverseId_Uint64 = 2005, + Prop_DisplayFirmwareVersion_Uint64 = 2006, + Prop_IsOnDesktop_Bool = 2007, + Prop_DisplayMCType_Int32 = 2008, + Prop_DisplayMCOffset_Float = 2009, + Prop_DisplayMCScale_Float = 2010, + Prop_EdidVendorID_Int32 = 2011, + Prop_DisplayMCImageLeft_String = 2012, + Prop_DisplayMCImageRight_String = 2013, + Prop_DisplayGCBlackClamp_Float = 2014, + Prop_EdidProductID_Int32 = 2015, + Prop_CameraToHeadTransform_Matrix34 = 2016, + Prop_DisplayGCType_Int32 = 2017, + Prop_DisplayGCOffset_Float = 2018, + Prop_DisplayGCScale_Float = 2019, + Prop_DisplayGCPrescale_Float = 2020, + Prop_DisplayGCImage_String = 2021, + Prop_LensCenterLeftU_Float = 2022, + Prop_LensCenterLeftV_Float = 2023, + Prop_LensCenterRightU_Float = 2024, + Prop_LensCenterRightV_Float = 2025, + Prop_UserHeadToEyeDepthMeters_Float = 2026, + Prop_CameraFirmwareVersion_Uint64 = 2027, + Prop_CameraFirmwareDescription_String = 2028, + Prop_DisplayFPGAVersion_Uint64 = 2029, + Prop_DisplayBootloaderVersion_Uint64 = 2030, + Prop_DisplayHardwareVersion_Uint64 = 2031, + Prop_AudioFirmwareVersion_Uint64 = 2032, + Prop_CameraCompatibilityMode_Int32 = 2033, + Prop_ScreenshotHorizontalFieldOfViewDegrees_Float = 2034, + Prop_ScreenshotVerticalFieldOfViewDegrees_Float = 2035, + Prop_DisplaySuppressed_Bool = 2036, + Prop_AttachedDeviceId_String = 3000, + Prop_SupportedButtons_Uint64 = 3001, + Prop_Axis0Type_Int32 = 3002, + Prop_Axis1Type_Int32 = 3003, + Prop_Axis2Type_Int32 = 3004, + Prop_Axis3Type_Int32 = 3005, + Prop_Axis4Type_Int32 = 3006, + Prop_FieldOfViewLeftDegrees_Float = 4000, + Prop_FieldOfViewRightDegrees_Float = 4001, + Prop_FieldOfViewTopDegrees_Float = 4002, + Prop_FieldOfViewBottomDegrees_Float = 4003, + Prop_TrackingRangeMinimumMeters_Float = 4004, + Prop_TrackingRangeMaximumMeters_Float = 4005, + Prop_ModeLabel_String = 4006, + Prop_VendorSpecific_Reserved_Start = 10000, + Prop_VendorSpecific_Reserved_End = 10999, +} +public enum ETrackedPropertyError +{ + TrackedProp_Success = 0, + TrackedProp_WrongDataType = 1, + TrackedProp_WrongDeviceClass = 2, + 
TrackedProp_BufferTooSmall = 3, + TrackedProp_UnknownProperty = 4, + TrackedProp_InvalidDevice = 5, + TrackedProp_CouldNotContactServer = 6, + TrackedProp_ValueNotProvidedByDevice = 7, + TrackedProp_StringExceedsMaximumLength = 8, + TrackedProp_NotYetAvailable = 9, +} +public enum EVRSubmitFlags +{ + Submit_Default = 0, + Submit_LensDistortionAlreadyApplied = 1, + Submit_GlRenderBuffer = 2, +} +public enum EVRState +{ + Undefined = -1, + Off = 0, + Searching = 1, + Searching_Alert = 2, + Ready = 3, + Ready_Alert = 4, + NotReady = 5, + Standby = 6, +} +public enum EVREventType +{ + VREvent_None = 0, + VREvent_TrackedDeviceActivated = 100, + VREvent_TrackedDeviceDeactivated = 101, + VREvent_TrackedDeviceUpdated = 102, + VREvent_TrackedDeviceUserInteractionStarted = 103, + VREvent_TrackedDeviceUserInteractionEnded = 104, + VREvent_IpdChanged = 105, + VREvent_EnterStandbyMode = 106, + VREvent_LeaveStandbyMode = 107, + VREvent_TrackedDeviceRoleChanged = 108, + VREvent_ButtonPress = 200, + VREvent_ButtonUnpress = 201, + VREvent_ButtonTouch = 202, + VREvent_ButtonUntouch = 203, + VREvent_MouseMove = 300, + VREvent_MouseButtonDown = 301, + VREvent_MouseButtonUp = 302, + VREvent_FocusEnter = 303, + VREvent_FocusLeave = 304, + VREvent_Scroll = 305, + VREvent_TouchPadMove = 306, + VREvent_InputFocusCaptured = 400, + VREvent_InputFocusReleased = 401, + VREvent_SceneFocusLost = 402, + VREvent_SceneFocusGained = 403, + VREvent_SceneApplicationChanged = 404, + VREvent_SceneFocusChanged = 405, + VREvent_InputFocusChanged = 406, + VREvent_SceneApplicationSecondaryRenderingStarted = 407, + VREvent_HideRenderModels = 410, + VREvent_ShowRenderModels = 411, + VREvent_OverlayShown = 500, + VREvent_OverlayHidden = 501, + VREvent_DashboardActivated = 502, + VREvent_DashboardDeactivated = 503, + VREvent_DashboardThumbSelected = 504, + VREvent_DashboardRequested = 505, + VREvent_ResetDashboard = 506, + VREvent_RenderToast = 507, + VREvent_ImageLoaded = 508, + VREvent_ShowKeyboard = 509, + VREvent_HideKeyboard = 510, + VREvent_OverlayGamepadFocusGained = 511, + VREvent_OverlayGamepadFocusLost = 512, + VREvent_OverlaySharedTextureChanged = 513, + VREvent_DashboardGuideButtonDown = 514, + VREvent_DashboardGuideButtonUp = 515, + VREvent_ScreenshotTriggered = 516, + VREvent_ImageFailed = 517, + VREvent_RequestScreenshot = 520, + VREvent_ScreenshotTaken = 521, + VREvent_ScreenshotFailed = 522, + VREvent_SubmitScreenshotToDashboard = 523, + VREvent_Notification_Shown = 600, + VREvent_Notification_Hidden = 601, + VREvent_Notification_BeginInteraction = 602, + VREvent_Notification_Destroyed = 603, + VREvent_Quit = 700, + VREvent_ProcessQuit = 701, + VREvent_QuitAborted_UserPrompt = 702, + VREvent_QuitAcknowledged = 703, + VREvent_DriverRequestedQuit = 704, + VREvent_ChaperoneDataHasChanged = 800, + VREvent_ChaperoneUniverseHasChanged = 801, + VREvent_ChaperoneTempDataHasChanged = 802, + VREvent_ChaperoneSettingsHaveChanged = 803, + VREvent_SeatedZeroPoseReset = 804, + VREvent_AudioSettingsHaveChanged = 820, + VREvent_BackgroundSettingHasChanged = 850, + VREvent_CameraSettingsHaveChanged = 851, + VREvent_ReprojectionSettingHasChanged = 852, + VREvent_ModelSkinSettingsHaveChanged = 853, + VREvent_EnvironmentSettingsHaveChanged = 854, + VREvent_StatusUpdate = 900, + VREvent_MCImageUpdated = 1000, + VREvent_FirmwareUpdateStarted = 1100, + VREvent_FirmwareUpdateFinished = 1101, + VREvent_KeyboardClosed = 1200, + VREvent_KeyboardCharInput = 1201, + VREvent_KeyboardDone = 1202, + VREvent_ApplicationTransitionStarted = 1300, + 
VREvent_ApplicationTransitionAborted = 1301, + VREvent_ApplicationTransitionNewAppStarted = 1302, + VREvent_ApplicationListUpdated = 1303, + VREvent_Compositor_MirrorWindowShown = 1400, + VREvent_Compositor_MirrorWindowHidden = 1401, + VREvent_Compositor_ChaperoneBoundsShown = 1410, + VREvent_Compositor_ChaperoneBoundsHidden = 1411, + VREvent_TrackedCamera_StartVideoStream = 1500, + VREvent_TrackedCamera_StopVideoStream = 1501, + VREvent_TrackedCamera_PauseVideoStream = 1502, + VREvent_TrackedCamera_ResumeVideoStream = 1503, + VREvent_PerformanceTest_EnableCapture = 1600, + VREvent_PerformanceTest_DisableCapture = 1601, + VREvent_PerformanceTest_FidelityLevel = 1602, + VREvent_VendorSpecific_Reserved_Start = 10000, + VREvent_VendorSpecific_Reserved_End = 19999, +} +public enum EDeviceActivityLevel +{ + k_EDeviceActivityLevel_Unknown = -1, + k_EDeviceActivityLevel_Idle = 0, + k_EDeviceActivityLevel_UserInteraction = 1, + k_EDeviceActivityLevel_UserInteraction_Timeout = 2, + k_EDeviceActivityLevel_Standby = 3, +} +public enum EVRButtonId +{ + k_EButton_System = 0, + k_EButton_ApplicationMenu = 1, + k_EButton_Grip = 2, + k_EButton_DPad_Left = 3, + k_EButton_DPad_Up = 4, + k_EButton_DPad_Right = 5, + k_EButton_DPad_Down = 6, + k_EButton_A = 7, + k_EButton_Axis0 = 32, + k_EButton_Axis1 = 33, + k_EButton_Axis2 = 34, + k_EButton_Axis3 = 35, + k_EButton_Axis4 = 36, + k_EButton_SteamVR_Touchpad = 32, + k_EButton_SteamVR_Trigger = 33, + k_EButton_Dashboard_Back = 2, + k_EButton_Max = 64, +} +public enum EVRMouseButton +{ + Left = 1, + Right = 2, + Middle = 4, +} +public enum EVRControllerAxisType +{ + k_eControllerAxis_None = 0, + k_eControllerAxis_TrackPad = 1, + k_eControllerAxis_Joystick = 2, + k_eControllerAxis_Trigger = 3, +} +public enum EVRControllerEventOutputType +{ + ControllerEventOutput_OSEvents = 0, + ControllerEventOutput_VREvents = 1, +} +public enum ECollisionBoundsStyle +{ + COLLISION_BOUNDS_STYLE_BEGINNER = 0, + COLLISION_BOUNDS_STYLE_INTERMEDIATE = 1, + COLLISION_BOUNDS_STYLE_SQUARES = 2, + COLLISION_BOUNDS_STYLE_ADVANCED = 3, + COLLISION_BOUNDS_STYLE_NONE = 4, + COLLISION_BOUNDS_STYLE_COUNT = 5, +} +public enum EVROverlayError +{ + None = 0, + UnknownOverlay = 10, + InvalidHandle = 11, + PermissionDenied = 12, + OverlayLimitExceeded = 13, + WrongVisibilityType = 14, + KeyTooLong = 15, + NameTooLong = 16, + KeyInUse = 17, + WrongTransformType = 18, + InvalidTrackedDevice = 19, + InvalidParameter = 20, + ThumbnailCantBeDestroyed = 21, + ArrayTooSmall = 22, + RequestFailed = 23, + InvalidTexture = 24, + UnableToLoadFile = 25, + VROVerlayError_KeyboardAlreadyInUse = 26, + NoNeighbor = 27, +} +public enum EVRApplicationType +{ + VRApplication_Other = 0, + VRApplication_Scene = 1, + VRApplication_Overlay = 2, + VRApplication_Background = 3, + VRApplication_Utility = 4, + VRApplication_VRMonitor = 5, +} +public enum EVRFirmwareError +{ + None = 0, + Success = 1, + Fail = 2, +} +public enum EVRNotificationError +{ + OK = 0, + InvalidNotificationId = 100, + NotificationQueueFull = 101, + InvalidOverlayHandle = 102, + SystemWithUserValueAlreadyExists = 103, +} +public enum EVRInitError +{ + None = 0, + Unknown = 1, + Init_InstallationNotFound = 100, + Init_InstallationCorrupt = 101, + Init_VRClientDLLNotFound = 102, + Init_FileNotFound = 103, + Init_FactoryNotFound = 104, + Init_InterfaceNotFound = 105, + Init_InvalidInterface = 106, + Init_UserConfigDirectoryInvalid = 107, + Init_HmdNotFound = 108, + Init_NotInitialized = 109, + Init_PathRegistryNotFound = 110, + Init_NoConfigPath = 111, 
+ Init_NoLogPath = 112, + Init_PathRegistryNotWritable = 113, + Init_AppInfoInitFailed = 114, + Init_Retry = 115, + Init_InitCanceledByUser = 116, + Init_AnotherAppLaunching = 117, + Init_SettingsInitFailed = 118, + Init_ShuttingDown = 119, + Init_TooManyObjects = 120, + Init_NoServerForBackgroundApp = 121, + Init_NotSupportedWithCompositor = 122, + Init_NotAvailableToUtilityApps = 123, + Init_Internal = 124, + Driver_Failed = 200, + Driver_Unknown = 201, + Driver_HmdUnknown = 202, + Driver_NotLoaded = 203, + Driver_RuntimeOutOfDate = 204, + Driver_HmdInUse = 205, + Driver_NotCalibrated = 206, + Driver_CalibrationInvalid = 207, + Driver_HmdDisplayNotFound = 208, + IPC_ServerInitFailed = 300, + IPC_ConnectFailed = 301, + IPC_SharedStateInitFailed = 302, + IPC_CompositorInitFailed = 303, + IPC_MutexInitFailed = 304, + IPC_Failed = 305, + Compositor_Failed = 400, + Compositor_D3D11HardwareRequired = 401, + Compositor_FirmwareRequiresUpdate = 402, + Compositor_OverlayInitFailed = 403, + Compositor_ScreenshotsInitFailed = 404, + VendorSpecific_UnableToConnectToOculusRuntime = 1000, + VendorSpecific_HmdFound_CantOpenDevice = 1101, + VendorSpecific_HmdFound_UnableToRequestConfigStart = 1102, + VendorSpecific_HmdFound_NoStoredConfig = 1103, + VendorSpecific_HmdFound_ConfigTooBig = 1104, + VendorSpecific_HmdFound_ConfigTooSmall = 1105, + VendorSpecific_HmdFound_UnableToInitZLib = 1106, + VendorSpecific_HmdFound_CantReadFirmwareVersion = 1107, + VendorSpecific_HmdFound_UnableToSendUserDataStart = 1108, + VendorSpecific_HmdFound_UnableToGetUserDataStart = 1109, + VendorSpecific_HmdFound_UnableToGetUserDataNext = 1110, + VendorSpecific_HmdFound_UserDataAddressRange = 1111, + VendorSpecific_HmdFound_UserDataError = 1112, + VendorSpecific_HmdFound_ConfigFailedSanityCheck = 1113, + Steam_SteamInstallationNotFound = 2000, +} +public enum EVRScreenshotType +{ + None = 0, + Mono = 1, + Stereo = 2, + Cubemap = 3, + MonoPanorama = 4, + StereoPanorama = 5, +} +public enum EVRScreenshotPropertyFilenames +{ + Preview = 0, + VR = 1, +} +public enum EVRTrackedCameraError +{ + None = 0, + OperationFailed = 100, + InvalidHandle = 101, + InvalidFrameHeaderVersion = 102, + OutOfHandles = 103, + IPCFailure = 104, + NotSupportedForThisDevice = 105, + SharedMemoryFailure = 106, + FrameBufferingFailure = 107, + StreamSetupFailure = 108, + InvalidGLTextureId = 109, + InvalidSharedTextureHandle = 110, + FailedToGetGLTextureId = 111, + SharedTextureFailure = 112, + NoFrameAvailable = 113, + InvalidArgument = 114, + InvalidFrameBufferSize = 115, +} +public enum EVRTrackedCameraFrameType +{ + Distorted = 0, + Undistorted = 1, + MaximumUndistorted = 2, + MAX_CAMERA_FRAME_TYPES = 3, +} +public enum EVRApplicationError +{ + None = 0, + AppKeyAlreadyExists = 100, + NoManifest = 101, + NoApplication = 102, + InvalidIndex = 103, + UnknownApplication = 104, + IPCFailed = 105, + ApplicationAlreadyRunning = 106, + InvalidManifest = 107, + InvalidApplication = 108, + LaunchFailed = 109, + ApplicationAlreadyStarting = 110, + LaunchInProgress = 111, + OldApplicationQuitting = 112, + TransitionAborted = 113, + IsTemplate = 114, + BufferTooSmall = 200, + PropertyNotSet = 201, + UnknownProperty = 202, + InvalidParameter = 203, +} +public enum EVRApplicationProperty +{ + Name_String = 0, + LaunchType_String = 11, + WorkingDirectory_String = 12, + BinaryPath_String = 13, + Arguments_String = 14, + URL_String = 15, + Description_String = 50, + NewsURL_String = 51, + ImagePath_String = 52, + Source_String = 53, + IsDashboardOverlay_Bool = 60, + 
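Aside on the EVRButtonId block above: the ids are bit positions, not flag values; the 64-bit ulButtonPressed / ulButtonTouched fields of VRControllerState_t (declared further down in this same file) carry one bit per id, mirroring the SDK's ButtonMaskFromId convention. A minimal C++ sketch of that convention — the enum subset and helper are re-declared here purely for illustration:

// Sketch only: EVRButtonId values are bit indices into the 64-bit
// ulButtonPressed / ulButtonTouched masks of VRControllerState_t.
#include <cstdint>
#include <cstdio>

enum EVRButtonId {
    k_EButton_Grip             = 2,
    k_EButton_SteamVR_Touchpad = 32,   // alias of k_EButton_Axis0
    k_EButton_SteamVR_Trigger  = 33,   // alias of k_EButton_Axis1
};

// Mirrors the SDK's ButtonMaskFromId: one bit per button id.
static uint64_t ButtonMask(EVRButtonId id) { return 1ull << id; }

int main()
{
    // ulButtonPressed would normally come from the controller state reported by IVRSystem.
    uint64_t ulButtonPressed = ButtonMask(k_EButton_SteamVR_Trigger);
    if (ulButtonPressed & ButtonMask(k_EButton_Grip))
        std::printf("grip held\n");
    if (ulButtonPressed & ButtonMask(k_EButton_SteamVR_Trigger))
        std::printf("trigger pressed\n");
    return 0;
}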
IsTemplate_Bool = 61, + IsInstanced_Bool = 62, + LastLaunchTime_Uint64 = 70, +} +public enum EVRApplicationTransitionState +{ + VRApplicationTransition_None = 0, + VRApplicationTransition_OldAppQuitSent = 10, + VRApplicationTransition_WaitingForExternalLaunch = 11, + VRApplicationTransition_NewAppLaunched = 20, +} +public enum ChaperoneCalibrationState +{ + OK = 1, + Warning = 100, + Warning_BaseStationMayHaveMoved = 101, + Warning_BaseStationRemoved = 102, + Warning_SeatedBoundsInvalid = 103, + Error = 200, + Error_BaseStationUninitalized = 201, + Error_BaseStationConflict = 202, + Error_PlayAreaInvalid = 203, + Error_CollisionBoundsInvalid = 204, +} +public enum EChaperoneConfigFile +{ + Live = 1, + Temp = 2, +} +public enum EChaperoneImportFlags +{ + EChaperoneImport_BoundsOnly = 1, +} +public enum EVRCompositorError +{ + None = 0, + RequestFailed = 1, + IncompatibleVersion = 100, + DoNotHaveFocus = 101, + InvalidTexture = 102, + IsNotSceneApplication = 103, + TextureIsOnWrongDevice = 104, + TextureUsesUnsupportedFormat = 105, + SharedTexturesNotSupported = 106, + IndexOutOfRange = 107, +} +public enum VROverlayInputMethod +{ + None = 0, + Mouse = 1, +} +public enum VROverlayTransformType +{ + VROverlayTransform_Absolute = 0, + VROverlayTransform_TrackedDeviceRelative = 1, + VROverlayTransform_SystemOverlay = 2, + VROverlayTransform_TrackedComponent = 3, +} +public enum VROverlayFlags +{ + None = 0, + Curved = 1, + RGSS4X = 2, + NoDashboardTab = 3, + AcceptsGamepadEvents = 4, + ShowGamepadFocus = 5, + SendVRScrollEvents = 6, + SendVRTouchpadEvents = 7, + ShowTouchPadScrollWheel = 8, + TransferOwnershipToInternalProcess = 9, + SideBySide_Parallel = 10, + SideBySide_Crossed = 11, + Panorama = 12, + StereoPanorama = 13, +} +public enum EGamepadTextInputMode +{ + k_EGamepadTextInputModeNormal = 0, + k_EGamepadTextInputModePassword = 1, + k_EGamepadTextInputModeSubmit = 2, +} +public enum EGamepadTextInputLineMode +{ + k_EGamepadTextInputLineModeSingleLine = 0, + k_EGamepadTextInputLineModeMultipleLines = 1, +} +public enum EOverlayDirection +{ + Up = 0, + Down = 1, + Left = 2, + Right = 3, + Count = 4, +} +public enum EVRRenderModelError +{ + None = 0, + Loading = 100, + NotSupported = 200, + InvalidArg = 300, + InvalidModel = 301, + NoShapes = 302, + MultipleShapes = 303, + TooManyVertices = 304, + MultipleTextures = 305, + BufferTooSmall = 306, + NotEnoughNormals = 307, + NotEnoughTexCoords = 308, + InvalidTexture = 400, +} +public enum EVRComponentProperty +{ + IsStatic = 1, + IsVisible = 2, + IsTouched = 4, + IsPressed = 8, + IsScrolled = 16, +} +public enum EVRNotificationType +{ + Transient = 0, + Persistent = 1, + Transient_SystemWithUserValue = 2, +} +public enum EVRNotificationStyle +{ + None = 0, + Application = 100, + Contact_Disabled = 200, + Contact_Enabled = 201, + Contact_Active = 202, +} +public enum EVRSettingsError +{ + None = 0, + IPCFailed = 1, + WriteFailed = 2, + ReadFailed = 3, +} +public enum EVRScreenshotError +{ + None = 0, + RequestFailed = 1, + IncompatibleVersion = 100, + NotFound = 101, + BufferTooSmall = 102, + ScreenshotAlreadyInProgress = 108, +} + +[StructLayout(LayoutKind.Explicit)] public struct VREvent_Data_t +{ + [FieldOffset(0)] public VREvent_Reserved_t reserved; + [FieldOffset(0)] public VREvent_Controller_t controller; + [FieldOffset(0)] public VREvent_Mouse_t mouse; + [FieldOffset(0)] public VREvent_Scroll_t scroll; + [FieldOffset(0)] public VREvent_Process_t process; + [FieldOffset(0)] public VREvent_Notification_t notification; + [FieldOffset(0)] 
public VREvent_Overlay_t overlay; + [FieldOffset(0)] public VREvent_Status_t status; + [FieldOffset(0)] public VREvent_Ipd_t ipd; + [FieldOffset(0)] public VREvent_Chaperone_t chaperone; + [FieldOffset(0)] public VREvent_PerformanceTest_t performanceTest; + [FieldOffset(0)] public VREvent_TouchPadMove_t touchPadMove; + [FieldOffset(0)] public VREvent_SeatedZeroPoseReset_t seatedZeroPoseReset; + [FieldOffset(0)] public VREvent_Screenshot_t screenshot; + [FieldOffset(0)] public VREvent_Keyboard_t keyboard; // This has to be at the end due to a mono bug +} + +[StructLayout(LayoutKind.Sequential)] public struct HmdMatrix34_t +{ + public float m0; //float[3][4] + public float m1; + public float m2; + public float m3; + public float m4; + public float m5; + public float m6; + public float m7; + public float m8; + public float m9; + public float m10; + public float m11; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdMatrix44_t +{ + public float m0; //float[4][4] + public float m1; + public float m2; + public float m3; + public float m4; + public float m5; + public float m6; + public float m7; + public float m8; + public float m9; + public float m10; + public float m11; + public float m12; + public float m13; + public float m14; + public float m15; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdVector3_t +{ + public float v0; //float[3] + public float v1; + public float v2; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdVector4_t +{ + public float v0; //float[4] + public float v1; + public float v2; + public float v3; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdVector3d_t +{ + public double v0; //double[3] + public double v1; + public double v2; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdVector2_t +{ + public float v0; //float[2] + public float v1; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdQuaternion_t +{ + public double w; + public double x; + public double y; + public double z; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdColor_t +{ + public float r; + public float g; + public float b; + public float a; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdQuad_t +{ + public HmdVector3_t vCorners0; //HmdVector3_t[4] + public HmdVector3_t vCorners1; + public HmdVector3_t vCorners2; + public HmdVector3_t vCorners3; +} +[StructLayout(LayoutKind.Sequential)] public struct HmdRect2_t +{ + public HmdVector2_t vTopLeft; + public HmdVector2_t vBottomRight; +} +[StructLayout(LayoutKind.Sequential)] public struct DistortionCoordinates_t +{ + public float rfRed0; //float[2] + public float rfRed1; + public float rfGreen0; //float[2] + public float rfGreen1; + public float rfBlue0; //float[2] + public float rfBlue1; +} +[StructLayout(LayoutKind.Sequential)] public struct Texture_t +{ + public IntPtr handle; // void * + public EGraphicsAPIConvention eType; + public EColorSpace eColorSpace; +} +[StructLayout(LayoutKind.Sequential)] public struct TrackedDevicePose_t +{ + public HmdMatrix34_t mDeviceToAbsoluteTracking; + public HmdVector3_t vVelocity; + public HmdVector3_t vAngularVelocity; + public ETrackingResult eTrackingResult; + [MarshalAs(UnmanagedType.I1)] + public bool bPoseIsValid; + [MarshalAs(UnmanagedType.I1)] + public bool bDeviceIsConnected; +} +[StructLayout(LayoutKind.Sequential)] public struct VRTextureBounds_t +{ + public float uMin; + public float vMin; + public float uMax; + public float vMax; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Controller_t +{ + public uint 
button; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Mouse_t +{ + public float x; + public float y; + public uint button; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Scroll_t +{ + public float xdelta; + public float ydelta; + public uint repeatCount; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_TouchPadMove_t +{ + [MarshalAs(UnmanagedType.I1)] + public bool bFingerDown; + public float flSecondsFingerDown; + public float fValueXFirst; + public float fValueYFirst; + public float fValueXRaw; + public float fValueYRaw; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Notification_t +{ + public ulong ulUserValue; + public uint notificationId; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Process_t +{ + public uint pid; + public uint oldPid; + [MarshalAs(UnmanagedType.I1)] + public bool bForced; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Overlay_t +{ + public ulong overlayHandle; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Status_t +{ + public uint statusState; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Keyboard_t +{ + public byte cNewInput0,cNewInput1,cNewInput2,cNewInput3,cNewInput4,cNewInput5,cNewInput6,cNewInput7; + public ulong uUserValue; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Ipd_t +{ + public float ipdMeters; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Chaperone_t +{ + public ulong m_nPreviousUniverse; + public ulong m_nCurrentUniverse; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Reserved_t +{ + public ulong reserved0; + public ulong reserved1; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_PerformanceTest_t +{ + public uint m_nFidelityLevel; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_SeatedZeroPoseReset_t +{ + [MarshalAs(UnmanagedType.I1)] + public bool bResetBySystemMenu; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_Screenshot_t +{ + public uint handle; + public uint type; +} +[StructLayout(LayoutKind.Sequential)] public struct VREvent_t +{ + public uint eventType; + public uint trackedDeviceIndex; + public float eventAgeSeconds; + public VREvent_Data_t data; +} +[StructLayout(LayoutKind.Sequential)] public struct HiddenAreaMesh_t +{ + public IntPtr pVertexData; // const struct vr::HmdVector2_t * + public uint unTriangleCount; +} +[StructLayout(LayoutKind.Sequential)] public struct VRControllerAxis_t +{ + public float x; + public float y; +} +[StructLayout(LayoutKind.Sequential)] public struct VRControllerState_t +{ + public uint unPacketNum; + public ulong ulButtonPressed; + public ulong ulButtonTouched; + public VRControllerAxis_t rAxis0; //VRControllerAxis_t[5] + public VRControllerAxis_t rAxis1; + public VRControllerAxis_t rAxis2; + public VRControllerAxis_t rAxis3; + public VRControllerAxis_t rAxis4; +} +[StructLayout(LayoutKind.Sequential)] public struct Compositor_OverlaySettings +{ + public uint size; + [MarshalAs(UnmanagedType.I1)] + public bool curved; + [MarshalAs(UnmanagedType.I1)] + public bool antialias; + public float scale; + public float distance; + public float alpha; + public float uOffset; + public float vOffset; + public float uScale; + public float vScale; + public float gridDivs; + public float gridWidth; + public float gridScale; + public HmdMatrix44_t transform; +} +[StructLayout(LayoutKind.Sequential)] public struct CameraVideoStreamFrameHeader_t +{ + public EVRTrackedCameraFrameType 
eFrameType; + public uint nWidth; + public uint nHeight; + public uint nBytesPerPixel; + public uint nFrameSequence; + public TrackedDevicePose_t standingTrackedDevicePose; +} +[StructLayout(LayoutKind.Sequential)] public struct AppOverrideKeys_t +{ + public IntPtr pchKey; // const char * + public IntPtr pchValue; // const char * +} +[StructLayout(LayoutKind.Sequential)] public struct Compositor_FrameTiming +{ + public uint m_nSize; + public uint m_nFrameIndex; + public uint m_nNumFramePresents; + public uint m_nNumDroppedFrames; + public double m_flSystemTimeInSeconds; + public float m_flSceneRenderGpuMs; + public float m_flTotalRenderGpuMs; + public float m_flCompositorRenderGpuMs; + public float m_flCompositorRenderCpuMs; + public float m_flCompositorIdleCpuMs; + public float m_flClientFrameIntervalMs; + public float m_flPresentCallCpuMs; + public float m_flWaitForPresentCpuMs; + public float m_flSubmitFrameMs; + public float m_flWaitGetPosesCalledMs; + public float m_flNewPosesReadyMs; + public float m_flNewFrameReadyMs; + public float m_flCompositorUpdateStartMs; + public float m_flCompositorUpdateEndMs; + public float m_flCompositorRenderStartMs; + public TrackedDevicePose_t m_HmdPose; + public int m_nFidelityLevel; + public uint m_nReprojectionFlags; +} +[StructLayout(LayoutKind.Sequential)] public struct Compositor_CumulativeStats +{ + public uint m_nPid; + public uint m_nNumFramePresents; + public uint m_nNumDroppedFrames; + public uint m_nNumReprojectedFrames; + public uint m_nNumFramePresentsOnStartup; + public uint m_nNumDroppedFramesOnStartup; + public uint m_nNumReprojectedFramesOnStartup; + public uint m_nNumLoading; + public uint m_nNumFramePresentsLoading; + public uint m_nNumDroppedFramesLoading; + public uint m_nNumReprojectedFramesLoading; + public uint m_nNumTimedOut; + public uint m_nNumFramePresentsTimedOut; + public uint m_nNumDroppedFramesTimedOut; + public uint m_nNumReprojectedFramesTimedOut; +} +[StructLayout(LayoutKind.Sequential)] public struct VROverlayIntersectionParams_t +{ + public HmdVector3_t vSource; + public HmdVector3_t vDirection; + public ETrackingUniverseOrigin eOrigin; +} +[StructLayout(LayoutKind.Sequential)] public struct VROverlayIntersectionResults_t +{ + public HmdVector3_t vPoint; + public HmdVector3_t vNormal; + public HmdVector2_t vUVs; + public float fDistance; +} +[StructLayout(LayoutKind.Sequential)] public struct RenderModel_ComponentState_t +{ + public HmdMatrix34_t mTrackingToComponentRenderModel; + public HmdMatrix34_t mTrackingToComponentLocal; + public uint uProperties; +} +[StructLayout(LayoutKind.Sequential)] public struct RenderModel_Vertex_t +{ + public HmdVector3_t vPosition; + public HmdVector3_t vNormal; + public float rfTextureCoord0; //float[2] + public float rfTextureCoord1; +} +[StructLayout(LayoutKind.Sequential)] public struct RenderModel_TextureMap_t +{ + public char unWidth; + public char unHeight; + public IntPtr rubTextureMapData; // const uint8_t * +} +[StructLayout(LayoutKind.Sequential)] public struct RenderModel_t +{ + public IntPtr rVertexData; // const struct vr::RenderModel_Vertex_t * + public uint unVertexCount; + public IntPtr rIndexData; // const uint16_t * + public uint unTriangleCount; + public int diffuseTextureId; +} +[StructLayout(LayoutKind.Sequential)] public struct RenderModel_ControllerMode_State_t +{ + [MarshalAs(UnmanagedType.I1)] + public bool bScrollWheelVisible; +} +[StructLayout(LayoutKind.Sequential)] public struct NotificationBitmap_t +{ + public IntPtr m_pImageData; // void * + public 
int m_nWidth; + public int m_nHeight; + public int m_nBytesPerPixel; +} +[StructLayout(LayoutKind.Sequential)] public struct COpenVRContext +{ + public IntPtr m_pVRSystem; // class vr::IVRSystem * + public IntPtr m_pVRChaperone; // class vr::IVRChaperone * + public IntPtr m_pVRChaperoneSetup; // class vr::IVRChaperoneSetup * + public IntPtr m_pVRCompositor; // class vr::IVRCompositor * + public IntPtr m_pVROverlay; // class vr::IVROverlay * + public IntPtr m_pVRRenderModels; // class vr::IVRRenderModels * + public IntPtr m_pVRExtendedDisplay; // class vr::IVRExtendedDisplay * + public IntPtr m_pVRSettings; // class vr::IVRSettings * + public IntPtr m_pVRApplications; // class vr::IVRApplications * + public IntPtr m_pVRTrackedCamera; // class vr::IVRTrackedCamera * + public IntPtr m_pVRScreenshots; // class vr::IVRScreenshots * +} + +public class OpenVR +{ + + public static uint InitInternal(ref EVRInitError peError, EVRApplicationType eApplicationType) + { + return OpenVRInterop.InitInternal(ref peError, eApplicationType); + } + + public static void ShutdownInternal() + { + OpenVRInterop.ShutdownInternal(); + } + + public static bool IsHmdPresent() + { + return OpenVRInterop.IsHmdPresent(); + } + + public static bool IsRuntimeInstalled() + { + return OpenVRInterop.IsRuntimeInstalled(); + } + + public static string GetStringForHmdError(EVRInitError error) + { + return Marshal.PtrToStringAnsi(OpenVRInterop.GetStringForHmdError(error)); + } + + public static IntPtr GetGenericInterface(string pchInterfaceVersion, ref EVRInitError peError) + { + return OpenVRInterop.GetGenericInterface(pchInterfaceVersion, ref peError); + } + + public static bool IsInterfaceVersionValid(string pchInterfaceVersion) + { + return OpenVRInterop.IsInterfaceVersionValid(pchInterfaceVersion); + } + + public static uint GetInitToken() + { + return OpenVRInterop.GetInitToken(); + } + + public const uint k_unTrackingStringSize = 32; + public const uint k_unMaxDriverDebugResponseSize = 32768; + public const uint k_unTrackedDeviceIndex_Hmd = 0; + public const uint k_unMaxTrackedDeviceCount = 16; + public const uint k_unTrackedDeviceIndexOther = 4294967294; + public const uint k_unTrackedDeviceIndexInvalid = 4294967295; + public const uint k_unMaxPropertyStringSize = 32768; + public const uint k_unControllerStateAxisCount = 5; + public const ulong k_ulOverlayHandleInvalid = 0; + public const uint k_unScreenshotHandleInvalid = 0; + public const string IVRSystem_Version = "IVRSystem_012"; + public const string IVRExtendedDisplay_Version = "IVRExtendedDisplay_001"; + public const string IVRTrackedCamera_Version = "IVRTrackedCamera_003"; + public const uint k_unMaxApplicationKeyLength = 128; + public const string IVRApplications_Version = "IVRApplications_005"; + public const string IVRChaperone_Version = "IVRChaperone_003"; + public const string IVRChaperoneSetup_Version = "IVRChaperoneSetup_005"; + public const string IVRCompositor_Version = "IVRCompositor_015"; + public const uint k_unVROverlayMaxKeyLength = 128; + public const uint k_unVROverlayMaxNameLength = 128; + public const uint k_unMaxOverlayCount = 32; + public const string IVROverlay_Version = "IVROverlay_012"; + public const string k_pch_Controller_Component_GDC2015 = "gdc2015"; + public const string k_pch_Controller_Component_Base = "base"; + public const string k_pch_Controller_Component_Tip = "tip"; + public const string k_pch_Controller_Component_HandGrip = "handgrip"; + public const string k_pch_Controller_Component_Status = "status"; + public const string 
IVRRenderModels_Version = "IVRRenderModels_005"; + public const uint k_unNotificationTextMaxSize = 256; + public const string IVRNotifications_Version = "IVRNotifications_002"; + public const uint k_unMaxSettingsKeyLength = 128; + public const string IVRSettings_Version = "IVRSettings_001"; + public const string k_pch_SteamVR_Section = "steamvr"; + public const string k_pch_SteamVR_RequireHmd_String = "requireHmd"; + public const string k_pch_SteamVR_ForcedDriverKey_String = "forcedDriver"; + public const string k_pch_SteamVR_ForcedHmdKey_String = "forcedHmd"; + public const string k_pch_SteamVR_DisplayDebug_Bool = "displayDebug"; + public const string k_pch_SteamVR_DebugProcessPipe_String = "debugProcessPipe"; + public const string k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion"; + public const string k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX"; + public const string k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY"; + public const string k_pch_SteamVR_SendSystemButtonToAllApps_Bool = "sendSystemButtonToAllApps"; + public const string k_pch_SteamVR_LogLevel_Int32 = "loglevel"; + public const string k_pch_SteamVR_IPD_Float = "ipd"; + public const string k_pch_SteamVR_Background_String = "background"; + public const string k_pch_SteamVR_BackgroundCameraHeight_Float = "backgroundCameraHeight"; + public const string k_pch_SteamVR_BackgroundDomeRadius_Float = "backgroundDomeRadius"; + public const string k_pch_SteamVR_Environment_String = "environment"; + public const string k_pch_SteamVR_GridColor_String = "gridColor"; + public const string k_pch_SteamVR_PlayAreaColor_String = "playAreaColor"; + public const string k_pch_SteamVR_ShowStage_Bool = "showStage"; + public const string k_pch_SteamVR_ActivateMultipleDrivers_Bool = "activateMultipleDrivers"; + public const string k_pch_SteamVR_PowerOffOnExit_Bool = "powerOffOnExit"; + public const string k_pch_SteamVR_StandbyAppRunningTimeout_Float = "standbyAppRunningTimeout"; + public const string k_pch_SteamVR_StandbyNoAppTimeout_Float = "standbyNoAppTimeout"; + public const string k_pch_SteamVR_DirectMode_Bool = "directMode"; + public const string k_pch_SteamVR_DirectModeEdidVid_Int32 = "directModeEdidVid"; + public const string k_pch_SteamVR_DirectModeEdidPid_Int32 = "directModeEdidPid"; + public const string k_pch_SteamVR_UsingSpeakers_Bool = "usingSpeakers"; + public const string k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees"; + public const string k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement"; + public const string k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses"; + public const string k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier"; + public const string k_pch_SteamVR_AllowReprojection_Bool = "allowReprojection"; + public const string k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection"; + public const string k_pch_SteamVR_ForceFadeOnBadTracking_Bool = "forceFadeOnBadTracking"; + public const string k_pch_SteamVR_DefaultMirrorView_Int32 = "defaultMirrorView"; + public const string k_pch_SteamVR_ShowMirrorView_Bool = "showMirrorView"; + public const string k_pch_Lighthouse_Section = "driver_lighthouse"; + public const string k_pch_Lighthouse_DisableIMU_Bool = "disableimu"; + public const string k_pch_Lighthouse_UseDisambiguation_String = "usedisambiguation"; + public const string k_pch_Lighthouse_DisambiguationDebug_Int32 = "disambiguationdebug"; + public const string k_pch_Lighthouse_PrimaryBasestation_Int32 = 
"primarybasestation"; + public const string k_pch_Lighthouse_LighthouseName_String = "lighthousename"; + public const string k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float = "maxincidenceangledegrees"; + public const string k_pch_Lighthouse_UseLighthouseDirect_Bool = "uselighthousedirect"; + public const string k_pch_Lighthouse_DBHistory_Bool = "dbhistory"; + public const string k_pch_Null_Section = "driver_null"; + public const string k_pch_Null_EnableNullDriver_Bool = "enable"; + public const string k_pch_Null_SerialNumber_String = "serialNumber"; + public const string k_pch_Null_ModelNumber_String = "modelNumber"; + public const string k_pch_Null_WindowX_Int32 = "windowX"; + public const string k_pch_Null_WindowY_Int32 = "windowY"; + public const string k_pch_Null_WindowWidth_Int32 = "windowWidth"; + public const string k_pch_Null_WindowHeight_Int32 = "windowHeight"; + public const string k_pch_Null_RenderWidth_Int32 = "renderWidth"; + public const string k_pch_Null_RenderHeight_Int32 = "renderHeight"; + public const string k_pch_Null_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons"; + public const string k_pch_Null_DisplayFrequency_Float = "displayFrequency"; + public const string k_pch_UserInterface_Section = "userinterface"; + public const string k_pch_UserInterface_StatusAlwaysOnTop_Bool = "StatusAlwaysOnTop"; + public const string k_pch_UserInterface_EnableScreenshots_Bool = "EnableScreenshots"; + public const string k_pch_Notifications_Section = "notifications"; + public const string k_pch_Notifications_DoNotDisturb_Bool = "DoNotDisturb"; + public const string k_pch_Keyboard_Section = "keyboard"; + public const string k_pch_Keyboard_TutorialCompletions = "TutorialCompletions"; + public const string k_pch_Keyboard_ScaleX = "ScaleX"; + public const string k_pch_Keyboard_ScaleY = "ScaleY"; + public const string k_pch_Keyboard_OffsetLeftX = "OffsetLeftX"; + public const string k_pch_Keyboard_OffsetRightX = "OffsetRightX"; + public const string k_pch_Keyboard_OffsetY = "OffsetY"; + public const string k_pch_Keyboard_Smoothing = "Smoothing"; + public const string k_pch_Perf_Section = "perfcheck"; + public const string k_pch_Perf_HeuristicActive_Bool = "heuristicActive"; + public const string k_pch_Perf_NotifyInHMD_Bool = "warnInHMD"; + public const string k_pch_Perf_NotifyOnlyOnce_Bool = "warnOnlyOnce"; + public const string k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore"; + public const string k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit"; + public const string k_pch_Perf_TestData_Float = "perfTestData"; + public const string k_pch_CollisionBounds_Section = "collisionBounds"; + public const string k_pch_CollisionBounds_Style_Int32 = "CollisionBoundsStyle"; + public const string k_pch_CollisionBounds_GroundPerimeterOn_Bool = "CollisionBoundsGroundPerimeterOn"; + public const string k_pch_CollisionBounds_CenterMarkerOn_Bool = "CollisionBoundsCenterMarkerOn"; + public const string k_pch_CollisionBounds_PlaySpaceOn_Bool = "CollisionBoundsPlaySpaceOn"; + public const string k_pch_CollisionBounds_FadeDistance_Float = "CollisionBoundsFadeDistance"; + public const string k_pch_CollisionBounds_ColorGammaR_Int32 = "CollisionBoundsColorGammaR"; + public const string k_pch_CollisionBounds_ColorGammaG_Int32 = "CollisionBoundsColorGammaG"; + public const string k_pch_CollisionBounds_ColorGammaB_Int32 = "CollisionBoundsColorGammaB"; + public const string k_pch_CollisionBounds_ColorGammaA_Int32 = "CollisionBoundsColorGammaA"; + public const string k_pch_Camera_Section = 
"camera"; + public const string k_pch_Camera_EnableCamera_Bool = "enableCamera"; + public const string k_pch_Camera_EnableCameraInDashboard_Bool = "enableCameraInDashboard"; + public const string k_pch_Camera_EnableCameraForCollisionBounds_Bool = "enableCameraForCollisionBounds"; + public const string k_pch_Camera_EnableCameraForRoomView_Bool = "enableCameraForRoomView"; + public const string k_pch_Camera_BoundsColorGammaR_Int32 = "cameraBoundsColorGammaR"; + public const string k_pch_Camera_BoundsColorGammaG_Int32 = "cameraBoundsColorGammaG"; + public const string k_pch_Camera_BoundsColorGammaB_Int32 = "cameraBoundsColorGammaB"; + public const string k_pch_Camera_BoundsColorGammaA_Int32 = "cameraBoundsColorGammaA"; + public const string k_pch_audio_Section = "audio"; + public const string k_pch_audio_OnPlaybackDevice_String = "onPlaybackDevice"; + public const string k_pch_audio_OnRecordDevice_String = "onRecordDevice"; + public const string k_pch_audio_OnPlaybackMirrorDevice_String = "onPlaybackMirrorDevice"; + public const string k_pch_audio_OffPlaybackDevice_String = "offPlaybackDevice"; + public const string k_pch_audio_OffRecordDevice_String = "offRecordDevice"; + public const string k_pch_audio_VIVEHDMIGain = "viveHDMIGain"; + public const string k_pch_modelskin_Section = "modelskins"; + public const string IVRScreenshots_Version = "IVRScreenshots_001"; + + static uint VRToken { get; set; } + + const string FnTable_Prefix = "FnTable:"; + + class COpenVRContext + { + public COpenVRContext() { Clear(); } + + public void Clear() + { + m_pVRSystem = null; + m_pVRChaperone = null; + m_pVRChaperoneSetup = null; + m_pVRCompositor = null; + m_pVROverlay = null; + m_pVRRenderModels = null; + m_pVRExtendedDisplay = null; + m_pVRSettings = null; + m_pVRApplications = null; + m_pVRScreenshots = null; + } + + void CheckClear() + { + if (VRToken != GetInitToken()) + { + Clear(); + VRToken = GetInitToken(); + } + } + + public CVRSystem VRSystem() + { + CheckClear(); + if (m_pVRSystem == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRSystem_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRSystem = new CVRSystem(pInterface); + } + return m_pVRSystem; + } + + public CVRChaperone VRChaperone() + { + CheckClear(); + if (m_pVRChaperone == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRChaperone_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRChaperone = new CVRChaperone(pInterface); + } + return m_pVRChaperone; + } + + public CVRChaperoneSetup VRChaperoneSetup() + { + CheckClear(); + if (m_pVRChaperoneSetup == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRChaperoneSetup_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRChaperoneSetup = new CVRChaperoneSetup(pInterface); + } + return m_pVRChaperoneSetup; + } + + public CVRCompositor VRCompositor() + { + CheckClear(); + if (m_pVRCompositor == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRCompositor_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRCompositor = new CVRCompositor(pInterface); + } + return m_pVRCompositor; + } + + public CVROverlay VROverlay() + { + CheckClear(); + if (m_pVROverlay == null) + { + var 
eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVROverlay_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVROverlay = new CVROverlay(pInterface); + } + return m_pVROverlay; + } + + public CVRRenderModels VRRenderModels() + { + CheckClear(); + if (m_pVRRenderModels == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRRenderModels_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRRenderModels = new CVRRenderModels(pInterface); + } + return m_pVRRenderModels; + } + + public CVRExtendedDisplay VRExtendedDisplay() + { + CheckClear(); + if (m_pVRExtendedDisplay == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRExtendedDisplay_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRExtendedDisplay = new CVRExtendedDisplay(pInterface); + } + return m_pVRExtendedDisplay; + } + + public CVRSettings VRSettings() + { + CheckClear(); + if (m_pVRSettings == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRSettings_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRSettings = new CVRSettings(pInterface); + } + return m_pVRSettings; + } + + public CVRApplications VRApplications() + { + CheckClear(); + if (m_pVRApplications == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRApplications_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRApplications = new CVRApplications(pInterface); + } + return m_pVRApplications; + } + + public CVRScreenshots VRScreenshots() + { + CheckClear(); + if (m_pVRScreenshots == null) + { + var eError = EVRInitError.None; + var pInterface = OpenVRInterop.GetGenericInterface(FnTable_Prefix+IVRScreenshots_Version, ref eError); + if (pInterface != IntPtr.Zero && eError == EVRInitError.None) + m_pVRScreenshots = new CVRScreenshots(pInterface); + } + return m_pVRScreenshots; + } + + private CVRSystem m_pVRSystem; + private CVRChaperone m_pVRChaperone; + private CVRChaperoneSetup m_pVRChaperoneSetup; + private CVRCompositor m_pVRCompositor; + private CVROverlay m_pVROverlay; + private CVRRenderModels m_pVRRenderModels; + private CVRExtendedDisplay m_pVRExtendedDisplay; + private CVRSettings m_pVRSettings; + private CVRApplications m_pVRApplications; + private CVRScreenshots m_pVRScreenshots; + }; + + private static COpenVRContext _OpenVRInternal_ModuleContext = null; + static COpenVRContext OpenVRInternal_ModuleContext + { + get + { + if (_OpenVRInternal_ModuleContext == null) + _OpenVRInternal_ModuleContext = new COpenVRContext(); + return _OpenVRInternal_ModuleContext; + } + } + + public static CVRSystem System { get { return OpenVRInternal_ModuleContext.VRSystem(); } } + public static CVRChaperone Chaperone { get { return OpenVRInternal_ModuleContext.VRChaperone(); } } + public static CVRChaperoneSetup ChaperoneSetup { get { return OpenVRInternal_ModuleContext.VRChaperoneSetup(); } } + public static CVRCompositor Compositor { get { return OpenVRInternal_ModuleContext.VRCompositor(); } } + public static CVROverlay Overlay { get { return OpenVRInternal_ModuleContext.VROverlay(); } } + public static CVRRenderModels RenderModels { get { return 
OpenVRInternal_ModuleContext.VRRenderModels(); } } + public static CVRApplications Applications { get { return OpenVRInternal_ModuleContext.VRApplications(); } } + public static CVRSettings Settings { get { return OpenVRInternal_ModuleContext.VRSettings(); } } + public static CVRExtendedDisplay ExtendedDisplay { get { return OpenVRInternal_ModuleContext.VRExtendedDisplay(); } } + public static CVRScreenshots Screenshots { get { return OpenVRInternal_ModuleContext.VRScreenshots(); } } + + /** Finds the active installation of vrclient.dll and initializes it */ + public static CVRSystem Init(ref EVRInitError peError, EVRApplicationType eApplicationType = EVRApplicationType.VRApplication_Scene) + { + VRToken = InitInternal(ref peError, eApplicationType); + OpenVRInternal_ModuleContext.Clear(); + + if (peError != EVRInitError.None) + return null; + + bool bInterfaceValid = IsInterfaceVersionValid(IVRSystem_Version); + if (!bInterfaceValid) + { + ShutdownInternal(); + peError = EVRInitError.Init_InterfaceNotFound; + return null; + } + + return OpenVR.System; + } + + /** unloads vrclient.dll. Any interface pointers from the interface are + * invalid after this point */ + public static void Shutdown() + { + ShutdownInternal(); + } + +} + + + +} + diff --git a/examples/ThirdPartyLibs/openvr/headers/openvr_api.json b/examples/ThirdPartyLibs/openvr/headers/openvr_api.json new file mode 100644 index 000000000..c72b2c415 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/headers/openvr_api.json @@ -0,0 +1,3347 @@ +{"typedefs":[{"typedef": "vr::glSharedTextureHandle_t","type": "void *"} +,{"typedef": "vr::glInt_t","type": "int32_t"} +,{"typedef": "vr::glUInt_t","type": "uint32_t"} +,{"typedef": "vr::TrackedDeviceIndex_t","type": "uint32_t"} +,{"typedef": "vr::VREvent_Data_t","type": "union VREvent_Data_t"} +,{"typedef": "vr::VRControllerState_t","type": "struct vr::VRControllerState001_t"} +,{"typedef": "vr::VROverlayHandle_t","type": "uint64_t"} +,{"typedef": "vr::TrackedCameraHandle_t","type": "uint64_t"} +,{"typedef": "vr::ScreenshotHandle_t","type": "uint32_t"} +,{"typedef": "vr::VRComponentProperties","type": "uint32_t"} +,{"typedef": "vr::TextureID_t","type": "int32_t"} +,{"typedef": "vr::VRNotificationId","type": "uint32_t"} +,{"typedef": "vr::HmdError","type": "enum vr::EVRInitError"} +,{"typedef": "vr::Hmd_Eye","type": "enum vr::EVREye"} +,{"typedef": "vr::GraphicsAPIConvention","type": "enum vr::EGraphicsAPIConvention"} +,{"typedef": "vr::ColorSpace","type": "enum vr::EColorSpace"} +,{"typedef": "vr::HmdTrackingResult","type": "enum vr::ETrackingResult"} +,{"typedef": "vr::TrackedDeviceClass","type": "enum vr::ETrackedDeviceClass"} +,{"typedef": "vr::TrackingUniverseOrigin","type": "enum vr::ETrackingUniverseOrigin"} +,{"typedef": "vr::TrackedDeviceProperty","type": "enum vr::ETrackedDeviceProperty"} +,{"typedef": "vr::TrackedPropertyError","type": "enum vr::ETrackedPropertyError"} +,{"typedef": "vr::VRSubmitFlags_t","type": "enum vr::EVRSubmitFlags"} +,{"typedef": "vr::VRState_t","type": "enum vr::EVRState"} +,{"typedef": "vr::CollisionBoundsStyle_t","type": "enum vr::ECollisionBoundsStyle"} +,{"typedef": "vr::VROverlayError","type": "enum vr::EVROverlayError"} +,{"typedef": "vr::VRFirmwareError","type": "enum vr::EVRFirmwareError"} +,{"typedef": "vr::VRCompositorError","type": "enum vr::EVRCompositorError"} +,{"typedef": "vr::VRScreenshotsError","type": "enum vr::EVRScreenshotError"} +], +"enums":[ + {"enumname": "vr::EVREye","values": [ + {"name": "Eye_Left","value": "0"} + ,{"name": 
"Eye_Right","value": "1"} +]} +, {"enumname": "vr::EGraphicsAPIConvention","values": [ + {"name": "API_DirectX","value": "0"} + ,{"name": "API_OpenGL","value": "1"} +]} +, {"enumname": "vr::EColorSpace","values": [ + {"name": "ColorSpace_Auto","value": "0"} + ,{"name": "ColorSpace_Gamma","value": "1"} + ,{"name": "ColorSpace_Linear","value": "2"} +]} +, {"enumname": "vr::ETrackingResult","values": [ + {"name": "TrackingResult_Uninitialized","value": "1"} + ,{"name": "TrackingResult_Calibrating_InProgress","value": "100"} + ,{"name": "TrackingResult_Calibrating_OutOfRange","value": "101"} + ,{"name": "TrackingResult_Running_OK","value": "200"} + ,{"name": "TrackingResult_Running_OutOfRange","value": "201"} +]} +, {"enumname": "vr::ETrackedDeviceClass","values": [ + {"name": "TrackedDeviceClass_Invalid","value": "0"} + ,{"name": "TrackedDeviceClass_HMD","value": "1"} + ,{"name": "TrackedDeviceClass_Controller","value": "2"} + ,{"name": "TrackedDeviceClass_TrackingReference","value": "4"} + ,{"name": "TrackedDeviceClass_Other","value": "1000"} +]} +, {"enumname": "vr::ETrackedControllerRole","values": [ + {"name": "TrackedControllerRole_Invalid","value": "0"} + ,{"name": "TrackedControllerRole_LeftHand","value": "1"} + ,{"name": "TrackedControllerRole_RightHand","value": "2"} +]} +, {"enumname": "vr::ETrackingUniverseOrigin","values": [ + {"name": "TrackingUniverseSeated","value": "0"} + ,{"name": "TrackingUniverseStanding","value": "1"} + ,{"name": "TrackingUniverseRawAndUncalibrated","value": "2"} +]} +, {"enumname": "vr::ETrackedDeviceProperty","values": [ + {"name": "Prop_TrackingSystemName_String","value": "1000"} + ,{"name": "Prop_ModelNumber_String","value": "1001"} + ,{"name": "Prop_SerialNumber_String","value": "1002"} + ,{"name": "Prop_RenderModelName_String","value": "1003"} + ,{"name": "Prop_WillDriftInYaw_Bool","value": "1004"} + ,{"name": "Prop_ManufacturerName_String","value": "1005"} + ,{"name": "Prop_TrackingFirmwareVersion_String","value": "1006"} + ,{"name": "Prop_HardwareRevision_String","value": "1007"} + ,{"name": "Prop_AllWirelessDongleDescriptions_String","value": "1008"} + ,{"name": "Prop_ConnectedWirelessDongle_String","value": "1009"} + ,{"name": "Prop_DeviceIsWireless_Bool","value": "1010"} + ,{"name": "Prop_DeviceIsCharging_Bool","value": "1011"} + ,{"name": "Prop_DeviceBatteryPercentage_Float","value": "1012"} + ,{"name": "Prop_StatusDisplayTransform_Matrix34","value": "1013"} + ,{"name": "Prop_Firmware_UpdateAvailable_Bool","value": "1014"} + ,{"name": "Prop_Firmware_ManualUpdate_Bool","value": "1015"} + ,{"name": "Prop_Firmware_ManualUpdateURL_String","value": "1016"} + ,{"name": "Prop_HardwareRevision_Uint64","value": "1017"} + ,{"name": "Prop_FirmwareVersion_Uint64","value": "1018"} + ,{"name": "Prop_FPGAVersion_Uint64","value": "1019"} + ,{"name": "Prop_VRCVersion_Uint64","value": "1020"} + ,{"name": "Prop_RadioVersion_Uint64","value": "1021"} + ,{"name": "Prop_DongleVersion_Uint64","value": "1022"} + ,{"name": "Prop_BlockServerShutdown_Bool","value": "1023"} + ,{"name": "Prop_CanUnifyCoordinateSystemWithHmd_Bool","value": "1024"} + ,{"name": "Prop_ContainsProximitySensor_Bool","value": "1025"} + ,{"name": "Prop_DeviceProvidesBatteryStatus_Bool","value": "1026"} + ,{"name": "Prop_DeviceCanPowerOff_Bool","value": "1027"} + ,{"name": "Prop_Firmware_ProgrammingTarget_String","value": "1028"} + ,{"name": "Prop_DeviceClass_Int32","value": "1029"} + ,{"name": "Prop_HasCamera_Bool","value": "1030"} + ,{"name": "Prop_DriverVersion_String","value": "1031"} + 
,{"name": "Prop_Firmware_ForceUpdateRequired_Bool","value": "1032"} + ,{"name": "Prop_ReportsTimeSinceVSync_Bool","value": "2000"} + ,{"name": "Prop_SecondsFromVsyncToPhotons_Float","value": "2001"} + ,{"name": "Prop_DisplayFrequency_Float","value": "2002"} + ,{"name": "Prop_UserIpdMeters_Float","value": "2003"} + ,{"name": "Prop_CurrentUniverseId_Uint64","value": "2004"} + ,{"name": "Prop_PreviousUniverseId_Uint64","value": "2005"} + ,{"name": "Prop_DisplayFirmwareVersion_Uint64","value": "2006"} + ,{"name": "Prop_IsOnDesktop_Bool","value": "2007"} + ,{"name": "Prop_DisplayMCType_Int32","value": "2008"} + ,{"name": "Prop_DisplayMCOffset_Float","value": "2009"} + ,{"name": "Prop_DisplayMCScale_Float","value": "2010"} + ,{"name": "Prop_EdidVendorID_Int32","value": "2011"} + ,{"name": "Prop_DisplayMCImageLeft_String","value": "2012"} + ,{"name": "Prop_DisplayMCImageRight_String","value": "2013"} + ,{"name": "Prop_DisplayGCBlackClamp_Float","value": "2014"} + ,{"name": "Prop_EdidProductID_Int32","value": "2015"} + ,{"name": "Prop_CameraToHeadTransform_Matrix34","value": "2016"} + ,{"name": "Prop_DisplayGCType_Int32","value": "2017"} + ,{"name": "Prop_DisplayGCOffset_Float","value": "2018"} + ,{"name": "Prop_DisplayGCScale_Float","value": "2019"} + ,{"name": "Prop_DisplayGCPrescale_Float","value": "2020"} + ,{"name": "Prop_DisplayGCImage_String","value": "2021"} + ,{"name": "Prop_LensCenterLeftU_Float","value": "2022"} + ,{"name": "Prop_LensCenterLeftV_Float","value": "2023"} + ,{"name": "Prop_LensCenterRightU_Float","value": "2024"} + ,{"name": "Prop_LensCenterRightV_Float","value": "2025"} + ,{"name": "Prop_UserHeadToEyeDepthMeters_Float","value": "2026"} + ,{"name": "Prop_CameraFirmwareVersion_Uint64","value": "2027"} + ,{"name": "Prop_CameraFirmwareDescription_String","value": "2028"} + ,{"name": "Prop_DisplayFPGAVersion_Uint64","value": "2029"} + ,{"name": "Prop_DisplayBootloaderVersion_Uint64","value": "2030"} + ,{"name": "Prop_DisplayHardwareVersion_Uint64","value": "2031"} + ,{"name": "Prop_AudioFirmwareVersion_Uint64","value": "2032"} + ,{"name": "Prop_CameraCompatibilityMode_Int32","value": "2033"} + ,{"name": "Prop_ScreenshotHorizontalFieldOfViewDegrees_Float","value": "2034"} + ,{"name": "Prop_ScreenshotVerticalFieldOfViewDegrees_Float","value": "2035"} + ,{"name": "Prop_DisplaySuppressed_Bool","value": "2036"} + ,{"name": "Prop_AttachedDeviceId_String","value": "3000"} + ,{"name": "Prop_SupportedButtons_Uint64","value": "3001"} + ,{"name": "Prop_Axis0Type_Int32","value": "3002"} + ,{"name": "Prop_Axis1Type_Int32","value": "3003"} + ,{"name": "Prop_Axis2Type_Int32","value": "3004"} + ,{"name": "Prop_Axis3Type_Int32","value": "3005"} + ,{"name": "Prop_Axis4Type_Int32","value": "3006"} + ,{"name": "Prop_FieldOfViewLeftDegrees_Float","value": "4000"} + ,{"name": "Prop_FieldOfViewRightDegrees_Float","value": "4001"} + ,{"name": "Prop_FieldOfViewTopDegrees_Float","value": "4002"} + ,{"name": "Prop_FieldOfViewBottomDegrees_Float","value": "4003"} + ,{"name": "Prop_TrackingRangeMinimumMeters_Float","value": "4004"} + ,{"name": "Prop_TrackingRangeMaximumMeters_Float","value": "4005"} + ,{"name": "Prop_ModeLabel_String","value": "4006"} + ,{"name": "Prop_VendorSpecific_Reserved_Start","value": "10000"} + ,{"name": "Prop_VendorSpecific_Reserved_End","value": "10999"} +]} +, {"enumname": "vr::ETrackedPropertyError","values": [ + {"name": "TrackedProp_Success","value": "0"} + ,{"name": "TrackedProp_WrongDataType","value": "1"} + ,{"name": "TrackedProp_WrongDeviceClass","value": "2"} + 
,{"name": "TrackedProp_BufferTooSmall","value": "3"} + ,{"name": "TrackedProp_UnknownProperty","value": "4"} + ,{"name": "TrackedProp_InvalidDevice","value": "5"} + ,{"name": "TrackedProp_CouldNotContactServer","value": "6"} + ,{"name": "TrackedProp_ValueNotProvidedByDevice","value": "7"} + ,{"name": "TrackedProp_StringExceedsMaximumLength","value": "8"} + ,{"name": "TrackedProp_NotYetAvailable","value": "9"} +]} +, {"enumname": "vr::EVRSubmitFlags","values": [ + {"name": "Submit_Default","value": "0"} + ,{"name": "Submit_LensDistortionAlreadyApplied","value": "1"} + ,{"name": "Submit_GlRenderBuffer","value": "2"} +]} +, {"enumname": "vr::EVRState","values": [ + {"name": "VRState_Undefined","value": "-1"} + ,{"name": "VRState_Off","value": "0"} + ,{"name": "VRState_Searching","value": "1"} + ,{"name": "VRState_Searching_Alert","value": "2"} + ,{"name": "VRState_Ready","value": "3"} + ,{"name": "VRState_Ready_Alert","value": "4"} + ,{"name": "VRState_NotReady","value": "5"} + ,{"name": "VRState_Standby","value": "6"} +]} +, {"enumname": "vr::EVREventType","values": [ + {"name": "VREvent_None","value": "0"} + ,{"name": "VREvent_TrackedDeviceActivated","value": "100"} + ,{"name": "VREvent_TrackedDeviceDeactivated","value": "101"} + ,{"name": "VREvent_TrackedDeviceUpdated","value": "102"} + ,{"name": "VREvent_TrackedDeviceUserInteractionStarted","value": "103"} + ,{"name": "VREvent_TrackedDeviceUserInteractionEnded","value": "104"} + ,{"name": "VREvent_IpdChanged","value": "105"} + ,{"name": "VREvent_EnterStandbyMode","value": "106"} + ,{"name": "VREvent_LeaveStandbyMode","value": "107"} + ,{"name": "VREvent_TrackedDeviceRoleChanged","value": "108"} + ,{"name": "VREvent_ButtonPress","value": "200"} + ,{"name": "VREvent_ButtonUnpress","value": "201"} + ,{"name": "VREvent_ButtonTouch","value": "202"} + ,{"name": "VREvent_ButtonUntouch","value": "203"} + ,{"name": "VREvent_MouseMove","value": "300"} + ,{"name": "VREvent_MouseButtonDown","value": "301"} + ,{"name": "VREvent_MouseButtonUp","value": "302"} + ,{"name": "VREvent_FocusEnter","value": "303"} + ,{"name": "VREvent_FocusLeave","value": "304"} + ,{"name": "VREvent_Scroll","value": "305"} + ,{"name": "VREvent_TouchPadMove","value": "306"} + ,{"name": "VREvent_InputFocusCaptured","value": "400"} + ,{"name": "VREvent_InputFocusReleased","value": "401"} + ,{"name": "VREvent_SceneFocusLost","value": "402"} + ,{"name": "VREvent_SceneFocusGained","value": "403"} + ,{"name": "VREvent_SceneApplicationChanged","value": "404"} + ,{"name": "VREvent_SceneFocusChanged","value": "405"} + ,{"name": "VREvent_InputFocusChanged","value": "406"} + ,{"name": "VREvent_SceneApplicationSecondaryRenderingStarted","value": "407"} + ,{"name": "VREvent_HideRenderModels","value": "410"} + ,{"name": "VREvent_ShowRenderModels","value": "411"} + ,{"name": "VREvent_OverlayShown","value": "500"} + ,{"name": "VREvent_OverlayHidden","value": "501"} + ,{"name": "VREvent_DashboardActivated","value": "502"} + ,{"name": "VREvent_DashboardDeactivated","value": "503"} + ,{"name": "VREvent_DashboardThumbSelected","value": "504"} + ,{"name": "VREvent_DashboardRequested","value": "505"} + ,{"name": "VREvent_ResetDashboard","value": "506"} + ,{"name": "VREvent_RenderToast","value": "507"} + ,{"name": "VREvent_ImageLoaded","value": "508"} + ,{"name": "VREvent_ShowKeyboard","value": "509"} + ,{"name": "VREvent_HideKeyboard","value": "510"} + ,{"name": "VREvent_OverlayGamepadFocusGained","value": "511"} + ,{"name": "VREvent_OverlayGamepadFocusLost","value": "512"} + ,{"name": 
"VREvent_OverlaySharedTextureChanged","value": "513"} + ,{"name": "VREvent_DashboardGuideButtonDown","value": "514"} + ,{"name": "VREvent_DashboardGuideButtonUp","value": "515"} + ,{"name": "VREvent_ScreenshotTriggered","value": "516"} + ,{"name": "VREvent_ImageFailed","value": "517"} + ,{"name": "VREvent_RequestScreenshot","value": "520"} + ,{"name": "VREvent_ScreenshotTaken","value": "521"} + ,{"name": "VREvent_ScreenshotFailed","value": "522"} + ,{"name": "VREvent_SubmitScreenshotToDashboard","value": "523"} + ,{"name": "VREvent_Notification_Shown","value": "600"} + ,{"name": "VREvent_Notification_Hidden","value": "601"} + ,{"name": "VREvent_Notification_BeginInteraction","value": "602"} + ,{"name": "VREvent_Notification_Destroyed","value": "603"} + ,{"name": "VREvent_Quit","value": "700"} + ,{"name": "VREvent_ProcessQuit","value": "701"} + ,{"name": "VREvent_QuitAborted_UserPrompt","value": "702"} + ,{"name": "VREvent_QuitAcknowledged","value": "703"} + ,{"name": "VREvent_DriverRequestedQuit","value": "704"} + ,{"name": "VREvent_ChaperoneDataHasChanged","value": "800"} + ,{"name": "VREvent_ChaperoneUniverseHasChanged","value": "801"} + ,{"name": "VREvent_ChaperoneTempDataHasChanged","value": "802"} + ,{"name": "VREvent_ChaperoneSettingsHaveChanged","value": "803"} + ,{"name": "VREvent_SeatedZeroPoseReset","value": "804"} + ,{"name": "VREvent_AudioSettingsHaveChanged","value": "820"} + ,{"name": "VREvent_BackgroundSettingHasChanged","value": "850"} + ,{"name": "VREvent_CameraSettingsHaveChanged","value": "851"} + ,{"name": "VREvent_ReprojectionSettingHasChanged","value": "852"} + ,{"name": "VREvent_ModelSkinSettingsHaveChanged","value": "853"} + ,{"name": "VREvent_EnvironmentSettingsHaveChanged","value": "854"} + ,{"name": "VREvent_StatusUpdate","value": "900"} + ,{"name": "VREvent_MCImageUpdated","value": "1000"} + ,{"name": "VREvent_FirmwareUpdateStarted","value": "1100"} + ,{"name": "VREvent_FirmwareUpdateFinished","value": "1101"} + ,{"name": "VREvent_KeyboardClosed","value": "1200"} + ,{"name": "VREvent_KeyboardCharInput","value": "1201"} + ,{"name": "VREvent_KeyboardDone","value": "1202"} + ,{"name": "VREvent_ApplicationTransitionStarted","value": "1300"} + ,{"name": "VREvent_ApplicationTransitionAborted","value": "1301"} + ,{"name": "VREvent_ApplicationTransitionNewAppStarted","value": "1302"} + ,{"name": "VREvent_ApplicationListUpdated","value": "1303"} + ,{"name": "VREvent_Compositor_MirrorWindowShown","value": "1400"} + ,{"name": "VREvent_Compositor_MirrorWindowHidden","value": "1401"} + ,{"name": "VREvent_Compositor_ChaperoneBoundsShown","value": "1410"} + ,{"name": "VREvent_Compositor_ChaperoneBoundsHidden","value": "1411"} + ,{"name": "VREvent_TrackedCamera_StartVideoStream","value": "1500"} + ,{"name": "VREvent_TrackedCamera_StopVideoStream","value": "1501"} + ,{"name": "VREvent_TrackedCamera_PauseVideoStream","value": "1502"} + ,{"name": "VREvent_TrackedCamera_ResumeVideoStream","value": "1503"} + ,{"name": "VREvent_PerformanceTest_EnableCapture","value": "1600"} + ,{"name": "VREvent_PerformanceTest_DisableCapture","value": "1601"} + ,{"name": "VREvent_PerformanceTest_FidelityLevel","value": "1602"} + ,{"name": "VREvent_VendorSpecific_Reserved_Start","value": "10000"} + ,{"name": "VREvent_VendorSpecific_Reserved_End","value": "19999"} +]} +, {"enumname": "vr::EDeviceActivityLevel","values": [ + {"name": "k_EDeviceActivityLevel_Unknown","value": "-1"} + ,{"name": "k_EDeviceActivityLevel_Idle","value": "0"} + ,{"name": "k_EDeviceActivityLevel_UserInteraction","value": "1"} 
+ ,{"name": "k_EDeviceActivityLevel_UserInteraction_Timeout","value": "2"} + ,{"name": "k_EDeviceActivityLevel_Standby","value": "3"} +]} +, {"enumname": "vr::EVRButtonId","values": [ + {"name": "k_EButton_System","value": "0"} + ,{"name": "k_EButton_ApplicationMenu","value": "1"} + ,{"name": "k_EButton_Grip","value": "2"} + ,{"name": "k_EButton_DPad_Left","value": "3"} + ,{"name": "k_EButton_DPad_Up","value": "4"} + ,{"name": "k_EButton_DPad_Right","value": "5"} + ,{"name": "k_EButton_DPad_Down","value": "6"} + ,{"name": "k_EButton_A","value": "7"} + ,{"name": "k_EButton_Axis0","value": "32"} + ,{"name": "k_EButton_Axis1","value": "33"} + ,{"name": "k_EButton_Axis2","value": "34"} + ,{"name": "k_EButton_Axis3","value": "35"} + ,{"name": "k_EButton_Axis4","value": "36"} + ,{"name": "k_EButton_SteamVR_Touchpad","value": "32"} + ,{"name": "k_EButton_SteamVR_Trigger","value": "33"} + ,{"name": "k_EButton_Dashboard_Back","value": "2"} + ,{"name": "k_EButton_Max","value": "64"} +]} +, {"enumname": "vr::EVRMouseButton","values": [ + {"name": "VRMouseButton_Left","value": "1"} + ,{"name": "VRMouseButton_Right","value": "2"} + ,{"name": "VRMouseButton_Middle","value": "4"} +]} +, {"enumname": "vr::EVRControllerAxisType","values": [ + {"name": "k_eControllerAxis_None","value": "0"} + ,{"name": "k_eControllerAxis_TrackPad","value": "1"} + ,{"name": "k_eControllerAxis_Joystick","value": "2"} + ,{"name": "k_eControllerAxis_Trigger","value": "3"} +]} +, {"enumname": "vr::EVRControllerEventOutputType","values": [ + {"name": "ControllerEventOutput_OSEvents","value": "0"} + ,{"name": "ControllerEventOutput_VREvents","value": "1"} +]} +, {"enumname": "vr::ECollisionBoundsStyle","values": [ + {"name": "COLLISION_BOUNDS_STYLE_BEGINNER","value": "0"} + ,{"name": "COLLISION_BOUNDS_STYLE_INTERMEDIATE","value": "1"} + ,{"name": "COLLISION_BOUNDS_STYLE_SQUARES","value": "2"} + ,{"name": "COLLISION_BOUNDS_STYLE_ADVANCED","value": "3"} + ,{"name": "COLLISION_BOUNDS_STYLE_NONE","value": "4"} + ,{"name": "COLLISION_BOUNDS_STYLE_COUNT","value": "5"} +]} +, {"enumname": "vr::EVROverlayError","values": [ + {"name": "VROverlayError_None","value": "0"} + ,{"name": "VROverlayError_UnknownOverlay","value": "10"} + ,{"name": "VROverlayError_InvalidHandle","value": "11"} + ,{"name": "VROverlayError_PermissionDenied","value": "12"} + ,{"name": "VROverlayError_OverlayLimitExceeded","value": "13"} + ,{"name": "VROverlayError_WrongVisibilityType","value": "14"} + ,{"name": "VROverlayError_KeyTooLong","value": "15"} + ,{"name": "VROverlayError_NameTooLong","value": "16"} + ,{"name": "VROverlayError_KeyInUse","value": "17"} + ,{"name": "VROverlayError_WrongTransformType","value": "18"} + ,{"name": "VROverlayError_InvalidTrackedDevice","value": "19"} + ,{"name": "VROverlayError_InvalidParameter","value": "20"} + ,{"name": "VROverlayError_ThumbnailCantBeDestroyed","value": "21"} + ,{"name": "VROverlayError_ArrayTooSmall","value": "22"} + ,{"name": "VROverlayError_RequestFailed","value": "23"} + ,{"name": "VROverlayError_InvalidTexture","value": "24"} + ,{"name": "VROverlayError_UnableToLoadFile","value": "25"} + ,{"name": "VROVerlayError_KeyboardAlreadyInUse","value": "26"} + ,{"name": "VROverlayError_NoNeighbor","value": "27"} +]} +, {"enumname": "vr::EVRApplicationType","values": [ + {"name": "VRApplication_Other","value": "0"} + ,{"name": "VRApplication_Scene","value": "1"} + ,{"name": "VRApplication_Overlay","value": "2"} + ,{"name": "VRApplication_Background","value": "3"} + ,{"name": "VRApplication_Utility","value": "4"} + 
,{"name": "VRApplication_VRMonitor","value": "5"} +]} +, {"enumname": "vr::EVRFirmwareError","values": [ + {"name": "VRFirmwareError_None","value": "0"} + ,{"name": "VRFirmwareError_Success","value": "1"} + ,{"name": "VRFirmwareError_Fail","value": "2"} +]} +, {"enumname": "vr::EVRNotificationError","values": [ + {"name": "VRNotificationError_OK","value": "0"} + ,{"name": "VRNotificationError_InvalidNotificationId","value": "100"} + ,{"name": "VRNotificationError_NotificationQueueFull","value": "101"} + ,{"name": "VRNotificationError_InvalidOverlayHandle","value": "102"} + ,{"name": "VRNotificationError_SystemWithUserValueAlreadyExists","value": "103"} +]} +, {"enumname": "vr::EVRInitError","values": [ + {"name": "VRInitError_None","value": "0"} + ,{"name": "VRInitError_Unknown","value": "1"} + ,{"name": "VRInitError_Init_InstallationNotFound","value": "100"} + ,{"name": "VRInitError_Init_InstallationCorrupt","value": "101"} + ,{"name": "VRInitError_Init_VRClientDLLNotFound","value": "102"} + ,{"name": "VRInitError_Init_FileNotFound","value": "103"} + ,{"name": "VRInitError_Init_FactoryNotFound","value": "104"} + ,{"name": "VRInitError_Init_InterfaceNotFound","value": "105"} + ,{"name": "VRInitError_Init_InvalidInterface","value": "106"} + ,{"name": "VRInitError_Init_UserConfigDirectoryInvalid","value": "107"} + ,{"name": "VRInitError_Init_HmdNotFound","value": "108"} + ,{"name": "VRInitError_Init_NotInitialized","value": "109"} + ,{"name": "VRInitError_Init_PathRegistryNotFound","value": "110"} + ,{"name": "VRInitError_Init_NoConfigPath","value": "111"} + ,{"name": "VRInitError_Init_NoLogPath","value": "112"} + ,{"name": "VRInitError_Init_PathRegistryNotWritable","value": "113"} + ,{"name": "VRInitError_Init_AppInfoInitFailed","value": "114"} + ,{"name": "VRInitError_Init_Retry","value": "115"} + ,{"name": "VRInitError_Init_InitCanceledByUser","value": "116"} + ,{"name": "VRInitError_Init_AnotherAppLaunching","value": "117"} + ,{"name": "VRInitError_Init_SettingsInitFailed","value": "118"} + ,{"name": "VRInitError_Init_ShuttingDown","value": "119"} + ,{"name": "VRInitError_Init_TooManyObjects","value": "120"} + ,{"name": "VRInitError_Init_NoServerForBackgroundApp","value": "121"} + ,{"name": "VRInitError_Init_NotSupportedWithCompositor","value": "122"} + ,{"name": "VRInitError_Init_NotAvailableToUtilityApps","value": "123"} + ,{"name": "VRInitError_Init_Internal","value": "124"} + ,{"name": "VRInitError_Driver_Failed","value": "200"} + ,{"name": "VRInitError_Driver_Unknown","value": "201"} + ,{"name": "VRInitError_Driver_HmdUnknown","value": "202"} + ,{"name": "VRInitError_Driver_NotLoaded","value": "203"} + ,{"name": "VRInitError_Driver_RuntimeOutOfDate","value": "204"} + ,{"name": "VRInitError_Driver_HmdInUse","value": "205"} + ,{"name": "VRInitError_Driver_NotCalibrated","value": "206"} + ,{"name": "VRInitError_Driver_CalibrationInvalid","value": "207"} + ,{"name": "VRInitError_Driver_HmdDisplayNotFound","value": "208"} + ,{"name": "VRInitError_IPC_ServerInitFailed","value": "300"} + ,{"name": "VRInitError_IPC_ConnectFailed","value": "301"} + ,{"name": "VRInitError_IPC_SharedStateInitFailed","value": "302"} + ,{"name": "VRInitError_IPC_CompositorInitFailed","value": "303"} + ,{"name": "VRInitError_IPC_MutexInitFailed","value": "304"} + ,{"name": "VRInitError_IPC_Failed","value": "305"} + ,{"name": "VRInitError_Compositor_Failed","value": "400"} + ,{"name": "VRInitError_Compositor_D3D11HardwareRequired","value": "401"} + ,{"name": "VRInitError_Compositor_FirmwareRequiresUpdate","value": 
"402"} + ,{"name": "VRInitError_Compositor_OverlayInitFailed","value": "403"} + ,{"name": "VRInitError_Compositor_ScreenshotsInitFailed","value": "404"} + ,{"name": "VRInitError_VendorSpecific_UnableToConnectToOculusRuntime","value": "1000"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_CantOpenDevice","value": "1101"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UnableToRequestConfigStart","value": "1102"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_NoStoredConfig","value": "1103"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_ConfigTooBig","value": "1104"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_ConfigTooSmall","value": "1105"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UnableToInitZLib","value": "1106"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_CantReadFirmwareVersion","value": "1107"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UnableToSendUserDataStart","value": "1108"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataStart","value": "1109"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataNext","value": "1110"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UserDataAddressRange","value": "1111"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_UserDataError","value": "1112"} + ,{"name": "VRInitError_VendorSpecific_HmdFound_ConfigFailedSanityCheck","value": "1113"} + ,{"name": "VRInitError_Steam_SteamInstallationNotFound","value": "2000"} +]} +, {"enumname": "vr::EVRScreenshotType","values": [ + {"name": "VRScreenshotType_None","value": "0"} + ,{"name": "VRScreenshotType_Mono","value": "1"} + ,{"name": "VRScreenshotType_Stereo","value": "2"} + ,{"name": "VRScreenshotType_Cubemap","value": "3"} + ,{"name": "VRScreenshotType_MonoPanorama","value": "4"} + ,{"name": "VRScreenshotType_StereoPanorama","value": "5"} +]} +, {"enumname": "vr::EVRScreenshotPropertyFilenames","values": [ + {"name": "VRScreenshotPropertyFilenames_Preview","value": "0"} + ,{"name": "VRScreenshotPropertyFilenames_VR","value": "1"} +]} +, {"enumname": "vr::EVRTrackedCameraError","values": [ + {"name": "VRTrackedCameraError_None","value": "0"} + ,{"name": "VRTrackedCameraError_OperationFailed","value": "100"} + ,{"name": "VRTrackedCameraError_InvalidHandle","value": "101"} + ,{"name": "VRTrackedCameraError_InvalidFrameHeaderVersion","value": "102"} + ,{"name": "VRTrackedCameraError_OutOfHandles","value": "103"} + ,{"name": "VRTrackedCameraError_IPCFailure","value": "104"} + ,{"name": "VRTrackedCameraError_NotSupportedForThisDevice","value": "105"} + ,{"name": "VRTrackedCameraError_SharedMemoryFailure","value": "106"} + ,{"name": "VRTrackedCameraError_FrameBufferingFailure","value": "107"} + ,{"name": "VRTrackedCameraError_StreamSetupFailure","value": "108"} + ,{"name": "VRTrackedCameraError_InvalidGLTextureId","value": "109"} + ,{"name": "VRTrackedCameraError_InvalidSharedTextureHandle","value": "110"} + ,{"name": "VRTrackedCameraError_FailedToGetGLTextureId","value": "111"} + ,{"name": "VRTrackedCameraError_SharedTextureFailure","value": "112"} + ,{"name": "VRTrackedCameraError_NoFrameAvailable","value": "113"} + ,{"name": "VRTrackedCameraError_InvalidArgument","value": "114"} + ,{"name": "VRTrackedCameraError_InvalidFrameBufferSize","value": "115"} +]} +, {"enumname": "vr::EVRTrackedCameraFrameType","values": [ + {"name": "VRTrackedCameraFrameType_Distorted","value": "0"} + ,{"name": "VRTrackedCameraFrameType_Undistorted","value": "1"} + ,{"name": "VRTrackedCameraFrameType_MaximumUndistorted","value": "2"} + ,{"name": 
"MAX_CAMERA_FRAME_TYPES","value": "3"} +]} +, {"enumname": "vr::EVRApplicationError","values": [ + {"name": "VRApplicationError_None","value": "0"} + ,{"name": "VRApplicationError_AppKeyAlreadyExists","value": "100"} + ,{"name": "VRApplicationError_NoManifest","value": "101"} + ,{"name": "VRApplicationError_NoApplication","value": "102"} + ,{"name": "VRApplicationError_InvalidIndex","value": "103"} + ,{"name": "VRApplicationError_UnknownApplication","value": "104"} + ,{"name": "VRApplicationError_IPCFailed","value": "105"} + ,{"name": "VRApplicationError_ApplicationAlreadyRunning","value": "106"} + ,{"name": "VRApplicationError_InvalidManifest","value": "107"} + ,{"name": "VRApplicationError_InvalidApplication","value": "108"} + ,{"name": "VRApplicationError_LaunchFailed","value": "109"} + ,{"name": "VRApplicationError_ApplicationAlreadyStarting","value": "110"} + ,{"name": "VRApplicationError_LaunchInProgress","value": "111"} + ,{"name": "VRApplicationError_OldApplicationQuitting","value": "112"} + ,{"name": "VRApplicationError_TransitionAborted","value": "113"} + ,{"name": "VRApplicationError_IsTemplate","value": "114"} + ,{"name": "VRApplicationError_BufferTooSmall","value": "200"} + ,{"name": "VRApplicationError_PropertyNotSet","value": "201"} + ,{"name": "VRApplicationError_UnknownProperty","value": "202"} + ,{"name": "VRApplicationError_InvalidParameter","value": "203"} +]} +, {"enumname": "vr::EVRApplicationProperty","values": [ + {"name": "VRApplicationProperty_Name_String","value": "0"} + ,{"name": "VRApplicationProperty_LaunchType_String","value": "11"} + ,{"name": "VRApplicationProperty_WorkingDirectory_String","value": "12"} + ,{"name": "VRApplicationProperty_BinaryPath_String","value": "13"} + ,{"name": "VRApplicationProperty_Arguments_String","value": "14"} + ,{"name": "VRApplicationProperty_URL_String","value": "15"} + ,{"name": "VRApplicationProperty_Description_String","value": "50"} + ,{"name": "VRApplicationProperty_NewsURL_String","value": "51"} + ,{"name": "VRApplicationProperty_ImagePath_String","value": "52"} + ,{"name": "VRApplicationProperty_Source_String","value": "53"} + ,{"name": "VRApplicationProperty_IsDashboardOverlay_Bool","value": "60"} + ,{"name": "VRApplicationProperty_IsTemplate_Bool","value": "61"} + ,{"name": "VRApplicationProperty_IsInstanced_Bool","value": "62"} + ,{"name": "VRApplicationProperty_LastLaunchTime_Uint64","value": "70"} +]} +, {"enumname": "vr::EVRApplicationTransitionState","values": [ + {"name": "VRApplicationTransition_None","value": "0"} + ,{"name": "VRApplicationTransition_OldAppQuitSent","value": "10"} + ,{"name": "VRApplicationTransition_WaitingForExternalLaunch","value": "11"} + ,{"name": "VRApplicationTransition_NewAppLaunched","value": "20"} +]} +, {"enumname": "vr::ChaperoneCalibrationState","values": [ + {"name": "ChaperoneCalibrationState_OK","value": "1"} + ,{"name": "ChaperoneCalibrationState_Warning","value": "100"} + ,{"name": "ChaperoneCalibrationState_Warning_BaseStationMayHaveMoved","value": "101"} + ,{"name": "ChaperoneCalibrationState_Warning_BaseStationRemoved","value": "102"} + ,{"name": "ChaperoneCalibrationState_Warning_SeatedBoundsInvalid","value": "103"} + ,{"name": "ChaperoneCalibrationState_Error","value": "200"} + ,{"name": "ChaperoneCalibrationState_Error_BaseStationUninitalized","value": "201"} + ,{"name": "ChaperoneCalibrationState_Error_BaseStationConflict","value": "202"} + ,{"name": "ChaperoneCalibrationState_Error_PlayAreaInvalid","value": "203"} + ,{"name": 
"ChaperoneCalibrationState_Error_CollisionBoundsInvalid","value": "204"} +]} +, {"enumname": "vr::EChaperoneConfigFile","values": [ + {"name": "EChaperoneConfigFile_Live","value": "1"} + ,{"name": "EChaperoneConfigFile_Temp","value": "2"} +]} +, {"enumname": "vr::EChaperoneImportFlags","values": [ + {"name": "EChaperoneImport_BoundsOnly","value": "1"} +]} +, {"enumname": "vr::EVRCompositorError","values": [ + {"name": "VRCompositorError_None","value": "0"} + ,{"name": "VRCompositorError_RequestFailed","value": "1"} + ,{"name": "VRCompositorError_IncompatibleVersion","value": "100"} + ,{"name": "VRCompositorError_DoNotHaveFocus","value": "101"} + ,{"name": "VRCompositorError_InvalidTexture","value": "102"} + ,{"name": "VRCompositorError_IsNotSceneApplication","value": "103"} + ,{"name": "VRCompositorError_TextureIsOnWrongDevice","value": "104"} + ,{"name": "VRCompositorError_TextureUsesUnsupportedFormat","value": "105"} + ,{"name": "VRCompositorError_SharedTexturesNotSupported","value": "106"} + ,{"name": "VRCompositorError_IndexOutOfRange","value": "107"} +]} +, {"enumname": "vr::VROverlayInputMethod","values": [ + {"name": "VROverlayInputMethod_None","value": "0"} + ,{"name": "VROverlayInputMethod_Mouse","value": "1"} +]} +, {"enumname": "vr::VROverlayTransformType","values": [ + {"name": "VROverlayTransform_Absolute","value": "0"} + ,{"name": "VROverlayTransform_TrackedDeviceRelative","value": "1"} + ,{"name": "VROverlayTransform_SystemOverlay","value": "2"} + ,{"name": "VROverlayTransform_TrackedComponent","value": "3"} +]} +, {"enumname": "vr::VROverlayFlags","values": [ + {"name": "VROverlayFlags_None","value": "0"} + ,{"name": "VROverlayFlags_Curved","value": "1"} + ,{"name": "VROverlayFlags_RGSS4X","value": "2"} + ,{"name": "VROverlayFlags_NoDashboardTab","value": "3"} + ,{"name": "VROverlayFlags_AcceptsGamepadEvents","value": "4"} + ,{"name": "VROverlayFlags_ShowGamepadFocus","value": "5"} + ,{"name": "VROverlayFlags_SendVRScrollEvents","value": "6"} + ,{"name": "VROverlayFlags_SendVRTouchpadEvents","value": "7"} + ,{"name": "VROverlayFlags_ShowTouchPadScrollWheel","value": "8"} + ,{"name": "VROverlayFlags_TransferOwnershipToInternalProcess","value": "9"} + ,{"name": "VROverlayFlags_SideBySide_Parallel","value": "10"} + ,{"name": "VROverlayFlags_SideBySide_Crossed","value": "11"} + ,{"name": "VROverlayFlags_Panorama","value": "12"} + ,{"name": "VROverlayFlags_StereoPanorama","value": "13"} +]} +, {"enumname": "vr::EGamepadTextInputMode","values": [ + {"name": "k_EGamepadTextInputModeNormal","value": "0"} + ,{"name": "k_EGamepadTextInputModePassword","value": "1"} + ,{"name": "k_EGamepadTextInputModeSubmit","value": "2"} +]} +, {"enumname": "vr::EGamepadTextInputLineMode","values": [ + {"name": "k_EGamepadTextInputLineModeSingleLine","value": "0"} + ,{"name": "k_EGamepadTextInputLineModeMultipleLines","value": "1"} +]} +, {"enumname": "vr::EOverlayDirection","values": [ + {"name": "OverlayDirection_Up","value": "0"} + ,{"name": "OverlayDirection_Down","value": "1"} + ,{"name": "OverlayDirection_Left","value": "2"} + ,{"name": "OverlayDirection_Right","value": "3"} + ,{"name": "OverlayDirection_Count","value": "4"} +]} +, {"enumname": "vr::EVRRenderModelError","values": [ + {"name": "VRRenderModelError_None","value": "0"} + ,{"name": "VRRenderModelError_Loading","value": "100"} + ,{"name": "VRRenderModelError_NotSupported","value": "200"} + ,{"name": "VRRenderModelError_InvalidArg","value": "300"} + ,{"name": "VRRenderModelError_InvalidModel","value": "301"} + ,{"name": 
"VRRenderModelError_NoShapes","value": "302"} + ,{"name": "VRRenderModelError_MultipleShapes","value": "303"} + ,{"name": "VRRenderModelError_TooManyVertices","value": "304"} + ,{"name": "VRRenderModelError_MultipleTextures","value": "305"} + ,{"name": "VRRenderModelError_BufferTooSmall","value": "306"} + ,{"name": "VRRenderModelError_NotEnoughNormals","value": "307"} + ,{"name": "VRRenderModelError_NotEnoughTexCoords","value": "308"} + ,{"name": "VRRenderModelError_InvalidTexture","value": "400"} +]} +, {"enumname": "vr::EVRComponentProperty","values": [ + {"name": "VRComponentProperty_IsStatic","value": "1"} + ,{"name": "VRComponentProperty_IsVisible","value": "2"} + ,{"name": "VRComponentProperty_IsTouched","value": "4"} + ,{"name": "VRComponentProperty_IsPressed","value": "8"} + ,{"name": "VRComponentProperty_IsScrolled","value": "16"} +]} +, {"enumname": "vr::EVRNotificationType","values": [ + {"name": "EVRNotificationType_Transient","value": "0"} + ,{"name": "EVRNotificationType_Persistent","value": "1"} + ,{"name": "EVRNotificationType_Transient_SystemWithUserValue","value": "2"} +]} +, {"enumname": "vr::EVRNotificationStyle","values": [ + {"name": "EVRNotificationStyle_None","value": "0"} + ,{"name": "EVRNotificationStyle_Application","value": "100"} + ,{"name": "EVRNotificationStyle_Contact_Disabled","value": "200"} + ,{"name": "EVRNotificationStyle_Contact_Enabled","value": "201"} + ,{"name": "EVRNotificationStyle_Contact_Active","value": "202"} +]} +, {"enumname": "vr::EVRSettingsError","values": [ + {"name": "VRSettingsError_None","value": "0"} + ,{"name": "VRSettingsError_IPCFailed","value": "1"} + ,{"name": "VRSettingsError_WriteFailed","value": "2"} + ,{"name": "VRSettingsError_ReadFailed","value": "3"} +]} +, {"enumname": "vr::EVRScreenshotError","values": [ + {"name": "VRScreenshotError_None","value": "0"} + ,{"name": "VRScreenshotError_RequestFailed","value": "1"} + ,{"name": "VRScreenshotError_IncompatibleVersion","value": "100"} + ,{"name": "VRScreenshotError_NotFound","value": "101"} + ,{"name": "VRScreenshotError_BufferTooSmall","value": "102"} + ,{"name": "VRScreenshotError_ScreenshotAlreadyInProgress","value": "108"} +]} +], +"consts":[{ + "constname": "k_unTrackingStringSize","consttype": "const uint32_t", "constval": "32"} +,{ + "constname": "k_unMaxDriverDebugResponseSize","consttype": "const uint32_t", "constval": "32768"} +,{ + "constname": "k_unTrackedDeviceIndex_Hmd","consttype": "const uint32_t", "constval": "0"} +,{ + "constname": "k_unMaxTrackedDeviceCount","consttype": "const uint32_t", "constval": "16"} +,{ + "constname": "k_unTrackedDeviceIndexOther","consttype": "const uint32_t", "constval": "4294967294"} +,{ + "constname": "k_unTrackedDeviceIndexInvalid","consttype": "const uint32_t", "constval": "4294967295"} +,{ + "constname": "k_unMaxPropertyStringSize","consttype": "const uint32_t", "constval": "32768"} +,{ + "constname": "k_unControllerStateAxisCount","consttype": "const uint32_t", "constval": "5"} +,{ + "constname": "k_ulOverlayHandleInvalid","consttype": "const VROverlayHandle_t", "constval": "0"} +,{ + "constname": "k_unScreenshotHandleInvalid","consttype": "const uint32_t", "constval": "0"} +,{ + "constname": "IVRSystem_Version","consttype": "const char *const", "constval": "IVRSystem_012"} +,{ + "constname": "IVRExtendedDisplay_Version","consttype": "const char *const", "constval": "IVRExtendedDisplay_001"} +,{ + "constname": "IVRTrackedCamera_Version","consttype": "const char *const", "constval": "IVRTrackedCamera_003"} +,{ + "constname": 
"k_unMaxApplicationKeyLength","consttype": "const uint32_t", "constval": "128"} +,{ + "constname": "IVRApplications_Version","consttype": "const char *const", "constval": "IVRApplications_005"} +,{ + "constname": "IVRChaperone_Version","consttype": "const char *const", "constval": "IVRChaperone_003"} +,{ + "constname": "IVRChaperoneSetup_Version","consttype": "const char *const", "constval": "IVRChaperoneSetup_005"} +,{ + "constname": "IVRCompositor_Version","consttype": "const char *const", "constval": "IVRCompositor_015"} +,{ + "constname": "k_unVROverlayMaxKeyLength","consttype": "const uint32_t", "constval": "128"} +,{ + "constname": "k_unVROverlayMaxNameLength","consttype": "const uint32_t", "constval": "128"} +,{ + "constname": "k_unMaxOverlayCount","consttype": "const uint32_t", "constval": "32"} +,{ + "constname": "IVROverlay_Version","consttype": "const char *const", "constval": "IVROverlay_012"} +,{ + "constname": "k_pch_Controller_Component_GDC2015","consttype": "const char *const", "constval": "gdc2015"} +,{ + "constname": "k_pch_Controller_Component_Base","consttype": "const char *const", "constval": "base"} +,{ + "constname": "k_pch_Controller_Component_Tip","consttype": "const char *const", "constval": "tip"} +,{ + "constname": "k_pch_Controller_Component_HandGrip","consttype": "const char *const", "constval": "handgrip"} +,{ + "constname": "k_pch_Controller_Component_Status","consttype": "const char *const", "constval": "status"} +,{ + "constname": "IVRRenderModels_Version","consttype": "const char *const", "constval": "IVRRenderModels_005"} +,{ + "constname": "k_unNotificationTextMaxSize","consttype": "const uint32_t", "constval": "256"} +,{ + "constname": "IVRNotifications_Version","consttype": "const char *const", "constval": "IVRNotifications_002"} +,{ + "constname": "k_unMaxSettingsKeyLength","consttype": "const uint32_t", "constval": "128"} +,{ + "constname": "IVRSettings_Version","consttype": "const char *const", "constval": "IVRSettings_001"} +,{ + "constname": "k_pch_SteamVR_Section","consttype": "const char *const", "constval": "steamvr"} +,{ + "constname": "k_pch_SteamVR_RequireHmd_String","consttype": "const char *const", "constval": "requireHmd"} +,{ + "constname": "k_pch_SteamVR_ForcedDriverKey_String","consttype": "const char *const", "constval": "forcedDriver"} +,{ + "constname": "k_pch_SteamVR_ForcedHmdKey_String","consttype": "const char *const", "constval": "forcedHmd"} +,{ + "constname": "k_pch_SteamVR_DisplayDebug_Bool","consttype": "const char *const", "constval": "displayDebug"} +,{ + "constname": "k_pch_SteamVR_DebugProcessPipe_String","consttype": "const char *const", "constval": "debugProcessPipe"} +,{ + "constname": "k_pch_SteamVR_EnableDistortion_Bool","consttype": "const char *const", "constval": "enableDistortion"} +,{ + "constname": "k_pch_SteamVR_DisplayDebugX_Int32","consttype": "const char *const", "constval": "displayDebugX"} +,{ + "constname": "k_pch_SteamVR_DisplayDebugY_Int32","consttype": "const char *const", "constval": "displayDebugY"} +,{ + "constname": "k_pch_SteamVR_SendSystemButtonToAllApps_Bool","consttype": "const char *const", "constval": "sendSystemButtonToAllApps"} +,{ + "constname": "k_pch_SteamVR_LogLevel_Int32","consttype": "const char *const", "constval": "loglevel"} +,{ + "constname": "k_pch_SteamVR_IPD_Float","consttype": "const char *const", "constval": "ipd"} +,{ + "constname": "k_pch_SteamVR_Background_String","consttype": "const char *const", "constval": "background"} +,{ + "constname": 
"k_pch_SteamVR_BackgroundCameraHeight_Float","consttype": "const char *const", "constval": "backgroundCameraHeight"} +,{ + "constname": "k_pch_SteamVR_BackgroundDomeRadius_Float","consttype": "const char *const", "constval": "backgroundDomeRadius"} +,{ + "constname": "k_pch_SteamVR_Environment_String","consttype": "const char *const", "constval": "environment"} +,{ + "constname": "k_pch_SteamVR_GridColor_String","consttype": "const char *const", "constval": "gridColor"} +,{ + "constname": "k_pch_SteamVR_PlayAreaColor_String","consttype": "const char *const", "constval": "playAreaColor"} +,{ + "constname": "k_pch_SteamVR_ShowStage_Bool","consttype": "const char *const", "constval": "showStage"} +,{ + "constname": "k_pch_SteamVR_ActivateMultipleDrivers_Bool","consttype": "const char *const", "constval": "activateMultipleDrivers"} +,{ + "constname": "k_pch_SteamVR_PowerOffOnExit_Bool","consttype": "const char *const", "constval": "powerOffOnExit"} +,{ + "constname": "k_pch_SteamVR_StandbyAppRunningTimeout_Float","consttype": "const char *const", "constval": "standbyAppRunningTimeout"} +,{ + "constname": "k_pch_SteamVR_StandbyNoAppTimeout_Float","consttype": "const char *const", "constval": "standbyNoAppTimeout"} +,{ + "constname": "k_pch_SteamVR_DirectMode_Bool","consttype": "const char *const", "constval": "directMode"} +,{ + "constname": "k_pch_SteamVR_DirectModeEdidVid_Int32","consttype": "const char *const", "constval": "directModeEdidVid"} +,{ + "constname": "k_pch_SteamVR_DirectModeEdidPid_Int32","consttype": "const char *const", "constval": "directModeEdidPid"} +,{ + "constname": "k_pch_SteamVR_UsingSpeakers_Bool","consttype": "const char *const", "constval": "usingSpeakers"} +,{ + "constname": "k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float","consttype": "const char *const", "constval": "speakersForwardYawOffsetDegrees"} +,{ + "constname": "k_pch_SteamVR_BaseStationPowerManagement_Bool","consttype": "const char *const", "constval": "basestationPowerManagement"} +,{ + "constname": "k_pch_SteamVR_NeverKillProcesses_Bool","consttype": "const char *const", "constval": "neverKillProcesses"} +,{ + "constname": "k_pch_SteamVR_RenderTargetMultiplier_Float","consttype": "const char *const", "constval": "renderTargetMultiplier"} +,{ + "constname": "k_pch_SteamVR_AllowReprojection_Bool","consttype": "const char *const", "constval": "allowReprojection"} +,{ + "constname": "k_pch_SteamVR_ForceReprojection_Bool","consttype": "const char *const", "constval": "forceReprojection"} +,{ + "constname": "k_pch_SteamVR_ForceFadeOnBadTracking_Bool","consttype": "const char *const", "constval": "forceFadeOnBadTracking"} +,{ + "constname": "k_pch_SteamVR_DefaultMirrorView_Int32","consttype": "const char *const", "constval": "defaultMirrorView"} +,{ + "constname": "k_pch_SteamVR_ShowMirrorView_Bool","consttype": "const char *const", "constval": "showMirrorView"} +,{ + "constname": "k_pch_Lighthouse_Section","consttype": "const char *const", "constval": "driver_lighthouse"} +,{ + "constname": "k_pch_Lighthouse_DisableIMU_Bool","consttype": "const char *const", "constval": "disableimu"} +,{ + "constname": "k_pch_Lighthouse_UseDisambiguation_String","consttype": "const char *const", "constval": "usedisambiguation"} +,{ + "constname": "k_pch_Lighthouse_DisambiguationDebug_Int32","consttype": "const char *const", "constval": "disambiguationdebug"} +,{ + "constname": "k_pch_Lighthouse_PrimaryBasestation_Int32","consttype": "const char *const", "constval": "primarybasestation"} +,{ + "constname": 
"k_pch_Lighthouse_LighthouseName_String","consttype": "const char *const", "constval": "lighthousename"} +,{ + "constname": "k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float","consttype": "const char *const", "constval": "maxincidenceangledegrees"} +,{ + "constname": "k_pch_Lighthouse_UseLighthouseDirect_Bool","consttype": "const char *const", "constval": "uselighthousedirect"} +,{ + "constname": "k_pch_Lighthouse_DBHistory_Bool","consttype": "const char *const", "constval": "dbhistory"} +,{ + "constname": "k_pch_Null_Section","consttype": "const char *const", "constval": "driver_null"} +,{ + "constname": "k_pch_Null_EnableNullDriver_Bool","consttype": "const char *const", "constval": "enable"} +,{ + "constname": "k_pch_Null_SerialNumber_String","consttype": "const char *const", "constval": "serialNumber"} +,{ + "constname": "k_pch_Null_ModelNumber_String","consttype": "const char *const", "constval": "modelNumber"} +,{ + "constname": "k_pch_Null_WindowX_Int32","consttype": "const char *const", "constval": "windowX"} +,{ + "constname": "k_pch_Null_WindowY_Int32","consttype": "const char *const", "constval": "windowY"} +,{ + "constname": "k_pch_Null_WindowWidth_Int32","consttype": "const char *const", "constval": "windowWidth"} +,{ + "constname": "k_pch_Null_WindowHeight_Int32","consttype": "const char *const", "constval": "windowHeight"} +,{ + "constname": "k_pch_Null_RenderWidth_Int32","consttype": "const char *const", "constval": "renderWidth"} +,{ + "constname": "k_pch_Null_RenderHeight_Int32","consttype": "const char *const", "constval": "renderHeight"} +,{ + "constname": "k_pch_Null_SecondsFromVsyncToPhotons_Float","consttype": "const char *const", "constval": "secondsFromVsyncToPhotons"} +,{ + "constname": "k_pch_Null_DisplayFrequency_Float","consttype": "const char *const", "constval": "displayFrequency"} +,{ + "constname": "k_pch_UserInterface_Section","consttype": "const char *const", "constval": "userinterface"} +,{ + "constname": "k_pch_UserInterface_StatusAlwaysOnTop_Bool","consttype": "const char *const", "constval": "StatusAlwaysOnTop"} +,{ + "constname": "k_pch_UserInterface_EnableScreenshots_Bool","consttype": "const char *const", "constval": "EnableScreenshots"} +,{ + "constname": "k_pch_Notifications_Section","consttype": "const char *const", "constval": "notifications"} +,{ + "constname": "k_pch_Notifications_DoNotDisturb_Bool","consttype": "const char *const", "constval": "DoNotDisturb"} +,{ + "constname": "k_pch_Keyboard_Section","consttype": "const char *const", "constval": "keyboard"} +,{ + "constname": "k_pch_Keyboard_TutorialCompletions","consttype": "const char *const", "constval": "TutorialCompletions"} +,{ + "constname": "k_pch_Keyboard_ScaleX","consttype": "const char *const", "constval": "ScaleX"} +,{ + "constname": "k_pch_Keyboard_ScaleY","consttype": "const char *const", "constval": "ScaleY"} +,{ + "constname": "k_pch_Keyboard_OffsetLeftX","consttype": "const char *const", "constval": "OffsetLeftX"} +,{ + "constname": "k_pch_Keyboard_OffsetRightX","consttype": "const char *const", "constval": "OffsetRightX"} +,{ + "constname": "k_pch_Keyboard_OffsetY","consttype": "const char *const", "constval": "OffsetY"} +,{ + "constname": "k_pch_Keyboard_Smoothing","consttype": "const char *const", "constval": "Smoothing"} +,{ + "constname": "k_pch_Perf_Section","consttype": "const char *const", "constval": "perfcheck"} +,{ + "constname": "k_pch_Perf_HeuristicActive_Bool","consttype": "const char *const", "constval": "heuristicActive"} +,{ + "constname": 
"k_pch_Perf_NotifyInHMD_Bool","consttype": "const char *const", "constval": "warnInHMD"} +,{ + "constname": "k_pch_Perf_NotifyOnlyOnce_Bool","consttype": "const char *const", "constval": "warnOnlyOnce"} +,{ + "constname": "k_pch_Perf_AllowTimingStore_Bool","consttype": "const char *const", "constval": "allowTimingStore"} +,{ + "constname": "k_pch_Perf_SaveTimingsOnExit_Bool","consttype": "const char *const", "constval": "saveTimingsOnExit"} +,{ + "constname": "k_pch_Perf_TestData_Float","consttype": "const char *const", "constval": "perfTestData"} +,{ + "constname": "k_pch_CollisionBounds_Section","consttype": "const char *const", "constval": "collisionBounds"} +,{ + "constname": "k_pch_CollisionBounds_Style_Int32","consttype": "const char *const", "constval": "CollisionBoundsStyle"} +,{ + "constname": "k_pch_CollisionBounds_GroundPerimeterOn_Bool","consttype": "const char *const", "constval": "CollisionBoundsGroundPerimeterOn"} +,{ + "constname": "k_pch_CollisionBounds_CenterMarkerOn_Bool","consttype": "const char *const", "constval": "CollisionBoundsCenterMarkerOn"} +,{ + "constname": "k_pch_CollisionBounds_PlaySpaceOn_Bool","consttype": "const char *const", "constval": "CollisionBoundsPlaySpaceOn"} +,{ + "constname": "k_pch_CollisionBounds_FadeDistance_Float","consttype": "const char *const", "constval": "CollisionBoundsFadeDistance"} +,{ + "constname": "k_pch_CollisionBounds_ColorGammaR_Int32","consttype": "const char *const", "constval": "CollisionBoundsColorGammaR"} +,{ + "constname": "k_pch_CollisionBounds_ColorGammaG_Int32","consttype": "const char *const", "constval": "CollisionBoundsColorGammaG"} +,{ + "constname": "k_pch_CollisionBounds_ColorGammaB_Int32","consttype": "const char *const", "constval": "CollisionBoundsColorGammaB"} +,{ + "constname": "k_pch_CollisionBounds_ColorGammaA_Int32","consttype": "const char *const", "constval": "CollisionBoundsColorGammaA"} +,{ + "constname": "k_pch_Camera_Section","consttype": "const char *const", "constval": "camera"} +,{ + "constname": "k_pch_Camera_EnableCamera_Bool","consttype": "const char *const", "constval": "enableCamera"} +,{ + "constname": "k_pch_Camera_EnableCameraInDashboard_Bool","consttype": "const char *const", "constval": "enableCameraInDashboard"} +,{ + "constname": "k_pch_Camera_EnableCameraForCollisionBounds_Bool","consttype": "const char *const", "constval": "enableCameraForCollisionBounds"} +,{ + "constname": "k_pch_Camera_EnableCameraForRoomView_Bool","consttype": "const char *const", "constval": "enableCameraForRoomView"} +,{ + "constname": "k_pch_Camera_BoundsColorGammaR_Int32","consttype": "const char *const", "constval": "cameraBoundsColorGammaR"} +,{ + "constname": "k_pch_Camera_BoundsColorGammaG_Int32","consttype": "const char *const", "constval": "cameraBoundsColorGammaG"} +,{ + "constname": "k_pch_Camera_BoundsColorGammaB_Int32","consttype": "const char *const", "constval": "cameraBoundsColorGammaB"} +,{ + "constname": "k_pch_Camera_BoundsColorGammaA_Int32","consttype": "const char *const", "constval": "cameraBoundsColorGammaA"} +,{ + "constname": "k_pch_audio_Section","consttype": "const char *const", "constval": "audio"} +,{ + "constname": "k_pch_audio_OnPlaybackDevice_String","consttype": "const char *const", "constval": "onPlaybackDevice"} +,{ + "constname": "k_pch_audio_OnRecordDevice_String","consttype": "const char *const", "constval": "onRecordDevice"} +,{ + "constname": "k_pch_audio_OnPlaybackMirrorDevice_String","consttype": "const char *const", "constval": "onPlaybackMirrorDevice"} +,{ + 
"constname": "k_pch_audio_OffPlaybackDevice_String","consttype": "const char *const", "constval": "offPlaybackDevice"} +,{ + "constname": "k_pch_audio_OffRecordDevice_String","consttype": "const char *const", "constval": "offRecordDevice"} +,{ + "constname": "k_pch_audio_VIVEHDMIGain","consttype": "const char *const", "constval": "viveHDMIGain"} +,{ + "constname": "k_pch_modelskin_Section","consttype": "const char *const", "constval": "modelskins"} +,{ + "constname": "IVRScreenshots_Version","consttype": "const char *const", "constval": "IVRScreenshots_001"} +], +"structs":[{"struct": "vr::HmdMatrix34_t","fields": [ +{ "fieldname": "m", "fieldtype": "float [3][4]"}]} +,{"struct": "vr::HmdMatrix44_t","fields": [ +{ "fieldname": "m", "fieldtype": "float [4][4]"}]} +,{"struct": "vr::HmdVector3_t","fields": [ +{ "fieldname": "v", "fieldtype": "float [3]"}]} +,{"struct": "vr::HmdVector4_t","fields": [ +{ "fieldname": "v", "fieldtype": "float [4]"}]} +,{"struct": "vr::HmdVector3d_t","fields": [ +{ "fieldname": "v", "fieldtype": "double [3]"}]} +,{"struct": "vr::HmdVector2_t","fields": [ +{ "fieldname": "v", "fieldtype": "float [2]"}]} +,{"struct": "vr::HmdQuaternion_t","fields": [ +{ "fieldname": "w", "fieldtype": "double"}, +{ "fieldname": "x", "fieldtype": "double"}, +{ "fieldname": "y", "fieldtype": "double"}, +{ "fieldname": "z", "fieldtype": "double"}]} +,{"struct": "vr::HmdColor_t","fields": [ +{ "fieldname": "r", "fieldtype": "float"}, +{ "fieldname": "g", "fieldtype": "float"}, +{ "fieldname": "b", "fieldtype": "float"}, +{ "fieldname": "a", "fieldtype": "float"}]} +,{"struct": "vr::HmdQuad_t","fields": [ +{ "fieldname": "vCorners", "fieldtype": "struct vr::HmdVector3_t [4]"}]} +,{"struct": "vr::HmdRect2_t","fields": [ +{ "fieldname": "vTopLeft", "fieldtype": "struct vr::HmdVector2_t"}, +{ "fieldname": "vBottomRight", "fieldtype": "struct vr::HmdVector2_t"}]} +,{"struct": "vr::DistortionCoordinates_t","fields": [ +{ "fieldname": "rfRed", "fieldtype": "float [2]"}, +{ "fieldname": "rfGreen", "fieldtype": "float [2]"}, +{ "fieldname": "rfBlue", "fieldtype": "float [2]"}]} +,{"struct": "vr::Texture_t","fields": [ +{ "fieldname": "handle", "fieldtype": "void *"}, +{ "fieldname": "eType", "fieldtype": "enum vr::EGraphicsAPIConvention"}, +{ "fieldname": "eColorSpace", "fieldtype": "enum vr::EColorSpace"}]} +,{"struct": "vr::TrackedDevicePose_t","fields": [ +{ "fieldname": "mDeviceToAbsoluteTracking", "fieldtype": "struct vr::HmdMatrix34_t"}, +{ "fieldname": "vVelocity", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "vAngularVelocity", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "eTrackingResult", "fieldtype": "enum vr::ETrackingResult"}, +{ "fieldname": "bPoseIsValid", "fieldtype": "_Bool"}, +{ "fieldname": "bDeviceIsConnected", "fieldtype": "_Bool"}]} +,{"struct": "vr::VRTextureBounds_t","fields": [ +{ "fieldname": "uMin", "fieldtype": "float"}, +{ "fieldname": "vMin", "fieldtype": "float"}, +{ "fieldname": "uMax", "fieldtype": "float"}, +{ "fieldname": "vMax", "fieldtype": "float"}]} +,{"struct": "vr::VREvent_Controller_t","fields": [ +{ "fieldname": "button", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VREvent_Mouse_t","fields": [ +{ "fieldname": "x", "fieldtype": "float"}, +{ "fieldname": "y", "fieldtype": "float"}, +{ "fieldname": "button", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VREvent_Scroll_t","fields": [ +{ "fieldname": "xdelta", "fieldtype": "float"}, +{ "fieldname": "ydelta", "fieldtype": "float"}, +{ "fieldname": "repeatCount", "fieldtype": 
"uint32_t"}]} +,{"struct": "vr::VREvent_TouchPadMove_t","fields": [ +{ "fieldname": "bFingerDown", "fieldtype": "_Bool"}, +{ "fieldname": "flSecondsFingerDown", "fieldtype": "float"}, +{ "fieldname": "fValueXFirst", "fieldtype": "float"}, +{ "fieldname": "fValueYFirst", "fieldtype": "float"}, +{ "fieldname": "fValueXRaw", "fieldtype": "float"}, +{ "fieldname": "fValueYRaw", "fieldtype": "float"}]} +,{"struct": "vr::VREvent_Notification_t","fields": [ +{ "fieldname": "ulUserValue", "fieldtype": "uint64_t"}, +{ "fieldname": "notificationId", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VREvent_Process_t","fields": [ +{ "fieldname": "pid", "fieldtype": "uint32_t"}, +{ "fieldname": "oldPid", "fieldtype": "uint32_t"}, +{ "fieldname": "bForced", "fieldtype": "_Bool"}]} +,{"struct": "vr::VREvent_Overlay_t","fields": [ +{ "fieldname": "overlayHandle", "fieldtype": "uint64_t"}]} +,{"struct": "vr::VREvent_Status_t","fields": [ +{ "fieldname": "statusState", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VREvent_Keyboard_t","fields": [ +{ "fieldname": "cNewInput", "fieldtype": "char [8]"}, +{ "fieldname": "uUserValue", "fieldtype": "uint64_t"}]} +,{"struct": "vr::VREvent_Ipd_t","fields": [ +{ "fieldname": "ipdMeters", "fieldtype": "float"}]} +,{"struct": "vr::VREvent_Chaperone_t","fields": [ +{ "fieldname": "m_nPreviousUniverse", "fieldtype": "uint64_t"}, +{ "fieldname": "m_nCurrentUniverse", "fieldtype": "uint64_t"}]} +,{"struct": "vr::VREvent_Reserved_t","fields": [ +{ "fieldname": "reserved0", "fieldtype": "uint64_t"}, +{ "fieldname": "reserved1", "fieldtype": "uint64_t"}]} +,{"struct": "vr::VREvent_PerformanceTest_t","fields": [ +{ "fieldname": "m_nFidelityLevel", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VREvent_SeatedZeroPoseReset_t","fields": [ +{ "fieldname": "bResetBySystemMenu", "fieldtype": "_Bool"}]} +,{"struct": "vr::VREvent_Screenshot_t","fields": [ +{ "fieldname": "handle", "fieldtype": "uint32_t"}, +{ "fieldname": "type", "fieldtype": "uint32_t"}]} +,{"struct": "vr::(anonymous)","fields": [ +{ "fieldname": "reserved", "fieldtype": "struct vr::VREvent_Reserved_t"}, +{ "fieldname": "controller", "fieldtype": "struct vr::VREvent_Controller_t"}, +{ "fieldname": "mouse", "fieldtype": "struct vr::VREvent_Mouse_t"}, +{ "fieldname": "scroll", "fieldtype": "struct vr::VREvent_Scroll_t"}, +{ "fieldname": "process", "fieldtype": "struct vr::VREvent_Process_t"}, +{ "fieldname": "notification", "fieldtype": "struct vr::VREvent_Notification_t"}, +{ "fieldname": "overlay", "fieldtype": "struct vr::VREvent_Overlay_t"}, +{ "fieldname": "status", "fieldtype": "struct vr::VREvent_Status_t"}, +{ "fieldname": "keyboard", "fieldtype": "struct vr::VREvent_Keyboard_t"}, +{ "fieldname": "ipd", "fieldtype": "struct vr::VREvent_Ipd_t"}, +{ "fieldname": "chaperone", "fieldtype": "struct vr::VREvent_Chaperone_t"}, +{ "fieldname": "performanceTest", "fieldtype": "struct vr::VREvent_PerformanceTest_t"}, +{ "fieldname": "touchPadMove", "fieldtype": "struct vr::VREvent_TouchPadMove_t"}, +{ "fieldname": "seatedZeroPoseReset", "fieldtype": "struct vr::VREvent_SeatedZeroPoseReset_t"}, +{ "fieldname": "screenshot", "fieldtype": "struct vr::VREvent_Screenshot_t"}]} +,{"struct": "vr::VREvent_t","fields": [ +{ "fieldname": "eventType", "fieldtype": "uint32_t"}, +{ "fieldname": "trackedDeviceIndex", "fieldtype": "TrackedDeviceIndex_t"}, +{ "fieldname": "eventAgeSeconds", "fieldtype": "float"}, +{ "fieldname": "data", "fieldtype": "VREvent_Data_t"}]} +,{"struct": "vr::HiddenAreaMesh_t","fields": [ +{ "fieldname": 
"pVertexData", "fieldtype": "const struct vr::HmdVector2_t *"}, +{ "fieldname": "unTriangleCount", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VRControllerAxis_t","fields": [ +{ "fieldname": "x", "fieldtype": "float"}, +{ "fieldname": "y", "fieldtype": "float"}]} +,{"struct": "vr::VRControllerState001_t","fields": [ +{ "fieldname": "unPacketNum", "fieldtype": "uint32_t"}, +{ "fieldname": "ulButtonPressed", "fieldtype": "uint64_t"}, +{ "fieldname": "ulButtonTouched", "fieldtype": "uint64_t"}, +{ "fieldname": "rAxis", "fieldtype": "struct vr::VRControllerAxis_t [5]"}]} +,{"struct": "vr::Compositor_OverlaySettings","fields": [ +{ "fieldname": "size", "fieldtype": "uint32_t"}, +{ "fieldname": "curved", "fieldtype": "_Bool"}, +{ "fieldname": "antialias", "fieldtype": "_Bool"}, +{ "fieldname": "scale", "fieldtype": "float"}, +{ "fieldname": "distance", "fieldtype": "float"}, +{ "fieldname": "alpha", "fieldtype": "float"}, +{ "fieldname": "uOffset", "fieldtype": "float"}, +{ "fieldname": "vOffset", "fieldtype": "float"}, +{ "fieldname": "uScale", "fieldtype": "float"}, +{ "fieldname": "vScale", "fieldtype": "float"}, +{ "fieldname": "gridDivs", "fieldtype": "float"}, +{ "fieldname": "gridWidth", "fieldtype": "float"}, +{ "fieldname": "gridScale", "fieldtype": "float"}, +{ "fieldname": "transform", "fieldtype": "struct vr::HmdMatrix44_t"}]} +,{"struct": "vr::CameraVideoStreamFrameHeader_t","fields": [ +{ "fieldname": "eFrameType", "fieldtype": "enum vr::EVRTrackedCameraFrameType"}, +{ "fieldname": "nWidth", "fieldtype": "uint32_t"}, +{ "fieldname": "nHeight", "fieldtype": "uint32_t"}, +{ "fieldname": "nBytesPerPixel", "fieldtype": "uint32_t"}, +{ "fieldname": "nFrameSequence", "fieldtype": "uint32_t"}, +{ "fieldname": "standingTrackedDevicePose", "fieldtype": "struct vr::TrackedDevicePose_t"}]} +,{"struct": "vr::AppOverrideKeys_t","fields": [ +{ "fieldname": "pchKey", "fieldtype": "const char *"}, +{ "fieldname": "pchValue", "fieldtype": "const char *"}]} +,{"struct": "vr::Compositor_FrameTiming","fields": [ +{ "fieldname": "m_nSize", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nFrameIndex", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumFramePresents", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumDroppedFrames", "fieldtype": "uint32_t"}, +{ "fieldname": "m_flSystemTimeInSeconds", "fieldtype": "double"}, +{ "fieldname": "m_flSceneRenderGpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flTotalRenderGpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorRenderGpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorRenderCpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorIdleCpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flClientFrameIntervalMs", "fieldtype": "float"}, +{ "fieldname": "m_flPresentCallCpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flWaitForPresentCpuMs", "fieldtype": "float"}, +{ "fieldname": "m_flSubmitFrameMs", "fieldtype": "float"}, +{ "fieldname": "m_flWaitGetPosesCalledMs", "fieldtype": "float"}, +{ "fieldname": "m_flNewPosesReadyMs", "fieldtype": "float"}, +{ "fieldname": "m_flNewFrameReadyMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorUpdateStartMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorUpdateEndMs", "fieldtype": "float"}, +{ "fieldname": "m_flCompositorRenderStartMs", "fieldtype": "float"}, +{ "fieldname": "m_HmdPose", "fieldtype": "vr::TrackedDevicePose_t"}, +{ "fieldname": "m_nFidelityLevel", "fieldtype": "int32_t"}, +{ "fieldname": "m_nReprojectionFlags", "fieldtype": "uint32_t"}]} +,{"struct": 
"vr::Compositor_CumulativeStats","fields": [ +{ "fieldname": "m_nPid", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumFramePresents", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumDroppedFrames", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumReprojectedFrames", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumFramePresentsOnStartup", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumDroppedFramesOnStartup", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumReprojectedFramesOnStartup", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumLoading", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumFramePresentsLoading", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumDroppedFramesLoading", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumReprojectedFramesLoading", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumTimedOut", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumFramePresentsTimedOut", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumDroppedFramesTimedOut", "fieldtype": "uint32_t"}, +{ "fieldname": "m_nNumReprojectedFramesTimedOut", "fieldtype": "uint32_t"}]} +,{"struct": "vr::VROverlayIntersectionParams_t","fields": [ +{ "fieldname": "vSource", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "vDirection", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "eOrigin", "fieldtype": "enum vr::ETrackingUniverseOrigin"}]} +,{"struct": "vr::VROverlayIntersectionResults_t","fields": [ +{ "fieldname": "vPoint", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "vNormal", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "vUVs", "fieldtype": "struct vr::HmdVector2_t"}, +{ "fieldname": "fDistance", "fieldtype": "float"}]} +,{"struct": "vr::RenderModel_ComponentState_t","fields": [ +{ "fieldname": "mTrackingToComponentRenderModel", "fieldtype": "struct vr::HmdMatrix34_t"}, +{ "fieldname": "mTrackingToComponentLocal", "fieldtype": "struct vr::HmdMatrix34_t"}, +{ "fieldname": "uProperties", "fieldtype": "VRComponentProperties"}]} +,{"struct": "vr::RenderModel_Vertex_t","fields": [ +{ "fieldname": "vPosition", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "vNormal", "fieldtype": "struct vr::HmdVector3_t"}, +{ "fieldname": "rfTextureCoord", "fieldtype": "float [2]"}]} +,{"struct": "vr::RenderModel_TextureMap_t","fields": [ +{ "fieldname": "unWidth", "fieldtype": "uint16_t"}, +{ "fieldname": "unHeight", "fieldtype": "uint16_t"}, +{ "fieldname": "rubTextureMapData", "fieldtype": "const uint8_t *"}]} +,{"struct": "vr::RenderModel_t","fields": [ +{ "fieldname": "rVertexData", "fieldtype": "const struct vr::RenderModel_Vertex_t *"}, +{ "fieldname": "unVertexCount", "fieldtype": "uint32_t"}, +{ "fieldname": "rIndexData", "fieldtype": "const uint16_t *"}, +{ "fieldname": "unTriangleCount", "fieldtype": "uint32_t"}, +{ "fieldname": "diffuseTextureId", "fieldtype": "TextureID_t"}]} +,{"struct": "vr::RenderModel_ControllerMode_State_t","fields": [ +{ "fieldname": "bScrollWheelVisible", "fieldtype": "_Bool"}]} +,{"struct": "vr::NotificationBitmap_t","fields": [ +{ "fieldname": "m_pImageData", "fieldtype": "void *"}, +{ "fieldname": "m_nWidth", "fieldtype": "int32_t"}, +{ "fieldname": "m_nHeight", "fieldtype": "int32_t"}, +{ "fieldname": "m_nBytesPerPixel", "fieldtype": "int32_t"}]} +,{"struct": "vr::COpenVRContext","fields": [ +{ "fieldname": "m_pVRSystem", "fieldtype": "class vr::IVRSystem *"}, +{ "fieldname": "m_pVRChaperone", "fieldtype": "class vr::IVRChaperone *"}, +{ "fieldname": "m_pVRChaperoneSetup", "fieldtype": "class vr::IVRChaperoneSetup *"}, +{ 
"fieldname": "m_pVRCompositor", "fieldtype": "class vr::IVRCompositor *"}, +{ "fieldname": "m_pVROverlay", "fieldtype": "class vr::IVROverlay *"}, +{ "fieldname": "m_pVRRenderModels", "fieldtype": "class vr::IVRRenderModels *"}, +{ "fieldname": "m_pVRExtendedDisplay", "fieldtype": "class vr::IVRExtendedDisplay *"}, +{ "fieldname": "m_pVRSettings", "fieldtype": "class vr::IVRSettings *"}, +{ "fieldname": "m_pVRApplications", "fieldtype": "class vr::IVRApplications *"}, +{ "fieldname": "m_pVRTrackedCamera", "fieldtype": "class vr::IVRTrackedCamera *"}, +{ "fieldname": "m_pVRScreenshots", "fieldtype": "class vr::IVRScreenshots *"}]} +], +"methods":[{ + "classname": "vr::IVRSystem", + "methodname": "GetRecommendedRenderTargetSize", + "returntype": "void", + "params": [ +{ "paramname": "pnWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnHeight" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetProjectionMatrix", + "returntype": "struct vr::HmdMatrix44_t", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "fNearZ" ,"paramtype": "float"}, +{ "paramname": "fFarZ" ,"paramtype": "float"}, +{ "paramname": "eProjType" ,"paramtype": "vr::EGraphicsAPIConvention"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetProjectionRaw", + "returntype": "void", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "pfLeft" ,"paramtype": "float *"}, +{ "paramname": "pfRight" ,"paramtype": "float *"}, +{ "paramname": "pfTop" ,"paramtype": "float *"}, +{ "paramname": "pfBottom" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "ComputeDistortion", + "returntype": "struct vr::DistortionCoordinates_t", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "fU" ,"paramtype": "float"}, +{ "paramname": "fV" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetEyeToHeadTransform", + "returntype": "struct vr::HmdMatrix34_t", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetTimeSinceLastVsync", + "returntype": "bool", + "params": [ +{ "paramname": "pfSecondsSinceLastVsync" ,"paramtype": "float *"}, +{ "paramname": "pulFrameCounter" ,"paramtype": "uint64_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetD3D9AdapterIndex", + "returntype": "int32_t" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetDXGIOutputInfo", + "returntype": "void", + "params": [ +{ "paramname": "pnAdapterIndex" ,"paramtype": "int32_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "IsDisplayOnDesktop", + "returntype": "bool" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "SetDisplayVisibility", + "returntype": "bool", + "params": [ +{ "paramname": "bIsVisibleOnDesktop" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetDeviceToAbsoluteTrackingPose", + "returntype": "void", + "params": [ +{ "paramname": "eOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "fPredictedSecondsToPhotonsFromNow" ,"paramtype": "float"}, +{ "paramname": "pTrackedDevicePoseArray" ,"array_count": "unTrackedDevicePoseArrayCount" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "unTrackedDevicePoseArrayCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "ResetSeatedZeroPose", + "returntype": "void" +} +,{ + "classname": 
"vr::IVRSystem", + "methodname": "GetSeatedZeroPoseToStandingAbsoluteTrackingPose", + "returntype": "struct vr::HmdMatrix34_t" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetRawZeroPoseToStandingAbsoluteTrackingPose", + "returntype": "struct vr::HmdMatrix34_t" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetSortedTrackedDeviceIndicesOfClass", + "returntype": "uint32_t", + "params": [ +{ "paramname": "eTrackedDeviceClass" ,"paramtype": "vr::ETrackedDeviceClass"}, +{ "paramname": "punTrackedDeviceIndexArray" ,"array_count": "unTrackedDeviceIndexArrayCount" ,"paramtype": "vr::TrackedDeviceIndex_t *"}, +{ "paramname": "unTrackedDeviceIndexArrayCount" ,"paramtype": "uint32_t"}, +{ "paramname": "unRelativeToTrackedDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetTrackedDeviceActivityLevel", + "returntype": "vr::EDeviceActivityLevel", + "params": [ +{ "paramname": "unDeviceId" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "ApplyTransform", + "returntype": "void", + "params": [ +{ "paramname": "pOutputPose" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "pTrackedDevicePose" ,"paramtype": "const struct vr::TrackedDevicePose_t *"}, +{ "paramname": "pTransform" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetTrackedDeviceIndexForControllerRole", + "returntype": "vr::TrackedDeviceIndex_t", + "params": [ +{ "paramname": "unDeviceType" ,"paramtype": "vr::ETrackedControllerRole"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetControllerRoleForTrackedDeviceIndex", + "returntype": "vr::ETrackedControllerRole", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetTrackedDeviceClass", + "returntype": "vr::ETrackedDeviceClass", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "IsTrackedDeviceConnected", + "returntype": "bool", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetBoolTrackedDeviceProperty", + "returntype": "bool", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": "vr::ETrackedDeviceProperty"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetFloatTrackedDeviceProperty", + "returntype": "float", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": "vr::ETrackedDeviceProperty"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetInt32TrackedDeviceProperty", + "returntype": "int32_t", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": "vr::ETrackedDeviceProperty"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetUint64TrackedDeviceProperty", + "returntype": "uint64_t", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": 
"vr::ETrackedDeviceProperty"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetMatrix34TrackedDeviceProperty", + "returntype": "struct vr::HmdMatrix34_t", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": "vr::ETrackedDeviceProperty"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetStringTrackedDeviceProperty", + "returntype": "uint32_t", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "prop" ,"paramtype": "vr::ETrackedDeviceProperty"}, +{ "paramname": "pchValue" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unBufferSize" ,"paramtype": "uint32_t"}, +{ "paramname": "pError" ,"paramtype": "vr::ETrackedPropertyError *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetPropErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "error" ,"paramtype": "vr::ETrackedPropertyError"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "PollNextEvent", + "returntype": "bool", + "params": [ +{ "paramname": "pEvent" ,"paramtype": "struct vr::VREvent_t *"}, +{ "paramname": "uncbVREvent" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "PollNextEventWithPose", + "returntype": "bool", + "params": [ +{ "paramname": "eOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "pEvent" ,"paramtype": "struct vr::VREvent_t *"}, +{ "paramname": "uncbVREvent" ,"paramtype": "uint32_t"}, +{ "paramname": "pTrackedDevicePose" ,"paramtype": "vr::TrackedDevicePose_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetEventTypeNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "eType" ,"paramtype": "vr::EVREventType"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetHiddenAreaMesh", + "returntype": "struct vr::HiddenAreaMesh_t", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetControllerState", + "returntype": "bool", + "params": [ +{ "paramname": "unControllerDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pControllerState" ,"paramtype": "vr::VRControllerState_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetControllerStateWithPose", + "returntype": "bool", + "params": [ +{ "paramname": "eOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "unControllerDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pControllerState" ,"paramtype": "vr::VRControllerState_t *"}, +{ "paramname": "pTrackedDevicePose" ,"paramtype": "struct vr::TrackedDevicePose_t *"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "TriggerHapticPulse", + "returntype": "void", + "params": [ +{ "paramname": "unControllerDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "unAxisId" ,"paramtype": "uint32_t"}, +{ "paramname": "usDurationMicroSec" ,"paramtype": "unsigned short"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetButtonIdNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "eButtonId" ,"paramtype": "vr::EVRButtonId"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "GetControllerAxisTypeNameFromEnum", + "returntype": "const char *", + "params": [ 
+{ "paramname": "eAxisType" ,"paramtype": "vr::EVRControllerAxisType"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "CaptureInputFocus", + "returntype": "bool" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "ReleaseInputFocus", + "returntype": "void" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "IsInputFocusCapturedByAnotherProcess", + "returntype": "bool" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "DriverDebugRequest", + "returntype": "uint32_t", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pchRequest" ,"paramtype": "const char *"}, +{ "paramname": "pchResponseBuffer" ,"paramtype": "char *"}, +{ "paramname": "unResponseBufferSize" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "PerformFirmwareUpdate", + "returntype": "vr::EVRFirmwareError", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVRSystem", + "methodname": "AcknowledgeQuit_Exiting", + "returntype": "void" +} +,{ + "classname": "vr::IVRSystem", + "methodname": "AcknowledgeQuit_UserPrompt", + "returntype": "void" +} +,{ + "classname": "vr::IVRExtendedDisplay", + "methodname": "GetWindowBounds", + "returntype": "void", + "params": [ +{ "paramname": "pnX" ,"paramtype": "int32_t *"}, +{ "paramname": "pnY" ,"paramtype": "int32_t *"}, +{ "paramname": "pnWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnHeight" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRExtendedDisplay", + "methodname": "GetEyeOutputViewport", + "returntype": "void", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "pnX" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnY" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnHeight" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRExtendedDisplay", + "methodname": "GetDXGIOutputInfo", + "returntype": "void", + "params": [ +{ "paramname": "pnAdapterIndex" ,"paramtype": "int32_t *"}, +{ "paramname": "pnAdapterOutputIndex" ,"paramtype": "int32_t *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "GetCameraErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "eCameraError" ,"paramtype": "vr::EVRTrackedCameraError"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "HasCamera", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "nDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pHasCamera" ,"paramtype": "bool *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "GetCameraFrameSize", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "nDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "eFrameType" ,"paramtype": "vr::EVRTrackedCameraFrameType"}, +{ "paramname": "pnWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnHeight" ,"paramtype": "uint32_t *"}, +{ "paramname": "pnFrameBufferSize" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "GetCameraIntrinisics", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "nDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "eFrameType" ,"paramtype": "vr::EVRTrackedCameraFrameType"}, +{ "paramname": "pFocalLength" ,"paramtype": "vr::HmdVector2_t *"}, +{ "paramname": "pCenter" ,"paramtype": 
"vr::HmdVector2_t *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "GetCameraProjection", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "nDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "eFrameType" ,"paramtype": "vr::EVRTrackedCameraFrameType"}, +{ "paramname": "flZNear" ,"paramtype": "float"}, +{ "paramname": "flZFar" ,"paramtype": "float"}, +{ "paramname": "pProjection" ,"paramtype": "vr::HmdMatrix44_t *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "AcquireVideoStreamingService", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "nDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pHandle" ,"paramtype": "vr::TrackedCameraHandle_t *"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "ReleaseVideoStreamingService", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "hTrackedCamera" ,"paramtype": "vr::TrackedCameraHandle_t"} + ] +} +,{ + "classname": "vr::IVRTrackedCamera", + "methodname": "GetVideoStreamFrameBuffer", + "returntype": "vr::EVRTrackedCameraError", + "params": [ +{ "paramname": "hTrackedCamera" ,"paramtype": "vr::TrackedCameraHandle_t"}, +{ "paramname": "eFrameType" ,"paramtype": "vr::EVRTrackedCameraFrameType"}, +{ "paramname": "pFrameBuffer" ,"paramtype": "void *"}, +{ "paramname": "nFrameBufferSize" ,"paramtype": "uint32_t"}, +{ "paramname": "pFrameHeader" ,"paramtype": "vr::CameraVideoStreamFrameHeader_t *"}, +{ "paramname": "nFrameHeaderSize" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "AddApplicationManifest", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchApplicationManifestFullPath" ,"paramtype": "const char *"}, +{ "paramname": "bTemporary" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "RemoveApplicationManifest", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchApplicationManifestFullPath" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "IsApplicationInstalled", + "returntype": "bool", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationCount", + "returntype": "uint32_t" +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationKeyByIndex", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "unApplicationIndex" ,"paramtype": "uint32_t"}, +{ "paramname": "pchAppKeyBuffer" ,"paramtype": "char *"}, +{ "paramname": "unAppKeyBufferLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationKeyByProcessId", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "unProcessId" ,"paramtype": "uint32_t"}, +{ "paramname": "pchAppKeyBuffer" ,"paramtype": "char *"}, +{ "paramname": "unAppKeyBufferLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "LaunchApplication", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "LaunchTemplateApplication", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchTemplateAppKey" ,"paramtype": "const char *"}, +{ "paramname": "pchNewAppKey" ,"paramtype": "const char *"}, +{ 
"paramname": "pKeys" ,"array_count": "unKeys" ,"paramtype": "const struct vr::AppOverrideKeys_t *"}, +{ "paramname": "unKeys" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "LaunchDashboardOverlay", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "CancelApplicationLaunch", + "returntype": "bool", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "IdentifyApplication", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "unProcessId" ,"paramtype": "uint32_t"}, +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationProcessId", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationsErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "error" ,"paramtype": "vr::EVRApplicationError"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationPropertyString", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"}, +{ "paramname": "eProperty" ,"paramtype": "vr::EVRApplicationProperty"}, +{ "paramname": "pchPropertyValueBuffer" ,"paramtype": "char *"}, +{ "paramname": "unPropertyValueBufferLen" ,"paramtype": "uint32_t"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRApplicationError *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationPropertyBool", + "returntype": "bool", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"}, +{ "paramname": "eProperty" ,"paramtype": "vr::EVRApplicationProperty"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRApplicationError *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationPropertyUint64", + "returntype": "uint64_t", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"}, +{ "paramname": "eProperty" ,"paramtype": "vr::EVRApplicationProperty"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRApplicationError *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "SetApplicationAutoLaunch", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"}, +{ "paramname": "bAutoLaunch" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationAutoLaunch", + "returntype": "bool", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetStartingApplication", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchAppKeyBuffer" ,"paramtype": "char *"}, +{ "paramname": "unAppKeyBufferLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetTransitionState", + "returntype": "vr::EVRApplicationTransitionState" +} +,{ + "classname": "vr::IVRApplications", + "methodname": "PerformApplicationPrelaunchCheck", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchAppKey" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "GetApplicationsTransitionStateNameFromEnum", + 
"returntype": "const char *", + "params": [ +{ "paramname": "state" ,"paramtype": "vr::EVRApplicationTransitionState"} + ] +} +,{ + "classname": "vr::IVRApplications", + "methodname": "IsQuitUserPromptRequested", + "returntype": "bool" +} +,{ + "classname": "vr::IVRApplications", + "methodname": "LaunchInternalProcess", + "returntype": "vr::EVRApplicationError", + "params": [ +{ "paramname": "pchBinaryPath" ,"paramtype": "const char *"}, +{ "paramname": "pchArguments" ,"paramtype": "const char *"}, +{ "paramname": "pchWorkingDirectory" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "GetCalibrationState", + "returntype": "vr::ChaperoneCalibrationState" +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "GetPlayAreaSize", + "returntype": "bool", + "params": [ +{ "paramname": "pSizeX" ,"paramtype": "float *"}, +{ "paramname": "pSizeZ" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "GetPlayAreaRect", + "returntype": "bool", + "params": [ +{ "paramname": "rect" ,"paramtype": "struct vr::HmdQuad_t *"} + ] +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "ReloadInfo", + "returntype": "void" +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "SetSceneColor", + "returntype": "void", + "params": [ +{ "paramname": "color" ,"paramtype": "struct vr::HmdColor_t"} + ] +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "GetBoundsColor", + "returntype": "void", + "params": [ +{ "paramname": "pOutputColorArray" ,"paramtype": "struct vr::HmdColor_t *"}, +{ "paramname": "nNumOutputColors" ,"paramtype": "int"}, +{ "paramname": "flCollisionBoundsFadeDistance" ,"paramtype": "float"}, +{ "paramname": "pOutputCameraColor" ,"paramtype": "struct vr::HmdColor_t *"} + ] +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "AreBoundsVisible", + "returntype": "bool" +} +,{ + "classname": "vr::IVRChaperone", + "methodname": "ForceBoundsVisible", + "returntype": "void", + "params": [ +{ "paramname": "bForce" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "CommitWorkingCopy", + "returntype": "bool", + "params": [ +{ "paramname": "configFile" ,"paramtype": "vr::EChaperoneConfigFile"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "RevertWorkingCopy", + "returntype": "void" +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetWorkingPlayAreaSize", + "returntype": "bool", + "params": [ +{ "paramname": "pSizeX" ,"paramtype": "float *"}, +{ "paramname": "pSizeZ" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetWorkingPlayAreaRect", + "returntype": "bool", + "params": [ +{ "paramname": "rect" ,"paramtype": "struct vr::HmdQuad_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetWorkingCollisionBoundsInfo", + "returntype": "bool", + "params": [ +{ "paramname": "pQuadsBuffer" ,"out_array_count": "punQuadsCount" ,"paramtype": "struct vr::HmdQuad_t *"}, +{ "paramname": "punQuadsCount" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetLiveCollisionBoundsInfo", + "returntype": "bool", + "params": [ +{ "paramname": "pQuadsBuffer" ,"out_array_count": "punQuadsCount" ,"paramtype": "struct vr::HmdQuad_t *"}, +{ "paramname": "punQuadsCount" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetWorkingSeatedZeroPoseToRawTrackingPose", + "returntype": "bool", + "params": [ +{ 
"paramname": "pmatSeatedZeroPoseToRawTrackingPose" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetWorkingStandingZeroPoseToRawTrackingPose", + "returntype": "bool", + "params": [ +{ "paramname": "pmatStandingZeroPoseToRawTrackingPose" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingPlayAreaSize", + "returntype": "void", + "params": [ +{ "paramname": "sizeX" ,"paramtype": "float"}, +{ "paramname": "sizeZ" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingCollisionBoundsInfo", + "returntype": "void", + "params": [ +{ "paramname": "pQuadsBuffer" ,"array_count": "unQuadsCount" ,"paramtype": "struct vr::HmdQuad_t *"}, +{ "paramname": "unQuadsCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingSeatedZeroPoseToRawTrackingPose", + "returntype": "void", + "params": [ +{ "paramname": "pMatSeatedZeroPoseToRawTrackingPose" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingStandingZeroPoseToRawTrackingPose", + "returntype": "void", + "params": [ +{ "paramname": "pMatStandingZeroPoseToRawTrackingPose" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "ReloadFromDisk", + "returntype": "void", + "params": [ +{ "paramname": "configFile" ,"paramtype": "vr::EChaperoneConfigFile"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetLiveSeatedZeroPoseToRawTrackingPose", + "returntype": "bool", + "params": [ +{ "paramname": "pmatSeatedZeroPoseToRawTrackingPose" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingCollisionBoundsTagsInfo", + "returntype": "void", + "params": [ +{ "paramname": "pTagsBuffer" ,"array_count": "unTagCount" ,"paramtype": "uint8_t *"}, +{ "paramname": "unTagCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetLiveCollisionBoundsTagsInfo", + "returntype": "bool", + "params": [ +{ "paramname": "pTagsBuffer" ,"out_array_count": "punTagCount" ,"paramtype": "uint8_t *"}, +{ "paramname": "punTagCount" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "SetWorkingPhysicalBoundsInfo", + "returntype": "bool", + "params": [ +{ "paramname": "pQuadsBuffer" ,"array_count": "unQuadsCount" ,"paramtype": "struct vr::HmdQuad_t *"}, +{ "paramname": "unQuadsCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "GetLivePhysicalBoundsInfo", + "returntype": "bool", + "params": [ +{ "paramname": "pQuadsBuffer" ,"out_array_count": "punQuadsCount" ,"paramtype": "struct vr::HmdQuad_t *"}, +{ "paramname": "punQuadsCount" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "ExportLiveToBuffer", + "returntype": "bool", + "params": [ +{ "paramname": "pBuffer" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "pnBufferLength" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVRChaperoneSetup", + "methodname": "ImportFromBufferToWorking", + "returntype": "bool", + "params": [ +{ "paramname": "pBuffer" ,"paramtype": "const char *"}, +{ "paramname": "nImportFlags" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + 
"methodname": "SetTrackingSpace", + "returntype": "void", + "params": [ +{ "paramname": "eOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetTrackingSpace", + "returntype": "vr::ETrackingUniverseOrigin" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "WaitGetPoses", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "pRenderPoseArray" ,"array_count": "unRenderPoseArrayCount" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "unRenderPoseArrayCount" ,"paramtype": "uint32_t"}, +{ "paramname": "pGamePoseArray" ,"array_count": "unGamePoseArrayCount" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "unGamePoseArrayCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetLastPoses", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "pRenderPoseArray" ,"array_count": "unRenderPoseArrayCount" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "unRenderPoseArrayCount" ,"paramtype": "uint32_t"}, +{ "paramname": "pGamePoseArray" ,"array_count": "unGamePoseArrayCount" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "unGamePoseArrayCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetLastPoseForTrackedDeviceIndex", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pOutputPose" ,"paramtype": "struct vr::TrackedDevicePose_t *"}, +{ "paramname": "pOutputGamePose" ,"paramtype": "struct vr::TrackedDevicePose_t *"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "Submit", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "pTexture" ,"paramtype": "const struct vr::Texture_t *"}, +{ "paramname": "pBounds" ,"paramtype": "const struct vr::VRTextureBounds_t *"}, +{ "paramname": "nSubmitFlags" ,"paramtype": "vr::EVRSubmitFlags"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ClearLastSubmittedFrame", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "PostPresentHandoff", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetFrameTiming", + "returntype": "bool", + "params": [ +{ "paramname": "pTiming" ,"paramtype": "struct vr::Compositor_FrameTiming *"}, +{ "paramname": "unFramesAgo" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetFrameTimeRemaining", + "returntype": "float" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetCumulativeStats", + "returntype": "void", + "params": [ +{ "paramname": "pStats" ,"paramtype": "struct vr::Compositor_CumulativeStats *"}, +{ "paramname": "nStatsSizeInBytes" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "FadeToColor", + "returntype": "void", + "params": [ +{ "paramname": "fSeconds" ,"paramtype": "float"}, +{ "paramname": "fRed" ,"paramtype": "float"}, +{ "paramname": "fGreen" ,"paramtype": "float"}, +{ "paramname": "fBlue" ,"paramtype": "float"}, +{ "paramname": "fAlpha" ,"paramtype": "float"}, +{ "paramname": "bBackground" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "FadeGrid", + "returntype": "void", + "params": [ +{ "paramname": "fSeconds" ,"paramtype": "float"}, +{ "paramname": "bFadeIn" 
,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "SetSkyboxOverride", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "pTextures" ,"array_count": "unTextureCount" ,"paramtype": "const struct vr::Texture_t *"}, +{ "paramname": "unTextureCount" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ClearSkyboxOverride", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "CompositorBringToFront", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "CompositorGoToBack", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "CompositorQuit", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "IsFullscreen", + "returntype": "bool" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetCurrentSceneFocusProcess", + "returntype": "uint32_t" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetLastFrameRenderer", + "returntype": "uint32_t" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "CanRenderScene", + "returntype": "bool" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ShowMirrorWindow", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "HideMirrorWindow", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "IsMirrorWindowVisible", + "returntype": "bool" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "CompositorDumpImages", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ShouldAppRenderWithLowResources", + "returntype": "bool" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ForceInterleavedReprojectionOn", + "returntype": "void", + "params": [ +{ "paramname": "bOverride" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ForceReconnectProcess", + "returntype": "void" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "SuspendRendering", + "returntype": "void", + "params": [ +{ "paramname": "bSuspend" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "RequestScreenshot", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "type" ,"paramtype": "vr::EVRScreenshotType"}, +{ "paramname": "pchDestinationFileName" ,"paramtype": "const char *"}, +{ "paramname": "pchVRDestinationFileName" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetCurrentScreenshotType", + "returntype": "vr::EVRScreenshotType" +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetMirrorTextureD3D11", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "pD3D11DeviceOrResource" ,"paramtype": "void *"}, +{ "paramname": "ppD3D11ShaderResourceView" ,"paramtype": "void **"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "GetMirrorTextureGL", + "returntype": "vr::EVRCompositorError", + "params": [ +{ "paramname": "eEye" ,"paramtype": "vr::EVREye"}, +{ "paramname": "pglTextureId" ,"paramtype": "vr::glUInt_t *"}, +{ "paramname": "pglSharedTextureHandle" ,"paramtype": "vr::glSharedTextureHandle_t *"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "ReleaseSharedGLTexture", + "returntype": "bool", + "params": [ +{ "paramname": "glTextureId" ,"paramtype": "vr::glUInt_t"}, +{ "paramname": "glSharedTextureHandle" 
,"paramtype": "vr::glSharedTextureHandle_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "LockGLSharedTextureForAccess", + "returntype": "void", + "params": [ +{ "paramname": "glSharedTextureHandle" ,"paramtype": "vr::glSharedTextureHandle_t"} + ] +} +,{ + "classname": "vr::IVRCompositor", + "methodname": "UnlockGLSharedTextureForAccess", + "returntype": "void", + "params": [ +{ "paramname": "glSharedTextureHandle" ,"paramtype": "vr::glSharedTextureHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "FindOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "pchOverlayKey" ,"paramtype": "const char *"}, +{ "paramname": "pOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "CreateOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "pchOverlayKey" ,"paramtype": "const char *"}, +{ "paramname": "pchOverlayFriendlyName" ,"paramtype": "const char *"}, +{ "paramname": "pOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "DestroyOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetHighQualityOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetHighQualityOverlay", + "returntype": "vr::VROverlayHandle_t" +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayKey", + "returntype": "uint32_t", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pchValue" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unBufferSize" ,"paramtype": "uint32_t"}, +{ "paramname": "pError" ,"paramtype": "vr::EVROverlayError *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayName", + "returntype": "uint32_t", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pchValue" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unBufferSize" ,"paramtype": "uint32_t"}, +{ "paramname": "pError" ,"paramtype": "vr::EVROverlayError *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayImageData", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pvBuffer" ,"paramtype": "void *"}, +{ "paramname": "unBufferSize" ,"paramtype": "uint32_t"}, +{ "paramname": "punWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "punHeight" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "error" ,"paramtype": "vr::EVROverlayError"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayRenderingPid", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "unPID" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayRenderingPid", + "returntype": "uint32_t", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": 
"SetOverlayFlag", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eOverlayFlag" ,"paramtype": "vr::VROverlayFlags"}, +{ "paramname": "bEnabled" ,"paramtype": "bool"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayFlag", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eOverlayFlag" ,"paramtype": "vr::VROverlayFlags"}, +{ "paramname": "pbEnabled" ,"paramtype": "bool *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayColor", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "fRed" ,"paramtype": "float"}, +{ "paramname": "fGreen" ,"paramtype": "float"}, +{ "paramname": "fBlue" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayColor", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pfRed" ,"paramtype": "float *"}, +{ "paramname": "pfGreen" ,"paramtype": "float *"}, +{ "paramname": "pfBlue" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayAlpha", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "fAlpha" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayAlpha", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pfAlpha" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayWidthInMeters", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "fWidthInMeters" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayWidthInMeters", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pfWidthInMeters" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayAutoCurveDistanceRangeInMeters", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "fMinDistanceInMeters" ,"paramtype": "float"}, +{ "paramname": "fMaxDistanceInMeters" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayAutoCurveDistanceRangeInMeters", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pfMinDistanceInMeters" ,"paramtype": "float *"}, +{ "paramname": "pfMaxDistanceInMeters" ,"paramtype": "float *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTextureColorSpace", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eTextureColorSpace" ,"paramtype": "vr::EColorSpace"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTextureColorSpace", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": 
"vr::VROverlayHandle_t"}, +{ "paramname": "peTextureColorSpace" ,"paramtype": "vr::EColorSpace *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTextureBounds", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pOverlayTextureBounds" ,"paramtype": "const struct vr::VRTextureBounds_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTextureBounds", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pOverlayTextureBounds" ,"paramtype": "struct vr::VRTextureBounds_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTransformType", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "peTransformType" ,"paramtype": "vr::VROverlayTransformType *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTransformAbsolute", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eTrackingOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "pmatTrackingOriginToOverlayTransform" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTransformAbsolute", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "peTrackingOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin *"}, +{ "paramname": "pmatTrackingOriginToOverlayTransform" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTransformTrackedDeviceRelative", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "unTrackedDevice" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pmatTrackedDeviceToOverlayTransform" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTransformTrackedDeviceRelative", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "punTrackedDevice" ,"paramtype": "vr::TrackedDeviceIndex_t *"}, +{ "paramname": "pmatTrackedDeviceToOverlayTransform" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTransformTrackedDeviceComponent", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "unDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"}, +{ "paramname": "pchComponentName" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTransformTrackedDeviceComponent", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "punDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t *"}, +{ "paramname": "pchComponentName" ,"paramtype": "char *"}, +{ "paramname": "unComponentNameSize" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ShowOverlay", + "returntype": "vr::EVROverlayError", + 
"params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "HideOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "IsOverlayVisible", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetTransformForOverlayCoordinates", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eTrackingOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "coordinatesInOverlay" ,"paramtype": "struct vr::HmdVector2_t"}, +{ "paramname": "pmatTransform" ,"paramtype": "struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "PollNextOverlayEvent", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pEvent" ,"paramtype": "struct vr::VREvent_t *"}, +{ "paramname": "uncbVREvent" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayInputMethod", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "peInputMethod" ,"paramtype": "vr::VROverlayInputMethod *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayInputMethod", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eInputMethod" ,"paramtype": "vr::VROverlayInputMethod"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayMouseScale", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pvecMouseScale" ,"paramtype": "struct vr::HmdVector2_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayMouseScale", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pvecMouseScale" ,"paramtype": "const struct vr::HmdVector2_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ComputeOverlayIntersection", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pParams" ,"paramtype": "const struct vr::VROverlayIntersectionParams_t *"}, +{ "paramname": "pResults" ,"paramtype": "struct vr::VROverlayIntersectionResults_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "HandleControllerOverlayInteractionAsMouse", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "unControllerDeviceIndex" ,"paramtype": "vr::TrackedDeviceIndex_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "IsHoverTargetOverlay", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetGamepadFocusOverlay", + "returntype": "vr::VROverlayHandle_t" +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetGamepadFocusOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ 
"paramname": "ulNewFocusOverlay" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayNeighbor", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "eDirection" ,"paramtype": "vr::EOverlayDirection"}, +{ "paramname": "ulFrom" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "ulTo" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "MoveGamepadFocusToNeighbor", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "eDirection" ,"paramtype": "vr::EOverlayDirection"}, +{ "paramname": "ulFrom" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayTexture", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pTexture" ,"paramtype": "const struct vr::Texture_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ClearOverlayTexture", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayRaw", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pvBuffer" ,"paramtype": "void *"}, +{ "paramname": "unWidth" ,"paramtype": "uint32_t"}, +{ "paramname": "unHeight" ,"paramtype": "uint32_t"}, +{ "paramname": "unDepth" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetOverlayFromFile", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pchFilePath" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTexture", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pNativeTextureHandle" ,"paramtype": "void **"}, +{ "paramname": "pNativeTextureRef" ,"paramtype": "void *"}, +{ "paramname": "pWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pHeight" ,"paramtype": "uint32_t *"}, +{ "paramname": "pNativeFormat" ,"paramtype": "uint32_t *"}, +{ "paramname": "pAPI" ,"paramtype": "vr::EGraphicsAPIConvention *"}, +{ "paramname": "pColorSpace" ,"paramtype": "vr::EColorSpace *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ReleaseNativeOverlayHandle", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pNativeTextureHandle" ,"paramtype": "void *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetOverlayTextureSize", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "pWidth" ,"paramtype": "uint32_t *"}, +{ "paramname": "pHeight" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "CreateDashboardOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "pchOverlayKey" ,"paramtype": "const char *"}, +{ "paramname": "pchOverlayFriendlyName" ,"paramtype": "const char *"}, +{ "paramname": "pMainHandle" ,"paramtype": "vr::VROverlayHandle_t *"}, +{ "paramname": "pThumbnailHandle" ,"paramtype": "vr::VROverlayHandle_t *"} + ] +} +,{ + "classname": 
"vr::IVROverlay", + "methodname": "IsDashboardVisible", + "returntype": "bool" +} +,{ + "classname": "vr::IVROverlay", + "methodname": "IsActiveDashboardOverlay", + "returntype": "bool", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetDashboardOverlaySceneProcess", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "unProcessId" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetDashboardOverlaySceneProcess", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "punProcessId" ,"paramtype": "uint32_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ShowDashboard", + "returntype": "void", + "params": [ +{ "paramname": "pchOverlayToShow" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetPrimaryDashboardDevice", + "returntype": "vr::TrackedDeviceIndex_t" +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ShowKeyboard", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "eInputMode" ,"paramtype": "vr::EGamepadTextInputMode"}, +{ "paramname": "eLineInputMode" ,"paramtype": "vr::EGamepadTextInputLineMode"}, +{ "paramname": "pchDescription" ,"paramtype": "const char *"}, +{ "paramname": "unCharMax" ,"paramtype": "uint32_t"}, +{ "paramname": "pchExistingText" ,"paramtype": "const char *"}, +{ "paramname": "bUseMinimalMode" ,"paramtype": "bool"}, +{ "paramname": "uUserValue" ,"paramtype": "uint64_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "ShowKeyboardForOverlay", + "returntype": "vr::EVROverlayError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "eInputMode" ,"paramtype": "vr::EGamepadTextInputMode"}, +{ "paramname": "eLineInputMode" ,"paramtype": "vr::EGamepadTextInputLineMode"}, +{ "paramname": "pchDescription" ,"paramtype": "const char *"}, +{ "paramname": "unCharMax" ,"paramtype": "uint32_t"}, +{ "paramname": "pchExistingText" ,"paramtype": "const char *"}, +{ "paramname": "bUseMinimalMode" ,"paramtype": "bool"}, +{ "paramname": "uUserValue" ,"paramtype": "uint64_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "GetKeyboardText", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchText" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "cchText" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "HideKeyboard", + "returntype": "void" +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetKeyboardTransformAbsolute", + "returntype": "void", + "params": [ +{ "paramname": "eTrackingOrigin" ,"paramtype": "vr::ETrackingUniverseOrigin"}, +{ "paramname": "pmatTrackingOriginToKeyboardTransform" ,"paramtype": "const struct vr::HmdMatrix34_t *"} + ] +} +,{ + "classname": "vr::IVROverlay", + "methodname": "SetKeyboardPositionForOverlay", + "returntype": "void", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "avoidRect" ,"paramtype": "struct vr::HmdRect2_t"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "LoadRenderModel_Async", + "returntype": "vr::EVRRenderModelError", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": 
"ppRenderModel" ,"paramtype": "struct vr::RenderModel_t **"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "FreeRenderModel", + "returntype": "void", + "params": [ +{ "paramname": "pRenderModel" ,"paramtype": "struct vr::RenderModel_t *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "LoadTexture_Async", + "returntype": "vr::EVRRenderModelError", + "params": [ +{ "paramname": "textureId" ,"paramtype": "vr::TextureID_t"}, +{ "paramname": "ppTexture" ,"paramtype": "struct vr::RenderModel_TextureMap_t **"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "FreeTexture", + "returntype": "void", + "params": [ +{ "paramname": "pTexture" ,"paramtype": "struct vr::RenderModel_TextureMap_t *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "LoadTextureD3D11_Async", + "returntype": "vr::EVRRenderModelError", + "params": [ +{ "paramname": "textureId" ,"paramtype": "vr::TextureID_t"}, +{ "paramname": "pD3D11Device" ,"paramtype": "void *"}, +{ "paramname": "ppD3D11Texture2D" ,"paramtype": "void **"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "LoadIntoTextureD3D11_Async", + "returntype": "vr::EVRRenderModelError", + "params": [ +{ "paramname": "textureId" ,"paramtype": "vr::TextureID_t"}, +{ "paramname": "pDstTexture" ,"paramtype": "void *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "FreeTextureD3D11", + "returntype": "void", + "params": [ +{ "paramname": "pD3D11Texture2D" ,"paramtype": "void *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetRenderModelName", + "returntype": "uint32_t", + "params": [ +{ "paramname": "unRenderModelIndex" ,"paramtype": "uint32_t"}, +{ "paramname": "pchRenderModelName" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unRenderModelNameLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetRenderModelCount", + "returntype": "uint32_t" +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetComponentCount", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetComponentName", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "unComponentIndex" ,"paramtype": "uint32_t"}, +{ "paramname": "pchComponentName" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unComponentNameLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetComponentButtonMask", + "returntype": "uint64_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchComponentName" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetComponentRenderModelName", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchComponentName" ,"paramtype": "const char *"}, +{ "paramname": "pchComponentRenderModelName" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unComponentRenderModelNameLen" ,"paramtype": "uint32_t"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetComponentState", + "returntype": "bool", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchComponentName" ,"paramtype": "const char *"}, +{ "paramname": 
"pControllerState" ,"paramtype": "const vr::VRControllerState_t *"}, +{ "paramname": "pState" ,"paramtype": "const struct vr::RenderModel_ControllerMode_State_t *"}, +{ "paramname": "pComponentState" ,"paramtype": "struct vr::RenderModel_ComponentState_t *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "RenderModelHasComponent", + "returntype": "bool", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchComponentName" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetRenderModelThumbnailURL", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchThumbnailURL" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unThumbnailURLLen" ,"paramtype": "uint32_t"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRRenderModelError *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetRenderModelOriginalPath", + "returntype": "uint32_t", + "params": [ +{ "paramname": "pchRenderModelName" ,"paramtype": "const char *"}, +{ "paramname": "pchOriginalPath" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "unOriginalPathLen" ,"paramtype": "uint32_t"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRRenderModelError *"} + ] +} +,{ + "classname": "vr::IVRRenderModels", + "methodname": "GetRenderModelErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "error" ,"paramtype": "vr::EVRRenderModelError"} + ] +} +,{ + "classname": "vr::IVRNotifications", + "methodname": "CreateNotification", + "returntype": "vr::EVRNotificationError", + "params": [ +{ "paramname": "ulOverlayHandle" ,"paramtype": "vr::VROverlayHandle_t"}, +{ "paramname": "ulUserValue" ,"paramtype": "uint64_t"}, +{ "paramname": "type" ,"paramtype": "vr::EVRNotificationType"}, +{ "paramname": "pchText" ,"paramtype": "const char *"}, +{ "paramname": "style" ,"paramtype": "vr::EVRNotificationStyle"}, +{ "paramname": "pImage" ,"paramtype": "const struct vr::NotificationBitmap_t *"}, +{ "paramname": "pNotificationId" ,"paramtype": "vr::VRNotificationId *"} + ] +} +,{ + "classname": "vr::IVRNotifications", + "methodname": "RemoveNotification", + "returntype": "vr::EVRNotificationError", + "params": [ +{ "paramname": "notificationId" ,"paramtype": "vr::VRNotificationId"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "GetSettingsErrorNameFromEnum", + "returntype": "const char *", + "params": [ +{ "paramname": "eError" ,"paramtype": "vr::EVRSettingsError"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "Sync", + "returntype": "bool", + "params": [ +{ "paramname": "bForce" ,"paramtype": "bool"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "GetBool", + "returntype": "bool", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "bDefaultValue" ,"paramtype": "bool"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "SetBool", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "bValue" ,"paramtype": "bool"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": 
"vr::IVRSettings", + "methodname": "GetInt32", + "returntype": "int32_t", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "nDefaultValue" ,"paramtype": "int32_t"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "SetInt32", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "nValue" ,"paramtype": "int32_t"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "GetFloat", + "returntype": "float", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "flDefaultValue" ,"paramtype": "float"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "SetFloat", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "flValue" ,"paramtype": "float"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "GetString", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "pchValue" ,"paramtype": "char *"}, +{ "paramname": "unValueLen" ,"paramtype": "uint32_t"}, +{ "paramname": "pchDefaultValue" ,"paramtype": "const char *"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "SetString", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "pchValue" ,"paramtype": "const char *"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "RemoveSection", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRSettings", + "methodname": "RemoveKeyInSection", + "returntype": "void", + "params": [ +{ "paramname": "pchSection" ,"paramtype": "const char *"}, +{ "paramname": "pchSettingsKey" ,"paramtype": "const char *"}, +{ "paramname": "peError" ,"paramtype": "vr::EVRSettingsError *"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "RequestScreenshot", + "returntype": "vr::EVRScreenshotError", + "params": [ +{ "paramname": "pOutScreenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t *"}, +{ "paramname": "type" ,"paramtype": "vr::EVRScreenshotType"}, +{ "paramname": "pchPreviewFilename" ,"paramtype": "const char *"}, +{ "paramname": "pchVRFilename" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "HookScreenshot", + "returntype": "vr::EVRScreenshotError", + "params": [ +{ "paramname": "pSupportedTypes" ,"array_count": "numTypes" ,"paramtype": "const vr::EVRScreenshotType *"}, +{ "paramname": "numTypes" ,"paramtype": "int"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + 
"methodname": "GetScreenshotPropertyType", + "returntype": "vr::EVRScreenshotType", + "params": [ +{ "paramname": "screenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t"}, +{ "paramname": "pError" ,"paramtype": "vr::EVRScreenshotError *"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "GetScreenshotPropertyFilename", + "returntype": "uint32_t", + "params": [ +{ "paramname": "screenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t"}, +{ "paramname": "filenameType" ,"paramtype": "vr::EVRScreenshotPropertyFilenames"}, +{ "paramname": "pchFilename" ,"out_string": " " ,"paramtype": "char *"}, +{ "paramname": "cchFilename" ,"paramtype": "uint32_t"}, +{ "paramname": "pError" ,"paramtype": "vr::EVRScreenshotError *"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "UpdateScreenshotProgress", + "returntype": "vr::EVRScreenshotError", + "params": [ +{ "paramname": "screenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t"}, +{ "paramname": "flProgress" ,"paramtype": "float"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "TakeStereoScreenshot", + "returntype": "vr::EVRScreenshotError", + "params": [ +{ "paramname": "pOutScreenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t *"}, +{ "paramname": "pchPreviewFilename" ,"paramtype": "const char *"}, +{ "paramname": "pchVRFilename" ,"paramtype": "const char *"} + ] +} +,{ + "classname": "vr::IVRScreenshots", + "methodname": "SubmitScreenshot", + "returntype": "vr::EVRScreenshotError", + "params": [ +{ "paramname": "screenshotHandle" ,"paramtype": "vr::ScreenshotHandle_t"}, +{ "paramname": "type" ,"paramtype": "vr::EVRScreenshotType"}, +{ "paramname": "pchSourcePreviewFilename" ,"paramtype": "const char *"}, +{ "paramname": "pchSourceVRFilename" ,"paramtype": "const char *"} + ] +} +] +} \ No newline at end of file diff --git a/examples/ThirdPartyLibs/openvr/headers/openvr_capi.h b/examples/ThirdPartyLibs/openvr/headers/openvr_capi.h new file mode 100644 index 000000000..76eafd4d6 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/headers/openvr_capi.h @@ -0,0 +1,1626 @@ +//======= Copyright (c) Valve Corporation, All rights reserved. =============== +// +// Purpose: Header for flatted SteamAPI. Use this for binding to other languages. +// This file is auto-generated, do not edit it. 
+// +//============================================================================= + +#ifndef __OPENVR_API_FLAT_H__ +#define __OPENVR_API_FLAT_H__ +#if defined( _WIN32 ) || defined( __clang__ ) +#pragma once +#endif + +#ifdef __cplusplus +#define EXTERN_C extern "C" +#else +#define EXTERN_C +#endif + +#define OPENVR_FNTABLE_CALLTYPE __stdcall + +// OPENVR API export macro +#if defined( _WIN32 ) && !defined( _X360 ) + #if defined( OPENVR_API_EXPORTS ) + #define S_API EXTERN_C __declspec( dllexport ) + #elif defined( OPENVR_API_NODLL ) + #define S_API EXTERN_C + #else + #define S_API extern "C" __declspec( dllimport ) + #endif // OPENVR_API_EXPORTS +#elif defined( GNUC ) + #if defined( OPENVR_API_EXPORTS ) + #define S_API EXTERN_C __attribute__ ((visibility("default"))) + #else + #define S_API EXTERN_C + #endif // OPENVR_API_EXPORTS +#else // !WIN32 + #if defined( OPENVR_API_EXPORTS ) + #define S_API EXTERN_C + #else + #define S_API EXTERN_C + #endif // OPENVR_API_EXPORTS +#endif + +#include <stdint.h> + +#if defined( __WIN32 ) +typedef char bool; +#endif + + +// OpenVR Constants + +static const unsigned int k_unTrackingStringSize = 32; +static const unsigned int k_unMaxDriverDebugResponseSize = 32768; +static const unsigned int k_unTrackedDeviceIndex_Hmd = 0; +static const unsigned int k_unMaxTrackedDeviceCount = 16; +static const unsigned int k_unTrackedDeviceIndexOther = 4294967294; +static const unsigned int k_unTrackedDeviceIndexInvalid = 4294967295; +static const unsigned int k_unMaxPropertyStringSize = 32768; +static const unsigned int k_unControllerStateAxisCount = 5; +static const unsigned long k_ulOverlayHandleInvalid = 0; +static const unsigned int k_unScreenshotHandleInvalid = 0; +static const char * IVRSystem_Version = "IVRSystem_012"; +static const char * IVRExtendedDisplay_Version = "IVRExtendedDisplay_001"; +static const char * IVRTrackedCamera_Version = "IVRTrackedCamera_003"; +static const unsigned int k_unMaxApplicationKeyLength = 128; +static const char * IVRApplications_Version = "IVRApplications_005"; +static const char * IVRChaperone_Version = "IVRChaperone_003"; +static const char * IVRChaperoneSetup_Version = "IVRChaperoneSetup_005"; +static const char * IVRCompositor_Version = "IVRCompositor_015"; +static const unsigned int k_unVROverlayMaxKeyLength = 128; +static const unsigned int k_unVROverlayMaxNameLength = 128; +static const unsigned int k_unMaxOverlayCount = 32; +static const char * IVROverlay_Version = "IVROverlay_012"; +static const char * k_pch_Controller_Component_GDC2015 = "gdc2015"; +static const char * k_pch_Controller_Component_Base = "base"; +static const char * k_pch_Controller_Component_Tip = "tip"; +static const char * k_pch_Controller_Component_HandGrip = "handgrip"; +static const char * k_pch_Controller_Component_Status = "status"; +static const char * IVRRenderModels_Version = "IVRRenderModels_005"; +static const unsigned int k_unNotificationTextMaxSize = 256; +static const char * IVRNotifications_Version = "IVRNotifications_002"; +static const unsigned int k_unMaxSettingsKeyLength = 128; +static const char * IVRSettings_Version = "IVRSettings_001"; +static const char * k_pch_SteamVR_Section = "steamvr"; +static const char * k_pch_SteamVR_RequireHmd_String = "requireHmd"; +static const char * k_pch_SteamVR_ForcedDriverKey_String = "forcedDriver"; +static const char * k_pch_SteamVR_ForcedHmdKey_String = "forcedHmd"; +static const char * k_pch_SteamVR_DisplayDebug_Bool = "displayDebug"; +static const char * k_pch_SteamVR_DebugProcessPipe_String = 
"debugProcessPipe"; +static const char * k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion"; +static const char * k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX"; +static const char * k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY"; +static const char * k_pch_SteamVR_SendSystemButtonToAllApps_Bool = "sendSystemButtonToAllApps"; +static const char * k_pch_SteamVR_LogLevel_Int32 = "loglevel"; +static const char * k_pch_SteamVR_IPD_Float = "ipd"; +static const char * k_pch_SteamVR_Background_String = "background"; +static const char * k_pch_SteamVR_BackgroundCameraHeight_Float = "backgroundCameraHeight"; +static const char * k_pch_SteamVR_BackgroundDomeRadius_Float = "backgroundDomeRadius"; +static const char * k_pch_SteamVR_Environment_String = "environment"; +static const char * k_pch_SteamVR_GridColor_String = "gridColor"; +static const char * k_pch_SteamVR_PlayAreaColor_String = "playAreaColor"; +static const char * k_pch_SteamVR_ShowStage_Bool = "showStage"; +static const char * k_pch_SteamVR_ActivateMultipleDrivers_Bool = "activateMultipleDrivers"; +static const char * k_pch_SteamVR_PowerOffOnExit_Bool = "powerOffOnExit"; +static const char * k_pch_SteamVR_StandbyAppRunningTimeout_Float = "standbyAppRunningTimeout"; +static const char * k_pch_SteamVR_StandbyNoAppTimeout_Float = "standbyNoAppTimeout"; +static const char * k_pch_SteamVR_DirectMode_Bool = "directMode"; +static const char * k_pch_SteamVR_DirectModeEdidVid_Int32 = "directModeEdidVid"; +static const char * k_pch_SteamVR_DirectModeEdidPid_Int32 = "directModeEdidPid"; +static const char * k_pch_SteamVR_UsingSpeakers_Bool = "usingSpeakers"; +static const char * k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees"; +static const char * k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement"; +static const char * k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses"; +static const char * k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier"; +static const char * k_pch_SteamVR_AllowReprojection_Bool = "allowReprojection"; +static const char * k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection"; +static const char * k_pch_SteamVR_ForceFadeOnBadTracking_Bool = "forceFadeOnBadTracking"; +static const char * k_pch_SteamVR_DefaultMirrorView_Int32 = "defaultMirrorView"; +static const char * k_pch_SteamVR_ShowMirrorView_Bool = "showMirrorView"; +static const char * k_pch_Lighthouse_Section = "driver_lighthouse"; +static const char * k_pch_Lighthouse_DisableIMU_Bool = "disableimu"; +static const char * k_pch_Lighthouse_UseDisambiguation_String = "usedisambiguation"; +static const char * k_pch_Lighthouse_DisambiguationDebug_Int32 = "disambiguationdebug"; +static const char * k_pch_Lighthouse_PrimaryBasestation_Int32 = "primarybasestation"; +static const char * k_pch_Lighthouse_LighthouseName_String = "lighthousename"; +static const char * k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float = "maxincidenceangledegrees"; +static const char * k_pch_Lighthouse_UseLighthouseDirect_Bool = "uselighthousedirect"; +static const char * k_pch_Lighthouse_DBHistory_Bool = "dbhistory"; +static const char * k_pch_Null_Section = "driver_null"; +static const char * k_pch_Null_EnableNullDriver_Bool = "enable"; +static const char * k_pch_Null_SerialNumber_String = "serialNumber"; +static const char * k_pch_Null_ModelNumber_String = "modelNumber"; +static const char * k_pch_Null_WindowX_Int32 = "windowX"; +static const char * k_pch_Null_WindowY_Int32 = "windowY"; 
+static const char * k_pch_Null_WindowWidth_Int32 = "windowWidth"; +static const char * k_pch_Null_WindowHeight_Int32 = "windowHeight"; +static const char * k_pch_Null_RenderWidth_Int32 = "renderWidth"; +static const char * k_pch_Null_RenderHeight_Int32 = "renderHeight"; +static const char * k_pch_Null_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons"; +static const char * k_pch_Null_DisplayFrequency_Float = "displayFrequency"; +static const char * k_pch_UserInterface_Section = "userinterface"; +static const char * k_pch_UserInterface_StatusAlwaysOnTop_Bool = "StatusAlwaysOnTop"; +static const char * k_pch_UserInterface_EnableScreenshots_Bool = "EnableScreenshots"; +static const char * k_pch_Notifications_Section = "notifications"; +static const char * k_pch_Notifications_DoNotDisturb_Bool = "DoNotDisturb"; +static const char * k_pch_Keyboard_Section = "keyboard"; +static const char * k_pch_Keyboard_TutorialCompletions = "TutorialCompletions"; +static const char * k_pch_Keyboard_ScaleX = "ScaleX"; +static const char * k_pch_Keyboard_ScaleY = "ScaleY"; +static const char * k_pch_Keyboard_OffsetLeftX = "OffsetLeftX"; +static const char * k_pch_Keyboard_OffsetRightX = "OffsetRightX"; +static const char * k_pch_Keyboard_OffsetY = "OffsetY"; +static const char * k_pch_Keyboard_Smoothing = "Smoothing"; +static const char * k_pch_Perf_Section = "perfcheck"; +static const char * k_pch_Perf_HeuristicActive_Bool = "heuristicActive"; +static const char * k_pch_Perf_NotifyInHMD_Bool = "warnInHMD"; +static const char * k_pch_Perf_NotifyOnlyOnce_Bool = "warnOnlyOnce"; +static const char * k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore"; +static const char * k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit"; +static const char * k_pch_Perf_TestData_Float = "perfTestData"; +static const char * k_pch_CollisionBounds_Section = "collisionBounds"; +static const char * k_pch_CollisionBounds_Style_Int32 = "CollisionBoundsStyle"; +static const char * k_pch_CollisionBounds_GroundPerimeterOn_Bool = "CollisionBoundsGroundPerimeterOn"; +static const char * k_pch_CollisionBounds_CenterMarkerOn_Bool = "CollisionBoundsCenterMarkerOn"; +static const char * k_pch_CollisionBounds_PlaySpaceOn_Bool = "CollisionBoundsPlaySpaceOn"; +static const char * k_pch_CollisionBounds_FadeDistance_Float = "CollisionBoundsFadeDistance"; +static const char * k_pch_CollisionBounds_ColorGammaR_Int32 = "CollisionBoundsColorGammaR"; +static const char * k_pch_CollisionBounds_ColorGammaG_Int32 = "CollisionBoundsColorGammaG"; +static const char * k_pch_CollisionBounds_ColorGammaB_Int32 = "CollisionBoundsColorGammaB"; +static const char * k_pch_CollisionBounds_ColorGammaA_Int32 = "CollisionBoundsColorGammaA"; +static const char * k_pch_Camera_Section = "camera"; +static const char * k_pch_Camera_EnableCamera_Bool = "enableCamera"; +static const char * k_pch_Camera_EnableCameraInDashboard_Bool = "enableCameraInDashboard"; +static const char * k_pch_Camera_EnableCameraForCollisionBounds_Bool = "enableCameraForCollisionBounds"; +static const char * k_pch_Camera_EnableCameraForRoomView_Bool = "enableCameraForRoomView"; +static const char * k_pch_Camera_BoundsColorGammaR_Int32 = "cameraBoundsColorGammaR"; +static const char * k_pch_Camera_BoundsColorGammaG_Int32 = "cameraBoundsColorGammaG"; +static const char * k_pch_Camera_BoundsColorGammaB_Int32 = "cameraBoundsColorGammaB"; +static const char * k_pch_Camera_BoundsColorGammaA_Int32 = "cameraBoundsColorGammaA"; +static const char * k_pch_audio_Section = "audio"; +static const 
char * k_pch_audio_OnPlaybackDevice_String = "onPlaybackDevice"; +static const char * k_pch_audio_OnRecordDevice_String = "onRecordDevice"; +static const char * k_pch_audio_OnPlaybackMirrorDevice_String = "onPlaybackMirrorDevice"; +static const char * k_pch_audio_OffPlaybackDevice_String = "offPlaybackDevice"; +static const char * k_pch_audio_OffRecordDevice_String = "offRecordDevice"; +static const char * k_pch_audio_VIVEHDMIGain = "viveHDMIGain"; +static const char * k_pch_modelskin_Section = "modelskins"; +static const char * IVRScreenshots_Version = "IVRScreenshots_001"; + +// OpenVR Enums + +typedef enum EVREye +{ + EVREye_Eye_Left = 0, + EVREye_Eye_Right = 1, +} EVREye; + +typedef enum EGraphicsAPIConvention +{ + EGraphicsAPIConvention_API_DirectX = 0, + EGraphicsAPIConvention_API_OpenGL = 1, +} EGraphicsAPIConvention; + +typedef enum EColorSpace +{ + EColorSpace_ColorSpace_Auto = 0, + EColorSpace_ColorSpace_Gamma = 1, + EColorSpace_ColorSpace_Linear = 2, +} EColorSpace; + +typedef enum ETrackingResult +{ + ETrackingResult_TrackingResult_Uninitialized = 1, + ETrackingResult_TrackingResult_Calibrating_InProgress = 100, + ETrackingResult_TrackingResult_Calibrating_OutOfRange = 101, + ETrackingResult_TrackingResult_Running_OK = 200, + ETrackingResult_TrackingResult_Running_OutOfRange = 201, +} ETrackingResult; + +typedef enum ETrackedDeviceClass +{ + ETrackedDeviceClass_TrackedDeviceClass_Invalid = 0, + ETrackedDeviceClass_TrackedDeviceClass_HMD = 1, + ETrackedDeviceClass_TrackedDeviceClass_Controller = 2, + ETrackedDeviceClass_TrackedDeviceClass_TrackingReference = 4, + ETrackedDeviceClass_TrackedDeviceClass_Other = 1000, +} ETrackedDeviceClass; + +typedef enum ETrackedControllerRole +{ + ETrackedControllerRole_TrackedControllerRole_Invalid = 0, + ETrackedControllerRole_TrackedControllerRole_LeftHand = 1, + ETrackedControllerRole_TrackedControllerRole_RightHand = 2, +} ETrackedControllerRole; + +typedef enum ETrackingUniverseOrigin +{ + ETrackingUniverseOrigin_TrackingUniverseSeated = 0, + ETrackingUniverseOrigin_TrackingUniverseStanding = 1, + ETrackingUniverseOrigin_TrackingUniverseRawAndUncalibrated = 2, +} ETrackingUniverseOrigin; + +typedef enum ETrackedDeviceProperty +{ + ETrackedDeviceProperty_Prop_TrackingSystemName_String = 1000, + ETrackedDeviceProperty_Prop_ModelNumber_String = 1001, + ETrackedDeviceProperty_Prop_SerialNumber_String = 1002, + ETrackedDeviceProperty_Prop_RenderModelName_String = 1003, + ETrackedDeviceProperty_Prop_WillDriftInYaw_Bool = 1004, + ETrackedDeviceProperty_Prop_ManufacturerName_String = 1005, + ETrackedDeviceProperty_Prop_TrackingFirmwareVersion_String = 1006, + ETrackedDeviceProperty_Prop_HardwareRevision_String = 1007, + ETrackedDeviceProperty_Prop_AllWirelessDongleDescriptions_String = 1008, + ETrackedDeviceProperty_Prop_ConnectedWirelessDongle_String = 1009, + ETrackedDeviceProperty_Prop_DeviceIsWireless_Bool = 1010, + ETrackedDeviceProperty_Prop_DeviceIsCharging_Bool = 1011, + ETrackedDeviceProperty_Prop_DeviceBatteryPercentage_Float = 1012, + ETrackedDeviceProperty_Prop_StatusDisplayTransform_Matrix34 = 1013, + ETrackedDeviceProperty_Prop_Firmware_UpdateAvailable_Bool = 1014, + ETrackedDeviceProperty_Prop_Firmware_ManualUpdate_Bool = 1015, + ETrackedDeviceProperty_Prop_Firmware_ManualUpdateURL_String = 1016, + ETrackedDeviceProperty_Prop_HardwareRevision_Uint64 = 1017, + ETrackedDeviceProperty_Prop_FirmwareVersion_Uint64 = 1018, + ETrackedDeviceProperty_Prop_FPGAVersion_Uint64 = 1019, + ETrackedDeviceProperty_Prop_VRCVersion_Uint64 = 1020, + 
ETrackedDeviceProperty_Prop_RadioVersion_Uint64 = 1021, + ETrackedDeviceProperty_Prop_DongleVersion_Uint64 = 1022, + ETrackedDeviceProperty_Prop_BlockServerShutdown_Bool = 1023, + ETrackedDeviceProperty_Prop_CanUnifyCoordinateSystemWithHmd_Bool = 1024, + ETrackedDeviceProperty_Prop_ContainsProximitySensor_Bool = 1025, + ETrackedDeviceProperty_Prop_DeviceProvidesBatteryStatus_Bool = 1026, + ETrackedDeviceProperty_Prop_DeviceCanPowerOff_Bool = 1027, + ETrackedDeviceProperty_Prop_Firmware_ProgrammingTarget_String = 1028, + ETrackedDeviceProperty_Prop_DeviceClass_Int32 = 1029, + ETrackedDeviceProperty_Prop_HasCamera_Bool = 1030, + ETrackedDeviceProperty_Prop_DriverVersion_String = 1031, + ETrackedDeviceProperty_Prop_Firmware_ForceUpdateRequired_Bool = 1032, + ETrackedDeviceProperty_Prop_ReportsTimeSinceVSync_Bool = 2000, + ETrackedDeviceProperty_Prop_SecondsFromVsyncToPhotons_Float = 2001, + ETrackedDeviceProperty_Prop_DisplayFrequency_Float = 2002, + ETrackedDeviceProperty_Prop_UserIpdMeters_Float = 2003, + ETrackedDeviceProperty_Prop_CurrentUniverseId_Uint64 = 2004, + ETrackedDeviceProperty_Prop_PreviousUniverseId_Uint64 = 2005, + ETrackedDeviceProperty_Prop_DisplayFirmwareVersion_Uint64 = 2006, + ETrackedDeviceProperty_Prop_IsOnDesktop_Bool = 2007, + ETrackedDeviceProperty_Prop_DisplayMCType_Int32 = 2008, + ETrackedDeviceProperty_Prop_DisplayMCOffset_Float = 2009, + ETrackedDeviceProperty_Prop_DisplayMCScale_Float = 2010, + ETrackedDeviceProperty_Prop_EdidVendorID_Int32 = 2011, + ETrackedDeviceProperty_Prop_DisplayMCImageLeft_String = 2012, + ETrackedDeviceProperty_Prop_DisplayMCImageRight_String = 2013, + ETrackedDeviceProperty_Prop_DisplayGCBlackClamp_Float = 2014, + ETrackedDeviceProperty_Prop_EdidProductID_Int32 = 2015, + ETrackedDeviceProperty_Prop_CameraToHeadTransform_Matrix34 = 2016, + ETrackedDeviceProperty_Prop_DisplayGCType_Int32 = 2017, + ETrackedDeviceProperty_Prop_DisplayGCOffset_Float = 2018, + ETrackedDeviceProperty_Prop_DisplayGCScale_Float = 2019, + ETrackedDeviceProperty_Prop_DisplayGCPrescale_Float = 2020, + ETrackedDeviceProperty_Prop_DisplayGCImage_String = 2021, + ETrackedDeviceProperty_Prop_LensCenterLeftU_Float = 2022, + ETrackedDeviceProperty_Prop_LensCenterLeftV_Float = 2023, + ETrackedDeviceProperty_Prop_LensCenterRightU_Float = 2024, + ETrackedDeviceProperty_Prop_LensCenterRightV_Float = 2025, + ETrackedDeviceProperty_Prop_UserHeadToEyeDepthMeters_Float = 2026, + ETrackedDeviceProperty_Prop_CameraFirmwareVersion_Uint64 = 2027, + ETrackedDeviceProperty_Prop_CameraFirmwareDescription_String = 2028, + ETrackedDeviceProperty_Prop_DisplayFPGAVersion_Uint64 = 2029, + ETrackedDeviceProperty_Prop_DisplayBootloaderVersion_Uint64 = 2030, + ETrackedDeviceProperty_Prop_DisplayHardwareVersion_Uint64 = 2031, + ETrackedDeviceProperty_Prop_AudioFirmwareVersion_Uint64 = 2032, + ETrackedDeviceProperty_Prop_CameraCompatibilityMode_Int32 = 2033, + ETrackedDeviceProperty_Prop_ScreenshotHorizontalFieldOfViewDegrees_Float = 2034, + ETrackedDeviceProperty_Prop_ScreenshotVerticalFieldOfViewDegrees_Float = 2035, + ETrackedDeviceProperty_Prop_DisplaySuppressed_Bool = 2036, + ETrackedDeviceProperty_Prop_AttachedDeviceId_String = 3000, + ETrackedDeviceProperty_Prop_SupportedButtons_Uint64 = 3001, + ETrackedDeviceProperty_Prop_Axis0Type_Int32 = 3002, + ETrackedDeviceProperty_Prop_Axis1Type_Int32 = 3003, + ETrackedDeviceProperty_Prop_Axis2Type_Int32 = 3004, + ETrackedDeviceProperty_Prop_Axis3Type_Int32 = 3005, + ETrackedDeviceProperty_Prop_Axis4Type_Int32 = 3006, + 
ETrackedDeviceProperty_Prop_FieldOfViewLeftDegrees_Float = 4000, + ETrackedDeviceProperty_Prop_FieldOfViewRightDegrees_Float = 4001, + ETrackedDeviceProperty_Prop_FieldOfViewTopDegrees_Float = 4002, + ETrackedDeviceProperty_Prop_FieldOfViewBottomDegrees_Float = 4003, + ETrackedDeviceProperty_Prop_TrackingRangeMinimumMeters_Float = 4004, + ETrackedDeviceProperty_Prop_TrackingRangeMaximumMeters_Float = 4005, + ETrackedDeviceProperty_Prop_ModeLabel_String = 4006, + ETrackedDeviceProperty_Prop_VendorSpecific_Reserved_Start = 10000, + ETrackedDeviceProperty_Prop_VendorSpecific_Reserved_End = 10999, +} ETrackedDeviceProperty; + +typedef enum ETrackedPropertyError +{ + ETrackedPropertyError_TrackedProp_Success = 0, + ETrackedPropertyError_TrackedProp_WrongDataType = 1, + ETrackedPropertyError_TrackedProp_WrongDeviceClass = 2, + ETrackedPropertyError_TrackedProp_BufferTooSmall = 3, + ETrackedPropertyError_TrackedProp_UnknownProperty = 4, + ETrackedPropertyError_TrackedProp_InvalidDevice = 5, + ETrackedPropertyError_TrackedProp_CouldNotContactServer = 6, + ETrackedPropertyError_TrackedProp_ValueNotProvidedByDevice = 7, + ETrackedPropertyError_TrackedProp_StringExceedsMaximumLength = 8, + ETrackedPropertyError_TrackedProp_NotYetAvailable = 9, +} ETrackedPropertyError; + +typedef enum EVRSubmitFlags +{ + EVRSubmitFlags_Submit_Default = 0, + EVRSubmitFlags_Submit_LensDistortionAlreadyApplied = 1, + EVRSubmitFlags_Submit_GlRenderBuffer = 2, +} EVRSubmitFlags; + +typedef enum EVRState +{ + EVRState_VRState_Undefined = -1, + EVRState_VRState_Off = 0, + EVRState_VRState_Searching = 1, + EVRState_VRState_Searching_Alert = 2, + EVRState_VRState_Ready = 3, + EVRState_VRState_Ready_Alert = 4, + EVRState_VRState_NotReady = 5, + EVRState_VRState_Standby = 6, +} EVRState; + +typedef enum EVREventType +{ + EVREventType_VREvent_None = 0, + EVREventType_VREvent_TrackedDeviceActivated = 100, + EVREventType_VREvent_TrackedDeviceDeactivated = 101, + EVREventType_VREvent_TrackedDeviceUpdated = 102, + EVREventType_VREvent_TrackedDeviceUserInteractionStarted = 103, + EVREventType_VREvent_TrackedDeviceUserInteractionEnded = 104, + EVREventType_VREvent_IpdChanged = 105, + EVREventType_VREvent_EnterStandbyMode = 106, + EVREventType_VREvent_LeaveStandbyMode = 107, + EVREventType_VREvent_TrackedDeviceRoleChanged = 108, + EVREventType_VREvent_ButtonPress = 200, + EVREventType_VREvent_ButtonUnpress = 201, + EVREventType_VREvent_ButtonTouch = 202, + EVREventType_VREvent_ButtonUntouch = 203, + EVREventType_VREvent_MouseMove = 300, + EVREventType_VREvent_MouseButtonDown = 301, + EVREventType_VREvent_MouseButtonUp = 302, + EVREventType_VREvent_FocusEnter = 303, + EVREventType_VREvent_FocusLeave = 304, + EVREventType_VREvent_Scroll = 305, + EVREventType_VREvent_TouchPadMove = 306, + EVREventType_VREvent_InputFocusCaptured = 400, + EVREventType_VREvent_InputFocusReleased = 401, + EVREventType_VREvent_SceneFocusLost = 402, + EVREventType_VREvent_SceneFocusGained = 403, + EVREventType_VREvent_SceneApplicationChanged = 404, + EVREventType_VREvent_SceneFocusChanged = 405, + EVREventType_VREvent_InputFocusChanged = 406, + EVREventType_VREvent_SceneApplicationSecondaryRenderingStarted = 407, + EVREventType_VREvent_HideRenderModels = 410, + EVREventType_VREvent_ShowRenderModels = 411, + EVREventType_VREvent_OverlayShown = 500, + EVREventType_VREvent_OverlayHidden = 501, + EVREventType_VREvent_DashboardActivated = 502, + EVREventType_VREvent_DashboardDeactivated = 503, + EVREventType_VREvent_DashboardThumbSelected = 504, + 
EVREventType_VREvent_DashboardRequested = 505, + EVREventType_VREvent_ResetDashboard = 506, + EVREventType_VREvent_RenderToast = 507, + EVREventType_VREvent_ImageLoaded = 508, + EVREventType_VREvent_ShowKeyboard = 509, + EVREventType_VREvent_HideKeyboard = 510, + EVREventType_VREvent_OverlayGamepadFocusGained = 511, + EVREventType_VREvent_OverlayGamepadFocusLost = 512, + EVREventType_VREvent_OverlaySharedTextureChanged = 513, + EVREventType_VREvent_DashboardGuideButtonDown = 514, + EVREventType_VREvent_DashboardGuideButtonUp = 515, + EVREventType_VREvent_ScreenshotTriggered = 516, + EVREventType_VREvent_ImageFailed = 517, + EVREventType_VREvent_RequestScreenshot = 520, + EVREventType_VREvent_ScreenshotTaken = 521, + EVREventType_VREvent_ScreenshotFailed = 522, + EVREventType_VREvent_SubmitScreenshotToDashboard = 523, + EVREventType_VREvent_Notification_Shown = 600, + EVREventType_VREvent_Notification_Hidden = 601, + EVREventType_VREvent_Notification_BeginInteraction = 602, + EVREventType_VREvent_Notification_Destroyed = 603, + EVREventType_VREvent_Quit = 700, + EVREventType_VREvent_ProcessQuit = 701, + EVREventType_VREvent_QuitAborted_UserPrompt = 702, + EVREventType_VREvent_QuitAcknowledged = 703, + EVREventType_VREvent_DriverRequestedQuit = 704, + EVREventType_VREvent_ChaperoneDataHasChanged = 800, + EVREventType_VREvent_ChaperoneUniverseHasChanged = 801, + EVREventType_VREvent_ChaperoneTempDataHasChanged = 802, + EVREventType_VREvent_ChaperoneSettingsHaveChanged = 803, + EVREventType_VREvent_SeatedZeroPoseReset = 804, + EVREventType_VREvent_AudioSettingsHaveChanged = 820, + EVREventType_VREvent_BackgroundSettingHasChanged = 850, + EVREventType_VREvent_CameraSettingsHaveChanged = 851, + EVREventType_VREvent_ReprojectionSettingHasChanged = 852, + EVREventType_VREvent_ModelSkinSettingsHaveChanged = 853, + EVREventType_VREvent_EnvironmentSettingsHaveChanged = 854, + EVREventType_VREvent_StatusUpdate = 900, + EVREventType_VREvent_MCImageUpdated = 1000, + EVREventType_VREvent_FirmwareUpdateStarted = 1100, + EVREventType_VREvent_FirmwareUpdateFinished = 1101, + EVREventType_VREvent_KeyboardClosed = 1200, + EVREventType_VREvent_KeyboardCharInput = 1201, + EVREventType_VREvent_KeyboardDone = 1202, + EVREventType_VREvent_ApplicationTransitionStarted = 1300, + EVREventType_VREvent_ApplicationTransitionAborted = 1301, + EVREventType_VREvent_ApplicationTransitionNewAppStarted = 1302, + EVREventType_VREvent_ApplicationListUpdated = 1303, + EVREventType_VREvent_Compositor_MirrorWindowShown = 1400, + EVREventType_VREvent_Compositor_MirrorWindowHidden = 1401, + EVREventType_VREvent_Compositor_ChaperoneBoundsShown = 1410, + EVREventType_VREvent_Compositor_ChaperoneBoundsHidden = 1411, + EVREventType_VREvent_TrackedCamera_StartVideoStream = 1500, + EVREventType_VREvent_TrackedCamera_StopVideoStream = 1501, + EVREventType_VREvent_TrackedCamera_PauseVideoStream = 1502, + EVREventType_VREvent_TrackedCamera_ResumeVideoStream = 1503, + EVREventType_VREvent_PerformanceTest_EnableCapture = 1600, + EVREventType_VREvent_PerformanceTest_DisableCapture = 1601, + EVREventType_VREvent_PerformanceTest_FidelityLevel = 1602, + EVREventType_VREvent_VendorSpecific_Reserved_Start = 10000, + EVREventType_VREvent_VendorSpecific_Reserved_End = 19999, +} EVREventType; + +typedef enum EDeviceActivityLevel +{ + EDeviceActivityLevel_k_EDeviceActivityLevel_Unknown = -1, + EDeviceActivityLevel_k_EDeviceActivityLevel_Idle = 0, + EDeviceActivityLevel_k_EDeviceActivityLevel_UserInteraction = 1, + 
EDeviceActivityLevel_k_EDeviceActivityLevel_UserInteraction_Timeout = 2, + EDeviceActivityLevel_k_EDeviceActivityLevel_Standby = 3, +} EDeviceActivityLevel; + +typedef enum EVRButtonId +{ + EVRButtonId_k_EButton_System = 0, + EVRButtonId_k_EButton_ApplicationMenu = 1, + EVRButtonId_k_EButton_Grip = 2, + EVRButtonId_k_EButton_DPad_Left = 3, + EVRButtonId_k_EButton_DPad_Up = 4, + EVRButtonId_k_EButton_DPad_Right = 5, + EVRButtonId_k_EButton_DPad_Down = 6, + EVRButtonId_k_EButton_A = 7, + EVRButtonId_k_EButton_Axis0 = 32, + EVRButtonId_k_EButton_Axis1 = 33, + EVRButtonId_k_EButton_Axis2 = 34, + EVRButtonId_k_EButton_Axis3 = 35, + EVRButtonId_k_EButton_Axis4 = 36, + EVRButtonId_k_EButton_SteamVR_Touchpad = 32, + EVRButtonId_k_EButton_SteamVR_Trigger = 33, + EVRButtonId_k_EButton_Dashboard_Back = 2, + EVRButtonId_k_EButton_Max = 64, +} EVRButtonId; + +typedef enum EVRMouseButton +{ + EVRMouseButton_VRMouseButton_Left = 1, + EVRMouseButton_VRMouseButton_Right = 2, + EVRMouseButton_VRMouseButton_Middle = 4, +} EVRMouseButton; + +typedef enum EVRControllerAxisType +{ + EVRControllerAxisType_k_eControllerAxis_None = 0, + EVRControllerAxisType_k_eControllerAxis_TrackPad = 1, + EVRControllerAxisType_k_eControllerAxis_Joystick = 2, + EVRControllerAxisType_k_eControllerAxis_Trigger = 3, +} EVRControllerAxisType; + +typedef enum EVRControllerEventOutputType +{ + EVRControllerEventOutputType_ControllerEventOutput_OSEvents = 0, + EVRControllerEventOutputType_ControllerEventOutput_VREvents = 1, +} EVRControllerEventOutputType; + +typedef enum ECollisionBoundsStyle +{ + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_BEGINNER = 0, + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_INTERMEDIATE = 1, + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_SQUARES = 2, + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_ADVANCED = 3, + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_NONE = 4, + ECollisionBoundsStyle_COLLISION_BOUNDS_STYLE_COUNT = 5, +} ECollisionBoundsStyle; + +typedef enum EVROverlayError +{ + EVROverlayError_VROverlayError_None = 0, + EVROverlayError_VROverlayError_UnknownOverlay = 10, + EVROverlayError_VROverlayError_InvalidHandle = 11, + EVROverlayError_VROverlayError_PermissionDenied = 12, + EVROverlayError_VROverlayError_OverlayLimitExceeded = 13, + EVROverlayError_VROverlayError_WrongVisibilityType = 14, + EVROverlayError_VROverlayError_KeyTooLong = 15, + EVROverlayError_VROverlayError_NameTooLong = 16, + EVROverlayError_VROverlayError_KeyInUse = 17, + EVROverlayError_VROverlayError_WrongTransformType = 18, + EVROverlayError_VROverlayError_InvalidTrackedDevice = 19, + EVROverlayError_VROverlayError_InvalidParameter = 20, + EVROverlayError_VROverlayError_ThumbnailCantBeDestroyed = 21, + EVROverlayError_VROverlayError_ArrayTooSmall = 22, + EVROverlayError_VROverlayError_RequestFailed = 23, + EVROverlayError_VROverlayError_InvalidTexture = 24, + EVROverlayError_VROverlayError_UnableToLoadFile = 25, + EVROverlayError_VROVerlayError_KeyboardAlreadyInUse = 26, + EVROverlayError_VROverlayError_NoNeighbor = 27, +} EVROverlayError; + +typedef enum EVRApplicationType +{ + EVRApplicationType_VRApplication_Other = 0, + EVRApplicationType_VRApplication_Scene = 1, + EVRApplicationType_VRApplication_Overlay = 2, + EVRApplicationType_VRApplication_Background = 3, + EVRApplicationType_VRApplication_Utility = 4, + EVRApplicationType_VRApplication_VRMonitor = 5, +} EVRApplicationType; + +typedef enum EVRFirmwareError +{ + EVRFirmwareError_VRFirmwareError_None = 0, + EVRFirmwareError_VRFirmwareError_Success = 1, + 
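/*
   Usage note (illustration only, not part of the OpenVR header): the EVRButtonId
   values above are bit indices into the ulButtonPressed / ulButtonTouched masks of
   VRControllerState_t (declared further down in this file), so a button test is a
   simple shift-and-mask.  A minimal sketch, assuming a VRControllerState_t named
   "state" that was filled in by the GetControllerState() entry of the IVRSystem
   function table:

       uint64_t triggerMask = 1ull << EVRButtonId_k_EButton_SteamVR_Trigger;
       if ( state.ulButtonPressed & triggerMask )
       {
           // the controller trigger is held down this frame
       }
*/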
EVRFirmwareError_VRFirmwareError_Fail = 2, +} EVRFirmwareError; + +typedef enum EVRNotificationError +{ + EVRNotificationError_VRNotificationError_OK = 0, + EVRNotificationError_VRNotificationError_InvalidNotificationId = 100, + EVRNotificationError_VRNotificationError_NotificationQueueFull = 101, + EVRNotificationError_VRNotificationError_InvalidOverlayHandle = 102, + EVRNotificationError_VRNotificationError_SystemWithUserValueAlreadyExists = 103, +} EVRNotificationError; + +typedef enum EVRInitError +{ + EVRInitError_VRInitError_None = 0, + EVRInitError_VRInitError_Unknown = 1, + EVRInitError_VRInitError_Init_InstallationNotFound = 100, + EVRInitError_VRInitError_Init_InstallationCorrupt = 101, + EVRInitError_VRInitError_Init_VRClientDLLNotFound = 102, + EVRInitError_VRInitError_Init_FileNotFound = 103, + EVRInitError_VRInitError_Init_FactoryNotFound = 104, + EVRInitError_VRInitError_Init_InterfaceNotFound = 105, + EVRInitError_VRInitError_Init_InvalidInterface = 106, + EVRInitError_VRInitError_Init_UserConfigDirectoryInvalid = 107, + EVRInitError_VRInitError_Init_HmdNotFound = 108, + EVRInitError_VRInitError_Init_NotInitialized = 109, + EVRInitError_VRInitError_Init_PathRegistryNotFound = 110, + EVRInitError_VRInitError_Init_NoConfigPath = 111, + EVRInitError_VRInitError_Init_NoLogPath = 112, + EVRInitError_VRInitError_Init_PathRegistryNotWritable = 113, + EVRInitError_VRInitError_Init_AppInfoInitFailed = 114, + EVRInitError_VRInitError_Init_Retry = 115, + EVRInitError_VRInitError_Init_InitCanceledByUser = 116, + EVRInitError_VRInitError_Init_AnotherAppLaunching = 117, + EVRInitError_VRInitError_Init_SettingsInitFailed = 118, + EVRInitError_VRInitError_Init_ShuttingDown = 119, + EVRInitError_VRInitError_Init_TooManyObjects = 120, + EVRInitError_VRInitError_Init_NoServerForBackgroundApp = 121, + EVRInitError_VRInitError_Init_NotSupportedWithCompositor = 122, + EVRInitError_VRInitError_Init_NotAvailableToUtilityApps = 123, + EVRInitError_VRInitError_Init_Internal = 124, + EVRInitError_VRInitError_Driver_Failed = 200, + EVRInitError_VRInitError_Driver_Unknown = 201, + EVRInitError_VRInitError_Driver_HmdUnknown = 202, + EVRInitError_VRInitError_Driver_NotLoaded = 203, + EVRInitError_VRInitError_Driver_RuntimeOutOfDate = 204, + EVRInitError_VRInitError_Driver_HmdInUse = 205, + EVRInitError_VRInitError_Driver_NotCalibrated = 206, + EVRInitError_VRInitError_Driver_CalibrationInvalid = 207, + EVRInitError_VRInitError_Driver_HmdDisplayNotFound = 208, + EVRInitError_VRInitError_IPC_ServerInitFailed = 300, + EVRInitError_VRInitError_IPC_ConnectFailed = 301, + EVRInitError_VRInitError_IPC_SharedStateInitFailed = 302, + EVRInitError_VRInitError_IPC_CompositorInitFailed = 303, + EVRInitError_VRInitError_IPC_MutexInitFailed = 304, + EVRInitError_VRInitError_IPC_Failed = 305, + EVRInitError_VRInitError_Compositor_Failed = 400, + EVRInitError_VRInitError_Compositor_D3D11HardwareRequired = 401, + EVRInitError_VRInitError_Compositor_FirmwareRequiresUpdate = 402, + EVRInitError_VRInitError_Compositor_OverlayInitFailed = 403, + EVRInitError_VRInitError_Compositor_ScreenshotsInitFailed = 404, + EVRInitError_VRInitError_VendorSpecific_UnableToConnectToOculusRuntime = 1000, + EVRInitError_VRInitError_VendorSpecific_HmdFound_CantOpenDevice = 1101, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UnableToRequestConfigStart = 1102, + EVRInitError_VRInitError_VendorSpecific_HmdFound_NoStoredConfig = 1103, + EVRInitError_VRInitError_VendorSpecific_HmdFound_ConfigTooBig = 1104, + 
EVRInitError_VRInitError_VendorSpecific_HmdFound_ConfigTooSmall = 1105, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UnableToInitZLib = 1106, + EVRInitError_VRInitError_VendorSpecific_HmdFound_CantReadFirmwareVersion = 1107, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UnableToSendUserDataStart = 1108, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataStart = 1109, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataNext = 1110, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UserDataAddressRange = 1111, + EVRInitError_VRInitError_VendorSpecific_HmdFound_UserDataError = 1112, + EVRInitError_VRInitError_VendorSpecific_HmdFound_ConfigFailedSanityCheck = 1113, + EVRInitError_VRInitError_Steam_SteamInstallationNotFound = 2000, +} EVRInitError; + +typedef enum EVRScreenshotType +{ + EVRScreenshotType_VRScreenshotType_None = 0, + EVRScreenshotType_VRScreenshotType_Mono = 1, + EVRScreenshotType_VRScreenshotType_Stereo = 2, + EVRScreenshotType_VRScreenshotType_Cubemap = 3, + EVRScreenshotType_VRScreenshotType_MonoPanorama = 4, + EVRScreenshotType_VRScreenshotType_StereoPanorama = 5, +} EVRScreenshotType; + +typedef enum EVRScreenshotPropertyFilenames +{ + EVRScreenshotPropertyFilenames_VRScreenshotPropertyFilenames_Preview = 0, + EVRScreenshotPropertyFilenames_VRScreenshotPropertyFilenames_VR = 1, +} EVRScreenshotPropertyFilenames; + +typedef enum EVRTrackedCameraError +{ + EVRTrackedCameraError_VRTrackedCameraError_None = 0, + EVRTrackedCameraError_VRTrackedCameraError_OperationFailed = 100, + EVRTrackedCameraError_VRTrackedCameraError_InvalidHandle = 101, + EVRTrackedCameraError_VRTrackedCameraError_InvalidFrameHeaderVersion = 102, + EVRTrackedCameraError_VRTrackedCameraError_OutOfHandles = 103, + EVRTrackedCameraError_VRTrackedCameraError_IPCFailure = 104, + EVRTrackedCameraError_VRTrackedCameraError_NotSupportedForThisDevice = 105, + EVRTrackedCameraError_VRTrackedCameraError_SharedMemoryFailure = 106, + EVRTrackedCameraError_VRTrackedCameraError_FrameBufferingFailure = 107, + EVRTrackedCameraError_VRTrackedCameraError_StreamSetupFailure = 108, + EVRTrackedCameraError_VRTrackedCameraError_InvalidGLTextureId = 109, + EVRTrackedCameraError_VRTrackedCameraError_InvalidSharedTextureHandle = 110, + EVRTrackedCameraError_VRTrackedCameraError_FailedToGetGLTextureId = 111, + EVRTrackedCameraError_VRTrackedCameraError_SharedTextureFailure = 112, + EVRTrackedCameraError_VRTrackedCameraError_NoFrameAvailable = 113, + EVRTrackedCameraError_VRTrackedCameraError_InvalidArgument = 114, + EVRTrackedCameraError_VRTrackedCameraError_InvalidFrameBufferSize = 115, +} EVRTrackedCameraError; + +typedef enum EVRTrackedCameraFrameType +{ + EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Distorted = 0, + EVRTrackedCameraFrameType_VRTrackedCameraFrameType_Undistorted = 1, + EVRTrackedCameraFrameType_VRTrackedCameraFrameType_MaximumUndistorted = 2, + EVRTrackedCameraFrameType_MAX_CAMERA_FRAME_TYPES = 3, +} EVRTrackedCameraFrameType; + +typedef enum EVRApplicationError +{ + EVRApplicationError_VRApplicationError_None = 0, + EVRApplicationError_VRApplicationError_AppKeyAlreadyExists = 100, + EVRApplicationError_VRApplicationError_NoManifest = 101, + EVRApplicationError_VRApplicationError_NoApplication = 102, + EVRApplicationError_VRApplicationError_InvalidIndex = 103, + EVRApplicationError_VRApplicationError_UnknownApplication = 104, + EVRApplicationError_VRApplicationError_IPCFailed = 105, + EVRApplicationError_VRApplicationError_ApplicationAlreadyRunning = 106, + 
EVRApplicationError_VRApplicationError_InvalidManifest = 107, + EVRApplicationError_VRApplicationError_InvalidApplication = 108, + EVRApplicationError_VRApplicationError_LaunchFailed = 109, + EVRApplicationError_VRApplicationError_ApplicationAlreadyStarting = 110, + EVRApplicationError_VRApplicationError_LaunchInProgress = 111, + EVRApplicationError_VRApplicationError_OldApplicationQuitting = 112, + EVRApplicationError_VRApplicationError_TransitionAborted = 113, + EVRApplicationError_VRApplicationError_IsTemplate = 114, + EVRApplicationError_VRApplicationError_BufferTooSmall = 200, + EVRApplicationError_VRApplicationError_PropertyNotSet = 201, + EVRApplicationError_VRApplicationError_UnknownProperty = 202, + EVRApplicationError_VRApplicationError_InvalidParameter = 203, +} EVRApplicationError; + +typedef enum EVRApplicationProperty +{ + EVRApplicationProperty_VRApplicationProperty_Name_String = 0, + EVRApplicationProperty_VRApplicationProperty_LaunchType_String = 11, + EVRApplicationProperty_VRApplicationProperty_WorkingDirectory_String = 12, + EVRApplicationProperty_VRApplicationProperty_BinaryPath_String = 13, + EVRApplicationProperty_VRApplicationProperty_Arguments_String = 14, + EVRApplicationProperty_VRApplicationProperty_URL_String = 15, + EVRApplicationProperty_VRApplicationProperty_Description_String = 50, + EVRApplicationProperty_VRApplicationProperty_NewsURL_String = 51, + EVRApplicationProperty_VRApplicationProperty_ImagePath_String = 52, + EVRApplicationProperty_VRApplicationProperty_Source_String = 53, + EVRApplicationProperty_VRApplicationProperty_IsDashboardOverlay_Bool = 60, + EVRApplicationProperty_VRApplicationProperty_IsTemplate_Bool = 61, + EVRApplicationProperty_VRApplicationProperty_IsInstanced_Bool = 62, + EVRApplicationProperty_VRApplicationProperty_LastLaunchTime_Uint64 = 70, +} EVRApplicationProperty; + +typedef enum EVRApplicationTransitionState +{ + EVRApplicationTransitionState_VRApplicationTransition_None = 0, + EVRApplicationTransitionState_VRApplicationTransition_OldAppQuitSent = 10, + EVRApplicationTransitionState_VRApplicationTransition_WaitingForExternalLaunch = 11, + EVRApplicationTransitionState_VRApplicationTransition_NewAppLaunched = 20, +} EVRApplicationTransitionState; + +typedef enum ChaperoneCalibrationState +{ + ChaperoneCalibrationState_OK = 1, + ChaperoneCalibrationState_Warning = 100, + ChaperoneCalibrationState_Warning_BaseStationMayHaveMoved = 101, + ChaperoneCalibrationState_Warning_BaseStationRemoved = 102, + ChaperoneCalibrationState_Warning_SeatedBoundsInvalid = 103, + ChaperoneCalibrationState_Error = 200, + ChaperoneCalibrationState_Error_BaseStationUninitalized = 201, + ChaperoneCalibrationState_Error_BaseStationConflict = 202, + ChaperoneCalibrationState_Error_PlayAreaInvalid = 203, + ChaperoneCalibrationState_Error_CollisionBoundsInvalid = 204, +} ChaperoneCalibrationState; + +typedef enum EChaperoneConfigFile +{ + EChaperoneConfigFile_Live = 1, + EChaperoneConfigFile_Temp = 2, +} EChaperoneConfigFile; + +typedef enum EChaperoneImportFlags +{ + EChaperoneImportFlags_EChaperoneImport_BoundsOnly = 1, +} EChaperoneImportFlags; + +typedef enum EVRCompositorError +{ + EVRCompositorError_VRCompositorError_None = 0, + EVRCompositorError_VRCompositorError_RequestFailed = 1, + EVRCompositorError_VRCompositorError_IncompatibleVersion = 100, + EVRCompositorError_VRCompositorError_DoNotHaveFocus = 101, + EVRCompositorError_VRCompositorError_InvalidTexture = 102, + EVRCompositorError_VRCompositorError_IsNotSceneApplication = 103, + 
EVRCompositorError_VRCompositorError_TextureIsOnWrongDevice = 104, + EVRCompositorError_VRCompositorError_TextureUsesUnsupportedFormat = 105, + EVRCompositorError_VRCompositorError_SharedTexturesNotSupported = 106, + EVRCompositorError_VRCompositorError_IndexOutOfRange = 107, +} EVRCompositorError; + +typedef enum VROverlayInputMethod +{ + VROverlayInputMethod_None = 0, + VROverlayInputMethod_Mouse = 1, +} VROverlayInputMethod; + +typedef enum VROverlayTransformType +{ + VROverlayTransformType_VROverlayTransform_Absolute = 0, + VROverlayTransformType_VROverlayTransform_TrackedDeviceRelative = 1, + VROverlayTransformType_VROverlayTransform_SystemOverlay = 2, + VROverlayTransformType_VROverlayTransform_TrackedComponent = 3, +} VROverlayTransformType; + +typedef enum VROverlayFlags +{ + VROverlayFlags_None = 0, + VROverlayFlags_Curved = 1, + VROverlayFlags_RGSS4X = 2, + VROverlayFlags_NoDashboardTab = 3, + VROverlayFlags_AcceptsGamepadEvents = 4, + VROverlayFlags_ShowGamepadFocus = 5, + VROverlayFlags_SendVRScrollEvents = 6, + VROverlayFlags_SendVRTouchpadEvents = 7, + VROverlayFlags_ShowTouchPadScrollWheel = 8, + VROverlayFlags_TransferOwnershipToInternalProcess = 9, + VROverlayFlags_SideBySide_Parallel = 10, + VROverlayFlags_SideBySide_Crossed = 11, + VROverlayFlags_Panorama = 12, + VROverlayFlags_StereoPanorama = 13, +} VROverlayFlags; + +typedef enum EGamepadTextInputMode +{ + EGamepadTextInputMode_k_EGamepadTextInputModeNormal = 0, + EGamepadTextInputMode_k_EGamepadTextInputModePassword = 1, + EGamepadTextInputMode_k_EGamepadTextInputModeSubmit = 2, +} EGamepadTextInputMode; + +typedef enum EGamepadTextInputLineMode +{ + EGamepadTextInputLineMode_k_EGamepadTextInputLineModeSingleLine = 0, + EGamepadTextInputLineMode_k_EGamepadTextInputLineModeMultipleLines = 1, +} EGamepadTextInputLineMode; + +typedef enum EOverlayDirection +{ + EOverlayDirection_OverlayDirection_Up = 0, + EOverlayDirection_OverlayDirection_Down = 1, + EOverlayDirection_OverlayDirection_Left = 2, + EOverlayDirection_OverlayDirection_Right = 3, + EOverlayDirection_OverlayDirection_Count = 4, +} EOverlayDirection; + +typedef enum EVRRenderModelError +{ + EVRRenderModelError_VRRenderModelError_None = 0, + EVRRenderModelError_VRRenderModelError_Loading = 100, + EVRRenderModelError_VRRenderModelError_NotSupported = 200, + EVRRenderModelError_VRRenderModelError_InvalidArg = 300, + EVRRenderModelError_VRRenderModelError_InvalidModel = 301, + EVRRenderModelError_VRRenderModelError_NoShapes = 302, + EVRRenderModelError_VRRenderModelError_MultipleShapes = 303, + EVRRenderModelError_VRRenderModelError_TooManyVertices = 304, + EVRRenderModelError_VRRenderModelError_MultipleTextures = 305, + EVRRenderModelError_VRRenderModelError_BufferTooSmall = 306, + EVRRenderModelError_VRRenderModelError_NotEnoughNormals = 307, + EVRRenderModelError_VRRenderModelError_NotEnoughTexCoords = 308, + EVRRenderModelError_VRRenderModelError_InvalidTexture = 400, +} EVRRenderModelError; + +typedef enum EVRComponentProperty +{ + EVRComponentProperty_VRComponentProperty_IsStatic = 1, + EVRComponentProperty_VRComponentProperty_IsVisible = 2, + EVRComponentProperty_VRComponentProperty_IsTouched = 4, + EVRComponentProperty_VRComponentProperty_IsPressed = 8, + EVRComponentProperty_VRComponentProperty_IsScrolled = 16, +} EVRComponentProperty; + +typedef enum EVRNotificationType +{ + EVRNotificationType_Transient = 0, + EVRNotificationType_Persistent = 1, + EVRNotificationType_Transient_SystemWithUserValue = 2, +} EVRNotificationType; + +typedef enum 
EVRNotificationStyle +{ + EVRNotificationStyle_None = 0, + EVRNotificationStyle_Application = 100, + EVRNotificationStyle_Contact_Disabled = 200, + EVRNotificationStyle_Contact_Enabled = 201, + EVRNotificationStyle_Contact_Active = 202, +} EVRNotificationStyle; + +typedef enum EVRSettingsError +{ + EVRSettingsError_VRSettingsError_None = 0, + EVRSettingsError_VRSettingsError_IPCFailed = 1, + EVRSettingsError_VRSettingsError_WriteFailed = 2, + EVRSettingsError_VRSettingsError_ReadFailed = 3, +} EVRSettingsError; + +typedef enum EVRScreenshotError +{ + EVRScreenshotError_VRScreenshotError_None = 0, + EVRScreenshotError_VRScreenshotError_RequestFailed = 1, + EVRScreenshotError_VRScreenshotError_IncompatibleVersion = 100, + EVRScreenshotError_VRScreenshotError_NotFound = 101, + EVRScreenshotError_VRScreenshotError_BufferTooSmall = 102, + EVRScreenshotError_VRScreenshotError_ScreenshotAlreadyInProgress = 108, +} EVRScreenshotError; + + +// OpenVR typedefs + +typedef uint32_t TrackedDeviceIndex_t; +typedef uint32_t VRNotificationId; +typedef uint64_t VROverlayHandle_t; +typedef void * glSharedTextureHandle_t; +typedef int32_t glInt_t; +typedef uint32_t glUInt_t; +typedef uint32_t TrackedDeviceIndex_t; +typedef uint64_t VROverlayHandle_t; +typedef uint64_t TrackedCameraHandle_t; +typedef uint32_t ScreenshotHandle_t; +typedef uint32_t VRComponentProperties; +typedef int32_t TextureID_t; +typedef uint32_t VRNotificationId; +typedef EVRInitError HmdError; +typedef EVREye Hmd_Eye; +typedef EGraphicsAPIConvention GraphicsAPIConvention; +typedef EColorSpace ColorSpace; +typedef ETrackingResult HmdTrackingResult; +typedef ETrackedDeviceClass TrackedDeviceClass; +typedef ETrackingUniverseOrigin TrackingUniverseOrigin; +typedef ETrackedDeviceProperty TrackedDeviceProperty; +typedef ETrackedPropertyError TrackedPropertyError; +typedef EVRSubmitFlags VRSubmitFlags_t; +typedef EVRState VRState_t; +typedef ECollisionBoundsStyle CollisionBoundsStyle_t; +typedef EVROverlayError VROverlayError; +typedef EVRFirmwareError VRFirmwareError; +typedef EVRCompositorError VRCompositorError; +typedef EVRScreenshotError VRScreenshotsError; + +// OpenVR Structs + +typedef struct HmdMatrix34_t +{ + float m[3][4]; //float[3][4] +} HmdMatrix34_t; + +typedef struct HmdMatrix44_t +{ + float m[4][4]; //float[4][4] +} HmdMatrix44_t; + +typedef struct HmdVector3_t +{ + float v[3]; //float[3] +} HmdVector3_t; + +typedef struct HmdVector4_t +{ + float v[4]; //float[4] +} HmdVector4_t; + +typedef struct HmdVector3d_t +{ + double v[3]; //double[3] +} HmdVector3d_t; + +typedef struct HmdVector2_t +{ + float v[2]; //float[2] +} HmdVector2_t; + +typedef struct HmdQuaternion_t +{ + double w; + double x; + double y; + double z; +} HmdQuaternion_t; + +typedef struct HmdColor_t +{ + float r; + float g; + float b; + float a; +} HmdColor_t; + +typedef struct HmdQuad_t +{ + struct HmdVector3_t vCorners[4]; //struct vr::HmdVector3_t[4] +} HmdQuad_t; + +typedef struct HmdRect2_t +{ + struct HmdVector2_t vTopLeft; + struct HmdVector2_t vBottomRight; +} HmdRect2_t; + +typedef struct DistortionCoordinates_t +{ + float rfRed[2]; //float[2] + float rfGreen[2]; //float[2] + float rfBlue[2]; //float[2] +} DistortionCoordinates_t; + +typedef struct Texture_t +{ + void * handle; // void * + enum EGraphicsAPIConvention eType; + enum EColorSpace eColorSpace; +} Texture_t; + +typedef struct TrackedDevicePose_t +{ + struct HmdMatrix34_t mDeviceToAbsoluteTracking; + struct HmdVector3_t vVelocity; + struct HmdVector3_t vAngularVelocity; + enum ETrackingResult 
eTrackingResult; + bool bPoseIsValid; + bool bDeviceIsConnected; +} TrackedDevicePose_t; + +typedef struct VRTextureBounds_t +{ + float uMin; + float vMin; + float uMax; + float vMax; +} VRTextureBounds_t; + +typedef struct VREvent_Controller_t +{ + uint32_t button; +} VREvent_Controller_t; + +typedef struct VREvent_Mouse_t +{ + float x; + float y; + uint32_t button; +} VREvent_Mouse_t; + +typedef struct VREvent_Scroll_t +{ + float xdelta; + float ydelta; + uint32_t repeatCount; +} VREvent_Scroll_t; + +typedef struct VREvent_TouchPadMove_t +{ + bool bFingerDown; + float flSecondsFingerDown; + float fValueXFirst; + float fValueYFirst; + float fValueXRaw; + float fValueYRaw; +} VREvent_TouchPadMove_t; + +typedef struct VREvent_Notification_t +{ + uint64_t ulUserValue; + uint32_t notificationId; +} VREvent_Notification_t; + +typedef struct VREvent_Process_t +{ + uint32_t pid; + uint32_t oldPid; + bool bForced; +} VREvent_Process_t; + +typedef struct VREvent_Overlay_t +{ + uint64_t overlayHandle; +} VREvent_Overlay_t; + +typedef struct VREvent_Status_t +{ + uint32_t statusState; +} VREvent_Status_t; + +typedef struct VREvent_Keyboard_t +{ + char * cNewInput[8]; //char[8] + uint64_t uUserValue; +} VREvent_Keyboard_t; + +typedef struct VREvent_Ipd_t +{ + float ipdMeters; +} VREvent_Ipd_t; + +typedef struct VREvent_Chaperone_t +{ + uint64_t m_nPreviousUniverse; + uint64_t m_nCurrentUniverse; +} VREvent_Chaperone_t; + +typedef struct VREvent_Reserved_t +{ + uint64_t reserved0; + uint64_t reserved1; +} VREvent_Reserved_t; + +typedef struct VREvent_PerformanceTest_t +{ + uint32_t m_nFidelityLevel; +} VREvent_PerformanceTest_t; + +typedef struct VREvent_SeatedZeroPoseReset_t +{ + bool bResetBySystemMenu; +} VREvent_SeatedZeroPoseReset_t; + +typedef struct VREvent_Screenshot_t +{ + uint32_t handle; + uint32_t type; +} VREvent_Screenshot_t; + +typedef struct HiddenAreaMesh_t +{ + struct HmdVector2_t * pVertexData; // const struct vr::HmdVector2_t * + uint32_t unTriangleCount; +} HiddenAreaMesh_t; + +typedef struct VRControllerAxis_t +{ + float x; + float y; +} VRControllerAxis_t; + +typedef struct VRControllerState_t +{ + uint32_t unPacketNum; + uint64_t ulButtonPressed; + uint64_t ulButtonTouched; + struct VRControllerAxis_t rAxis[5]; //struct vr::VRControllerAxis_t[5] +} VRControllerState_t; + +typedef struct Compositor_OverlaySettings +{ + uint32_t size; + bool curved; + bool antialias; + float scale; + float distance; + float alpha; + float uOffset; + float vOffset; + float uScale; + float vScale; + float gridDivs; + float gridWidth; + float gridScale; + struct HmdMatrix44_t transform; +} Compositor_OverlaySettings; + +typedef struct CameraVideoStreamFrameHeader_t +{ + enum EVRTrackedCameraFrameType eFrameType; + uint32_t nWidth; + uint32_t nHeight; + uint32_t nBytesPerPixel; + uint32_t nFrameSequence; + struct TrackedDevicePose_t standingTrackedDevicePose; +} CameraVideoStreamFrameHeader_t; + +typedef struct AppOverrideKeys_t +{ + char * pchKey; // const char * + char * pchValue; // const char * +} AppOverrideKeys_t; + +typedef struct Compositor_FrameTiming +{ + uint32_t m_nSize; + uint32_t m_nFrameIndex; + uint32_t m_nNumFramePresents; + uint32_t m_nNumDroppedFrames; + double m_flSystemTimeInSeconds; + float m_flSceneRenderGpuMs; + float m_flTotalRenderGpuMs; + float m_flCompositorRenderGpuMs; + float m_flCompositorRenderCpuMs; + float m_flCompositorIdleCpuMs; + float m_flClientFrameIntervalMs; + float m_flPresentCallCpuMs; + float m_flWaitForPresentCpuMs; + float m_flSubmitFrameMs; + float 
m_flWaitGetPosesCalledMs; + float m_flNewPosesReadyMs; + float m_flNewFrameReadyMs; + float m_flCompositorUpdateStartMs; + float m_flCompositorUpdateEndMs; + float m_flCompositorRenderStartMs; + TrackedDevicePose_t m_HmdPose; + int32_t m_nFidelityLevel; + uint32_t m_nReprojectionFlags; +} Compositor_FrameTiming; + +typedef struct Compositor_CumulativeStats +{ + uint32_t m_nPid; + uint32_t m_nNumFramePresents; + uint32_t m_nNumDroppedFrames; + uint32_t m_nNumReprojectedFrames; + uint32_t m_nNumFramePresentsOnStartup; + uint32_t m_nNumDroppedFramesOnStartup; + uint32_t m_nNumReprojectedFramesOnStartup; + uint32_t m_nNumLoading; + uint32_t m_nNumFramePresentsLoading; + uint32_t m_nNumDroppedFramesLoading; + uint32_t m_nNumReprojectedFramesLoading; + uint32_t m_nNumTimedOut; + uint32_t m_nNumFramePresentsTimedOut; + uint32_t m_nNumDroppedFramesTimedOut; + uint32_t m_nNumReprojectedFramesTimedOut; +} Compositor_CumulativeStats; + +typedef struct VROverlayIntersectionParams_t +{ + struct HmdVector3_t vSource; + struct HmdVector3_t vDirection; + enum ETrackingUniverseOrigin eOrigin; +} VROverlayIntersectionParams_t; + +typedef struct VROverlayIntersectionResults_t +{ + struct HmdVector3_t vPoint; + struct HmdVector3_t vNormal; + struct HmdVector2_t vUVs; + float fDistance; +} VROverlayIntersectionResults_t; + +typedef struct RenderModel_ComponentState_t +{ + struct HmdMatrix34_t mTrackingToComponentRenderModel; + struct HmdMatrix34_t mTrackingToComponentLocal; + VRComponentProperties uProperties; +} RenderModel_ComponentState_t; + +typedef struct RenderModel_Vertex_t +{ + struct HmdVector3_t vPosition; + struct HmdVector3_t vNormal; + float rfTextureCoord[2]; //float[2] +} RenderModel_Vertex_t; + +typedef struct RenderModel_TextureMap_t +{ + uint16_t unWidth; + uint16_t unHeight; + uint8_t * rubTextureMapData; // const uint8_t * +} RenderModel_TextureMap_t; + +typedef struct RenderModel_t +{ + struct RenderModel_Vertex_t * rVertexData; // const struct vr::RenderModel_Vertex_t * + uint32_t unVertexCount; + uint16_t * rIndexData; // const uint16_t * + uint32_t unTriangleCount; + TextureID_t diffuseTextureId; +} RenderModel_t; + +typedef struct RenderModel_ControllerMode_State_t +{ + bool bScrollWheelVisible; +} RenderModel_ControllerMode_State_t; + +typedef struct NotificationBitmap_t +{ + void * m_pImageData; // void * + int32_t m_nWidth; + int32_t m_nHeight; + int32_t m_nBytesPerPixel; +} NotificationBitmap_t; + +typedef struct COpenVRContext +{ + intptr_t m_pVRSystem; // class vr::IVRSystem * + intptr_t m_pVRChaperone; // class vr::IVRChaperone * + intptr_t m_pVRChaperoneSetup; // class vr::IVRChaperoneSetup * + intptr_t m_pVRCompositor; // class vr::IVRCompositor * + intptr_t m_pVROverlay; // class vr::IVROverlay * + intptr_t m_pVRRenderModels; // class vr::IVRRenderModels * + intptr_t m_pVRExtendedDisplay; // class vr::IVRExtendedDisplay * + intptr_t m_pVRSettings; // class vr::IVRSettings * + intptr_t m_pVRApplications; // class vr::IVRApplications * + intptr_t m_pVRTrackedCamera; // class vr::IVRTrackedCamera * + intptr_t m_pVRScreenshots; // class vr::IVRScreenshots * +} COpenVRContext; + + +typedef union +{ + VREvent_Reserved_t reserved; + VREvent_Controller_t controller; + VREvent_Mouse_t mouse; + VREvent_Scroll_t scroll; + VREvent_Process_t process; + VREvent_Notification_t notification; + VREvent_Overlay_t overlay; + VREvent_Status_t status; + VREvent_Keyboard_t keyboard; + VREvent_Ipd_t ipd; + VREvent_Chaperone_t chaperone; + VREvent_PerformanceTest_t performanceTest; + 
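/*
   Usage note (illustration only, not part of the OpenVR header): the
   mDeviceToAbsoluteTracking member of TrackedDevicePose_t above is an
   HmdMatrix34_t, a row-major 3x4 pose matrix with the translation in the last
   column.  A minimal sketch of converting it to the column-major 4x4 float
   array that OpenGL expects:

       static void HmdMatrix34ToOpenGL( const struct HmdMatrix34_t *pose, float out[16] )
       {
           int r, c;
           for ( c = 0; c < 4; c++ )
           {
               for ( r = 0; r < 3; r++ )
                   out[c * 4 + r] = pose->m[r][c];          // rotation, and translation for c == 3
               out[c * 4 + 3] = ( c == 3 ) ? 1.0f : 0.0f;   // bottom row of the homogeneous 4x4
           }
       }
*/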
VREvent_TouchPadMove_t touchPadMove; + VREvent_SeatedZeroPoseReset_t seatedZeroPoseReset; +} VREvent_Data_t; + +/** An event posted by the server to all running applications */ +struct VREvent_t +{ + uint32_t eventType; // EVREventType enum + TrackedDeviceIndex_t trackedDeviceIndex; + float eventAgeSeconds; + // event data must be the end of the struct as its size is variable + VREvent_Data_t data; +}; + + +// OpenVR Function Pointer Tables + +struct VR_IVRSystem_FnTable +{ + void (OPENVR_FNTABLE_CALLTYPE *GetRecommendedRenderTargetSize)(uint32_t * pnWidth, uint32_t * pnHeight); + struct HmdMatrix44_t (OPENVR_FNTABLE_CALLTYPE *GetProjectionMatrix)(EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType); + void (OPENVR_FNTABLE_CALLTYPE *GetProjectionRaw)(EVREye eEye, float * pfLeft, float * pfRight, float * pfTop, float * pfBottom); + struct DistortionCoordinates_t (OPENVR_FNTABLE_CALLTYPE *ComputeDistortion)(EVREye eEye, float fU, float fV); + struct HmdMatrix34_t (OPENVR_FNTABLE_CALLTYPE *GetEyeToHeadTransform)(EVREye eEye); + bool (OPENVR_FNTABLE_CALLTYPE *GetTimeSinceLastVsync)(float * pfSecondsSinceLastVsync, uint64_t * pulFrameCounter); + int32_t (OPENVR_FNTABLE_CALLTYPE *GetD3D9AdapterIndex)(); + void (OPENVR_FNTABLE_CALLTYPE *GetDXGIOutputInfo)(int32_t * pnAdapterIndex); + bool (OPENVR_FNTABLE_CALLTYPE *IsDisplayOnDesktop)(); + bool (OPENVR_FNTABLE_CALLTYPE *SetDisplayVisibility)(bool bIsVisibleOnDesktop); + void (OPENVR_FNTABLE_CALLTYPE *GetDeviceToAbsoluteTrackingPose)(ETrackingUniverseOrigin eOrigin, float fPredictedSecondsToPhotonsFromNow, struct TrackedDevicePose_t * pTrackedDevicePoseArray, uint32_t unTrackedDevicePoseArrayCount); + void (OPENVR_FNTABLE_CALLTYPE *ResetSeatedZeroPose)(); + struct HmdMatrix34_t (OPENVR_FNTABLE_CALLTYPE *GetSeatedZeroPoseToStandingAbsoluteTrackingPose)(); + struct HmdMatrix34_t (OPENVR_FNTABLE_CALLTYPE *GetRawZeroPoseToStandingAbsoluteTrackingPose)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetSortedTrackedDeviceIndicesOfClass)(ETrackedDeviceClass eTrackedDeviceClass, TrackedDeviceIndex_t * punTrackedDeviceIndexArray, uint32_t unTrackedDeviceIndexArrayCount, TrackedDeviceIndex_t unRelativeToTrackedDeviceIndex); + EDeviceActivityLevel (OPENVR_FNTABLE_CALLTYPE *GetTrackedDeviceActivityLevel)(TrackedDeviceIndex_t unDeviceId); + void (OPENVR_FNTABLE_CALLTYPE *ApplyTransform)(struct TrackedDevicePose_t * pOutputPose, struct TrackedDevicePose_t * pTrackedDevicePose, struct HmdMatrix34_t * pTransform); + TrackedDeviceIndex_t (OPENVR_FNTABLE_CALLTYPE *GetTrackedDeviceIndexForControllerRole)(ETrackedControllerRole unDeviceType); + ETrackedControllerRole (OPENVR_FNTABLE_CALLTYPE *GetControllerRoleForTrackedDeviceIndex)(TrackedDeviceIndex_t unDeviceIndex); + ETrackedDeviceClass (OPENVR_FNTABLE_CALLTYPE *GetTrackedDeviceClass)(TrackedDeviceIndex_t unDeviceIndex); + bool (OPENVR_FNTABLE_CALLTYPE *IsTrackedDeviceConnected)(TrackedDeviceIndex_t unDeviceIndex); + bool (OPENVR_FNTABLE_CALLTYPE *GetBoolTrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError * pError); + float (OPENVR_FNTABLE_CALLTYPE *GetFloatTrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError * pError); + int32_t (OPENVR_FNTABLE_CALLTYPE *GetInt32TrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError * pError); + uint64_t (OPENVR_FNTABLE_CALLTYPE *GetUint64TrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, 
ETrackedDeviceProperty prop, ETrackedPropertyError * pError); + struct HmdMatrix34_t (OPENVR_FNTABLE_CALLTYPE *GetMatrix34TrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError * pError); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetStringTrackedDeviceProperty)(TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, char * pchValue, uint32_t unBufferSize, ETrackedPropertyError * pError); + char * (OPENVR_FNTABLE_CALLTYPE *GetPropErrorNameFromEnum)(ETrackedPropertyError error); + bool (OPENVR_FNTABLE_CALLTYPE *PollNextEvent)(struct VREvent_t * pEvent, uint32_t uncbVREvent); + bool (OPENVR_FNTABLE_CALLTYPE *PollNextEventWithPose)(ETrackingUniverseOrigin eOrigin, struct VREvent_t * pEvent, uint32_t uncbVREvent, TrackedDevicePose_t * pTrackedDevicePose); + char * (OPENVR_FNTABLE_CALLTYPE *GetEventTypeNameFromEnum)(EVREventType eType); + struct HiddenAreaMesh_t (OPENVR_FNTABLE_CALLTYPE *GetHiddenAreaMesh)(EVREye eEye); + bool (OPENVR_FNTABLE_CALLTYPE *GetControllerState)(TrackedDeviceIndex_t unControllerDeviceIndex, VRControllerState_t * pControllerState); + bool (OPENVR_FNTABLE_CALLTYPE *GetControllerStateWithPose)(ETrackingUniverseOrigin eOrigin, TrackedDeviceIndex_t unControllerDeviceIndex, VRControllerState_t * pControllerState, struct TrackedDevicePose_t * pTrackedDevicePose); + void (OPENVR_FNTABLE_CALLTYPE *TriggerHapticPulse)(TrackedDeviceIndex_t unControllerDeviceIndex, uint32_t unAxisId, unsigned short usDurationMicroSec); + char * (OPENVR_FNTABLE_CALLTYPE *GetButtonIdNameFromEnum)(EVRButtonId eButtonId); + char * (OPENVR_FNTABLE_CALLTYPE *GetControllerAxisTypeNameFromEnum)(EVRControllerAxisType eAxisType); + bool (OPENVR_FNTABLE_CALLTYPE *CaptureInputFocus)(); + void (OPENVR_FNTABLE_CALLTYPE *ReleaseInputFocus)(); + bool (OPENVR_FNTABLE_CALLTYPE *IsInputFocusCapturedByAnotherProcess)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *DriverDebugRequest)(TrackedDeviceIndex_t unDeviceIndex, char * pchRequest, char * pchResponseBuffer, uint32_t unResponseBufferSize); + EVRFirmwareError (OPENVR_FNTABLE_CALLTYPE *PerformFirmwareUpdate)(TrackedDeviceIndex_t unDeviceIndex); + void (OPENVR_FNTABLE_CALLTYPE *AcknowledgeQuit_Exiting)(); + void (OPENVR_FNTABLE_CALLTYPE *AcknowledgeQuit_UserPrompt)(); +}; + +struct VR_IVRExtendedDisplay_FnTable +{ + void (OPENVR_FNTABLE_CALLTYPE *GetWindowBounds)(int32_t * pnX, int32_t * pnY, uint32_t * pnWidth, uint32_t * pnHeight); + void (OPENVR_FNTABLE_CALLTYPE *GetEyeOutputViewport)(EVREye eEye, uint32_t * pnX, uint32_t * pnY, uint32_t * pnWidth, uint32_t * pnHeight); + void (OPENVR_FNTABLE_CALLTYPE *GetDXGIOutputInfo)(int32_t * pnAdapterIndex, int32_t * pnAdapterOutputIndex); +}; + +struct VR_IVRTrackedCamera_FnTable +{ + char * (OPENVR_FNTABLE_CALLTYPE *GetCameraErrorNameFromEnum)(EVRTrackedCameraError eCameraError); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *HasCamera)(TrackedDeviceIndex_t nDeviceIndex, bool * pHasCamera); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *GetCameraFrameSize)(TrackedDeviceIndex_t nDeviceIndex, EVRTrackedCameraFrameType eFrameType, uint32_t * pnWidth, uint32_t * pnHeight, uint32_t * pnFrameBufferSize); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *GetCameraIntrinisics)(TrackedDeviceIndex_t nDeviceIndex, EVRTrackedCameraFrameType eFrameType, HmdVector2_t * pFocalLength, HmdVector2_t * pCenter); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *GetCameraProjection)(TrackedDeviceIndex_t nDeviceIndex, EVRTrackedCameraFrameType eFrameType, float flZNear, float flZFar, 
HmdMatrix44_t * pProjection); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *AcquireVideoStreamingService)(TrackedDeviceIndex_t nDeviceIndex, TrackedCameraHandle_t * pHandle); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *ReleaseVideoStreamingService)(TrackedCameraHandle_t hTrackedCamera); + EVRTrackedCameraError (OPENVR_FNTABLE_CALLTYPE *GetVideoStreamFrameBuffer)(TrackedCameraHandle_t hTrackedCamera, EVRTrackedCameraFrameType eFrameType, void * pFrameBuffer, uint32_t nFrameBufferSize, CameraVideoStreamFrameHeader_t * pFrameHeader, uint32_t nFrameHeaderSize); +}; + +struct VR_IVRApplications_FnTable +{ + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *AddApplicationManifest)(char * pchApplicationManifestFullPath, bool bTemporary); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *RemoveApplicationManifest)(char * pchApplicationManifestFullPath); + bool (OPENVR_FNTABLE_CALLTYPE *IsApplicationInstalled)(char * pchAppKey); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetApplicationCount)(); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *GetApplicationKeyByIndex)(uint32_t unApplicationIndex, char * pchAppKeyBuffer, uint32_t unAppKeyBufferLen); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *GetApplicationKeyByProcessId)(uint32_t unProcessId, char * pchAppKeyBuffer, uint32_t unAppKeyBufferLen); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *LaunchApplication)(char * pchAppKey); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *LaunchTemplateApplication)(char * pchTemplateAppKey, char * pchNewAppKey, struct AppOverrideKeys_t * pKeys, uint32_t unKeys); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *LaunchDashboardOverlay)(char * pchAppKey); + bool (OPENVR_FNTABLE_CALLTYPE *CancelApplicationLaunch)(char * pchAppKey); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *IdentifyApplication)(uint32_t unProcessId, char * pchAppKey); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetApplicationProcessId)(char * pchAppKey); + char * (OPENVR_FNTABLE_CALLTYPE *GetApplicationsErrorNameFromEnum)(EVRApplicationError error); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetApplicationPropertyString)(char * pchAppKey, EVRApplicationProperty eProperty, char * pchPropertyValueBuffer, uint32_t unPropertyValueBufferLen, EVRApplicationError * peError); + bool (OPENVR_FNTABLE_CALLTYPE *GetApplicationPropertyBool)(char * pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError * peError); + uint64_t (OPENVR_FNTABLE_CALLTYPE *GetApplicationPropertyUint64)(char * pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError * peError); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *SetApplicationAutoLaunch)(char * pchAppKey, bool bAutoLaunch); + bool (OPENVR_FNTABLE_CALLTYPE *GetApplicationAutoLaunch)(char * pchAppKey); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *GetStartingApplication)(char * pchAppKeyBuffer, uint32_t unAppKeyBufferLen); + EVRApplicationTransitionState (OPENVR_FNTABLE_CALLTYPE *GetTransitionState)(); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *PerformApplicationPrelaunchCheck)(char * pchAppKey); + char * (OPENVR_FNTABLE_CALLTYPE *GetApplicationsTransitionStateNameFromEnum)(EVRApplicationTransitionState state); + bool (OPENVR_FNTABLE_CALLTYPE *IsQuitUserPromptRequested)(); + EVRApplicationError (OPENVR_FNTABLE_CALLTYPE *LaunchInternalProcess)(char * pchBinaryPath, char * pchArguments, char * pchWorkingDirectory); +}; + +struct VR_IVRChaperone_FnTable +{ + ChaperoneCalibrationState (OPENVR_FNTABLE_CALLTYPE *GetCalibrationState)(); + bool (OPENVR_FNTABLE_CALLTYPE *GetPlayAreaSize)(float * pSizeX, float * 
pSizeZ); + bool (OPENVR_FNTABLE_CALLTYPE *GetPlayAreaRect)(struct HmdQuad_t * rect); + void (OPENVR_FNTABLE_CALLTYPE *ReloadInfo)(); + void (OPENVR_FNTABLE_CALLTYPE *SetSceneColor)(struct HmdColor_t color); + void (OPENVR_FNTABLE_CALLTYPE *GetBoundsColor)(struct HmdColor_t * pOutputColorArray, int nNumOutputColors, float flCollisionBoundsFadeDistance, struct HmdColor_t * pOutputCameraColor); + bool (OPENVR_FNTABLE_CALLTYPE *AreBoundsVisible)(); + void (OPENVR_FNTABLE_CALLTYPE *ForceBoundsVisible)(bool bForce); +}; + +struct VR_IVRChaperoneSetup_FnTable +{ + bool (OPENVR_FNTABLE_CALLTYPE *CommitWorkingCopy)(EChaperoneConfigFile configFile); + void (OPENVR_FNTABLE_CALLTYPE *RevertWorkingCopy)(); + bool (OPENVR_FNTABLE_CALLTYPE *GetWorkingPlayAreaSize)(float * pSizeX, float * pSizeZ); + bool (OPENVR_FNTABLE_CALLTYPE *GetWorkingPlayAreaRect)(struct HmdQuad_t * rect); + bool (OPENVR_FNTABLE_CALLTYPE *GetWorkingCollisionBoundsInfo)(struct HmdQuad_t * pQuadsBuffer, uint32_t * punQuadsCount); + bool (OPENVR_FNTABLE_CALLTYPE *GetLiveCollisionBoundsInfo)(struct HmdQuad_t * pQuadsBuffer, uint32_t * punQuadsCount); + bool (OPENVR_FNTABLE_CALLTYPE *GetWorkingSeatedZeroPoseToRawTrackingPose)(struct HmdMatrix34_t * pmatSeatedZeroPoseToRawTrackingPose); + bool (OPENVR_FNTABLE_CALLTYPE *GetWorkingStandingZeroPoseToRawTrackingPose)(struct HmdMatrix34_t * pmatStandingZeroPoseToRawTrackingPose); + void (OPENVR_FNTABLE_CALLTYPE *SetWorkingPlayAreaSize)(float sizeX, float sizeZ); + void (OPENVR_FNTABLE_CALLTYPE *SetWorkingCollisionBoundsInfo)(struct HmdQuad_t * pQuadsBuffer, uint32_t unQuadsCount); + void (OPENVR_FNTABLE_CALLTYPE *SetWorkingSeatedZeroPoseToRawTrackingPose)(struct HmdMatrix34_t * pMatSeatedZeroPoseToRawTrackingPose); + void (OPENVR_FNTABLE_CALLTYPE *SetWorkingStandingZeroPoseToRawTrackingPose)(struct HmdMatrix34_t * pMatStandingZeroPoseToRawTrackingPose); + void (OPENVR_FNTABLE_CALLTYPE *ReloadFromDisk)(EChaperoneConfigFile configFile); + bool (OPENVR_FNTABLE_CALLTYPE *GetLiveSeatedZeroPoseToRawTrackingPose)(struct HmdMatrix34_t * pmatSeatedZeroPoseToRawTrackingPose); + void (OPENVR_FNTABLE_CALLTYPE *SetWorkingCollisionBoundsTagsInfo)(uint8_t * pTagsBuffer, uint32_t unTagCount); + bool (OPENVR_FNTABLE_CALLTYPE *GetLiveCollisionBoundsTagsInfo)(uint8_t * pTagsBuffer, uint32_t * punTagCount); + bool (OPENVR_FNTABLE_CALLTYPE *SetWorkingPhysicalBoundsInfo)(struct HmdQuad_t * pQuadsBuffer, uint32_t unQuadsCount); + bool (OPENVR_FNTABLE_CALLTYPE *GetLivePhysicalBoundsInfo)(struct HmdQuad_t * pQuadsBuffer, uint32_t * punQuadsCount); + bool (OPENVR_FNTABLE_CALLTYPE *ExportLiveToBuffer)(char * pBuffer, uint32_t * pnBufferLength); + bool (OPENVR_FNTABLE_CALLTYPE *ImportFromBufferToWorking)(char * pBuffer, uint32_t nImportFlags); +}; + +struct VR_IVRCompositor_FnTable +{ + void (OPENVR_FNTABLE_CALLTYPE *SetTrackingSpace)(ETrackingUniverseOrigin eOrigin); + ETrackingUniverseOrigin (OPENVR_FNTABLE_CALLTYPE *GetTrackingSpace)(); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *WaitGetPoses)(struct TrackedDevicePose_t * pRenderPoseArray, uint32_t unRenderPoseArrayCount, struct TrackedDevicePose_t * pGamePoseArray, uint32_t unGamePoseArrayCount); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *GetLastPoses)(struct TrackedDevicePose_t * pRenderPoseArray, uint32_t unRenderPoseArrayCount, struct TrackedDevicePose_t * pGamePoseArray, uint32_t unGamePoseArrayCount); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *GetLastPoseForTrackedDeviceIndex)(TrackedDeviceIndex_t unDeviceIndex, struct TrackedDevicePose_t * 
pOutputPose, struct TrackedDevicePose_t * pOutputGamePose); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *Submit)(EVREye eEye, struct Texture_t * pTexture, struct VRTextureBounds_t * pBounds, EVRSubmitFlags nSubmitFlags); + void (OPENVR_FNTABLE_CALLTYPE *ClearLastSubmittedFrame)(); + void (OPENVR_FNTABLE_CALLTYPE *PostPresentHandoff)(); + bool (OPENVR_FNTABLE_CALLTYPE *GetFrameTiming)(struct Compositor_FrameTiming * pTiming, uint32_t unFramesAgo); + float (OPENVR_FNTABLE_CALLTYPE *GetFrameTimeRemaining)(); + void (OPENVR_FNTABLE_CALLTYPE *GetCumulativeStats)(struct Compositor_CumulativeStats * pStats, uint32_t nStatsSizeInBytes); + void (OPENVR_FNTABLE_CALLTYPE *FadeToColor)(float fSeconds, float fRed, float fGreen, float fBlue, float fAlpha, bool bBackground); + void (OPENVR_FNTABLE_CALLTYPE *FadeGrid)(float fSeconds, bool bFadeIn); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *SetSkyboxOverride)(struct Texture_t * pTextures, uint32_t unTextureCount); + void (OPENVR_FNTABLE_CALLTYPE *ClearSkyboxOverride)(); + void (OPENVR_FNTABLE_CALLTYPE *CompositorBringToFront)(); + void (OPENVR_FNTABLE_CALLTYPE *CompositorGoToBack)(); + void (OPENVR_FNTABLE_CALLTYPE *CompositorQuit)(); + bool (OPENVR_FNTABLE_CALLTYPE *IsFullscreen)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetCurrentSceneFocusProcess)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetLastFrameRenderer)(); + bool (OPENVR_FNTABLE_CALLTYPE *CanRenderScene)(); + void (OPENVR_FNTABLE_CALLTYPE *ShowMirrorWindow)(); + void (OPENVR_FNTABLE_CALLTYPE *HideMirrorWindow)(); + bool (OPENVR_FNTABLE_CALLTYPE *IsMirrorWindowVisible)(); + void (OPENVR_FNTABLE_CALLTYPE *CompositorDumpImages)(); + bool (OPENVR_FNTABLE_CALLTYPE *ShouldAppRenderWithLowResources)(); + void (OPENVR_FNTABLE_CALLTYPE *ForceInterleavedReprojectionOn)(bool bOverride); + void (OPENVR_FNTABLE_CALLTYPE *ForceReconnectProcess)(); + void (OPENVR_FNTABLE_CALLTYPE *SuspendRendering)(bool bSuspend); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *RequestScreenshot)(EVRScreenshotType type, char * pchDestinationFileName, char * pchVRDestinationFileName); + EVRScreenshotType (OPENVR_FNTABLE_CALLTYPE *GetCurrentScreenshotType)(); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *GetMirrorTextureD3D11)(EVREye eEye, void * pD3D11DeviceOrResource, void ** ppD3D11ShaderResourceView); + EVRCompositorError (OPENVR_FNTABLE_CALLTYPE *GetMirrorTextureGL)(EVREye eEye, glUInt_t * pglTextureId, glSharedTextureHandle_t * pglSharedTextureHandle); + bool (OPENVR_FNTABLE_CALLTYPE *ReleaseSharedGLTexture)(glUInt_t glTextureId, glSharedTextureHandle_t glSharedTextureHandle); + void (OPENVR_FNTABLE_CALLTYPE *LockGLSharedTextureForAccess)(glSharedTextureHandle_t glSharedTextureHandle); + void (OPENVR_FNTABLE_CALLTYPE *UnlockGLSharedTextureForAccess)(glSharedTextureHandle_t glSharedTextureHandle); +}; + +struct VR_IVROverlay_FnTable +{ + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *FindOverlay)(char * pchOverlayKey, VROverlayHandle_t * pOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *CreateOverlay)(char * pchOverlayKey, char * pchOverlayFriendlyName, VROverlayHandle_t * pOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *DestroyOverlay)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetHighQualityOverlay)(VROverlayHandle_t ulOverlayHandle); + VROverlayHandle_t (OPENVR_FNTABLE_CALLTYPE *GetHighQualityOverlay)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetOverlayKey)(VROverlayHandle_t ulOverlayHandle, char * pchValue, uint32_t unBufferSize, EVROverlayError * pError); + 
uint32_t (OPENVR_FNTABLE_CALLTYPE *GetOverlayName)(VROverlayHandle_t ulOverlayHandle, char * pchValue, uint32_t unBufferSize, EVROverlayError * pError); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayImageData)(VROverlayHandle_t ulOverlayHandle, void * pvBuffer, uint32_t unBufferSize, uint32_t * punWidth, uint32_t * punHeight); + char * (OPENVR_FNTABLE_CALLTYPE *GetOverlayErrorNameFromEnum)(EVROverlayError error); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayRenderingPid)(VROverlayHandle_t ulOverlayHandle, uint32_t unPID); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetOverlayRenderingPid)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayFlag)(VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool bEnabled); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayFlag)(VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool * pbEnabled); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayColor)(VROverlayHandle_t ulOverlayHandle, float fRed, float fGreen, float fBlue); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayColor)(VROverlayHandle_t ulOverlayHandle, float * pfRed, float * pfGreen, float * pfBlue); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayAlpha)(VROverlayHandle_t ulOverlayHandle, float fAlpha); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayAlpha)(VROverlayHandle_t ulOverlayHandle, float * pfAlpha); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayWidthInMeters)(VROverlayHandle_t ulOverlayHandle, float fWidthInMeters); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayWidthInMeters)(VROverlayHandle_t ulOverlayHandle, float * pfWidthInMeters); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayAutoCurveDistanceRangeInMeters)(VROverlayHandle_t ulOverlayHandle, float fMinDistanceInMeters, float fMaxDistanceInMeters); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayAutoCurveDistanceRangeInMeters)(VROverlayHandle_t ulOverlayHandle, float * pfMinDistanceInMeters, float * pfMaxDistanceInMeters); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTextureColorSpace)(VROverlayHandle_t ulOverlayHandle, EColorSpace eTextureColorSpace); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTextureColorSpace)(VROverlayHandle_t ulOverlayHandle, EColorSpace * peTextureColorSpace); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTextureBounds)(VROverlayHandle_t ulOverlayHandle, struct VRTextureBounds_t * pOverlayTextureBounds); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTextureBounds)(VROverlayHandle_t ulOverlayHandle, struct VRTextureBounds_t * pOverlayTextureBounds); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTransformType)(VROverlayHandle_t ulOverlayHandle, VROverlayTransformType * peTransformType); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTransformAbsolute)(VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, struct HmdMatrix34_t * pmatTrackingOriginToOverlayTransform); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTransformAbsolute)(VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin * peTrackingOrigin, struct HmdMatrix34_t * pmatTrackingOriginToOverlayTransform); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTransformTrackedDeviceRelative)(VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unTrackedDevice, struct HmdMatrix34_t * pmatTrackedDeviceToOverlayTransform); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTransformTrackedDeviceRelative)(VROverlayHandle_t 
ulOverlayHandle, TrackedDeviceIndex_t * punTrackedDevice, struct HmdMatrix34_t * pmatTrackedDeviceToOverlayTransform); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTransformTrackedDeviceComponent)(VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unDeviceIndex, char * pchComponentName); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTransformTrackedDeviceComponent)(VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t * punDeviceIndex, char * pchComponentName, uint32_t unComponentNameSize); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *ShowOverlay)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *HideOverlay)(VROverlayHandle_t ulOverlayHandle); + bool (OPENVR_FNTABLE_CALLTYPE *IsOverlayVisible)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetTransformForOverlayCoordinates)(VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, struct HmdVector2_t coordinatesInOverlay, struct HmdMatrix34_t * pmatTransform); + bool (OPENVR_FNTABLE_CALLTYPE *PollNextOverlayEvent)(VROverlayHandle_t ulOverlayHandle, struct VREvent_t * pEvent, uint32_t uncbVREvent); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayInputMethod)(VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod * peInputMethod); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayInputMethod)(VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod eInputMethod); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayMouseScale)(VROverlayHandle_t ulOverlayHandle, struct HmdVector2_t * pvecMouseScale); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayMouseScale)(VROverlayHandle_t ulOverlayHandle, struct HmdVector2_t * pvecMouseScale); + bool (OPENVR_FNTABLE_CALLTYPE *ComputeOverlayIntersection)(VROverlayHandle_t ulOverlayHandle, struct VROverlayIntersectionParams_t * pParams, struct VROverlayIntersectionResults_t * pResults); + bool (OPENVR_FNTABLE_CALLTYPE *HandleControllerOverlayInteractionAsMouse)(VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unControllerDeviceIndex); + bool (OPENVR_FNTABLE_CALLTYPE *IsHoverTargetOverlay)(VROverlayHandle_t ulOverlayHandle); + VROverlayHandle_t (OPENVR_FNTABLE_CALLTYPE *GetGamepadFocusOverlay)(); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetGamepadFocusOverlay)(VROverlayHandle_t ulNewFocusOverlay); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayNeighbor)(EOverlayDirection eDirection, VROverlayHandle_t ulFrom, VROverlayHandle_t ulTo); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *MoveGamepadFocusToNeighbor)(EOverlayDirection eDirection, VROverlayHandle_t ulFrom); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayTexture)(VROverlayHandle_t ulOverlayHandle, struct Texture_t * pTexture); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *ClearOverlayTexture)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayRaw)(VROverlayHandle_t ulOverlayHandle, void * pvBuffer, uint32_t unWidth, uint32_t unHeight, uint32_t unDepth); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetOverlayFromFile)(VROverlayHandle_t ulOverlayHandle, char * pchFilePath); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTexture)(VROverlayHandle_t ulOverlayHandle, void ** pNativeTextureHandle, void * pNativeTextureRef, uint32_t * pWidth, uint32_t * pHeight, uint32_t * pNativeFormat, EGraphicsAPIConvention * pAPI, EColorSpace * pColorSpace); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *ReleaseNativeOverlayHandle)(VROverlayHandle_t ulOverlayHandle, void * 
pNativeTextureHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetOverlayTextureSize)(VROverlayHandle_t ulOverlayHandle, uint32_t * pWidth, uint32_t * pHeight); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *CreateDashboardOverlay)(char * pchOverlayKey, char * pchOverlayFriendlyName, VROverlayHandle_t * pMainHandle, VROverlayHandle_t * pThumbnailHandle); + bool (OPENVR_FNTABLE_CALLTYPE *IsDashboardVisible)(); + bool (OPENVR_FNTABLE_CALLTYPE *IsActiveDashboardOverlay)(VROverlayHandle_t ulOverlayHandle); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *SetDashboardOverlaySceneProcess)(VROverlayHandle_t ulOverlayHandle, uint32_t unProcessId); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *GetDashboardOverlaySceneProcess)(VROverlayHandle_t ulOverlayHandle, uint32_t * punProcessId); + void (OPENVR_FNTABLE_CALLTYPE *ShowDashboard)(char * pchOverlayToShow); + TrackedDeviceIndex_t (OPENVR_FNTABLE_CALLTYPE *GetPrimaryDashboardDevice)(); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *ShowKeyboard)(EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, char * pchDescription, uint32_t unCharMax, char * pchExistingText, bool bUseMinimalMode, uint64_t uUserValue); + EVROverlayError (OPENVR_FNTABLE_CALLTYPE *ShowKeyboardForOverlay)(VROverlayHandle_t ulOverlayHandle, EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, char * pchDescription, uint32_t unCharMax, char * pchExistingText, bool bUseMinimalMode, uint64_t uUserValue); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetKeyboardText)(char * pchText, uint32_t cchText); + void (OPENVR_FNTABLE_CALLTYPE *HideKeyboard)(); + void (OPENVR_FNTABLE_CALLTYPE *SetKeyboardTransformAbsolute)(ETrackingUniverseOrigin eTrackingOrigin, struct HmdMatrix34_t * pmatTrackingOriginToKeyboardTransform); + void (OPENVR_FNTABLE_CALLTYPE *SetKeyboardPositionForOverlay)(VROverlayHandle_t ulOverlayHandle, struct HmdRect2_t avoidRect); +}; + +struct VR_IVRRenderModels_FnTable +{ + EVRRenderModelError (OPENVR_FNTABLE_CALLTYPE *LoadRenderModel_Async)(char * pchRenderModelName, struct RenderModel_t ** ppRenderModel); + void (OPENVR_FNTABLE_CALLTYPE *FreeRenderModel)(struct RenderModel_t * pRenderModel); + EVRRenderModelError (OPENVR_FNTABLE_CALLTYPE *LoadTexture_Async)(TextureID_t textureId, struct RenderModel_TextureMap_t ** ppTexture); + void (OPENVR_FNTABLE_CALLTYPE *FreeTexture)(struct RenderModel_TextureMap_t * pTexture); + EVRRenderModelError (OPENVR_FNTABLE_CALLTYPE *LoadTextureD3D11_Async)(TextureID_t textureId, void * pD3D11Device, void ** ppD3D11Texture2D); + EVRRenderModelError (OPENVR_FNTABLE_CALLTYPE *LoadIntoTextureD3D11_Async)(TextureID_t textureId, void * pDstTexture); + void (OPENVR_FNTABLE_CALLTYPE *FreeTextureD3D11)(void * pD3D11Texture2D); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetRenderModelName)(uint32_t unRenderModelIndex, char * pchRenderModelName, uint32_t unRenderModelNameLen); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetRenderModelCount)(); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetComponentCount)(char * pchRenderModelName); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetComponentName)(char * pchRenderModelName, uint32_t unComponentIndex, char * pchComponentName, uint32_t unComponentNameLen); + uint64_t (OPENVR_FNTABLE_CALLTYPE *GetComponentButtonMask)(char * pchRenderModelName, char * pchComponentName); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetComponentRenderModelName)(char * pchRenderModelName, char * pchComponentName, char * pchComponentRenderModelName, uint32_t unComponentRenderModelNameLen); + bool (OPENVR_FNTABLE_CALLTYPE 
*GetComponentState)(char * pchRenderModelName, char * pchComponentName, VRControllerState_t * pControllerState, struct RenderModel_ControllerMode_State_t * pState, struct RenderModel_ComponentState_t * pComponentState); + bool (OPENVR_FNTABLE_CALLTYPE *RenderModelHasComponent)(char * pchRenderModelName, char * pchComponentName); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetRenderModelThumbnailURL)(char * pchRenderModelName, char * pchThumbnailURL, uint32_t unThumbnailURLLen, EVRRenderModelError * peError); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetRenderModelOriginalPath)(char * pchRenderModelName, char * pchOriginalPath, uint32_t unOriginalPathLen, EVRRenderModelError * peError); + char * (OPENVR_FNTABLE_CALLTYPE *GetRenderModelErrorNameFromEnum)(EVRRenderModelError error); +}; + +struct VR_IVRNotifications_FnTable +{ + EVRNotificationError (OPENVR_FNTABLE_CALLTYPE *CreateNotification)(VROverlayHandle_t ulOverlayHandle, uint64_t ulUserValue, EVRNotificationType type, char * pchText, EVRNotificationStyle style, struct NotificationBitmap_t * pImage, VRNotificationId * pNotificationId); + EVRNotificationError (OPENVR_FNTABLE_CALLTYPE *RemoveNotification)(VRNotificationId notificationId); +}; + +struct VR_IVRSettings_FnTable +{ + char * (OPENVR_FNTABLE_CALLTYPE *GetSettingsErrorNameFromEnum)(EVRSettingsError eError); + bool (OPENVR_FNTABLE_CALLTYPE *Sync)(bool bForce, EVRSettingsError * peError); + bool (OPENVR_FNTABLE_CALLTYPE *GetBool)(char * pchSection, char * pchSettingsKey, bool bDefaultValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *SetBool)(char * pchSection, char * pchSettingsKey, bool bValue, EVRSettingsError * peError); + int32_t (OPENVR_FNTABLE_CALLTYPE *GetInt32)(char * pchSection, char * pchSettingsKey, int32_t nDefaultValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *SetInt32)(char * pchSection, char * pchSettingsKey, int32_t nValue, EVRSettingsError * peError); + float (OPENVR_FNTABLE_CALLTYPE *GetFloat)(char * pchSection, char * pchSettingsKey, float flDefaultValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *SetFloat)(char * pchSection, char * pchSettingsKey, float flValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *GetString)(char * pchSection, char * pchSettingsKey, char * pchValue, uint32_t unValueLen, char * pchDefaultValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *SetString)(char * pchSection, char * pchSettingsKey, char * pchValue, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *RemoveSection)(char * pchSection, EVRSettingsError * peError); + void (OPENVR_FNTABLE_CALLTYPE *RemoveKeyInSection)(char * pchSection, char * pchSettingsKey, EVRSettingsError * peError); +}; + +struct VR_IVRScreenshots_FnTable +{ + EVRScreenshotError (OPENVR_FNTABLE_CALLTYPE *RequestScreenshot)(ScreenshotHandle_t * pOutScreenshotHandle, EVRScreenshotType type, char * pchPreviewFilename, char * pchVRFilename); + EVRScreenshotError (OPENVR_FNTABLE_CALLTYPE *HookScreenshot)(EVRScreenshotType * pSupportedTypes, int numTypes); + EVRScreenshotType (OPENVR_FNTABLE_CALLTYPE *GetScreenshotPropertyType)(ScreenshotHandle_t screenshotHandle, EVRScreenshotError * pError); + uint32_t (OPENVR_FNTABLE_CALLTYPE *GetScreenshotPropertyFilename)(ScreenshotHandle_t screenshotHandle, EVRScreenshotPropertyFilenames filenameType, char * pchFilename, uint32_t cchFilename, EVRScreenshotError * pError); + EVRScreenshotError (OPENVR_FNTABLE_CALLTYPE *UpdateScreenshotProgress)(ScreenshotHandle_t screenshotHandle, float 
flProgress); + EVRScreenshotError (OPENVR_FNTABLE_CALLTYPE *TakeStereoScreenshot)(ScreenshotHandle_t * pOutScreenshotHandle, char * pchPreviewFilename, char * pchVRFilename); + EVRScreenshotError (OPENVR_FNTABLE_CALLTYPE *SubmitScreenshot)(ScreenshotHandle_t screenshotHandle, EVRScreenshotType type, char * pchSourcePreviewFilename, char * pchSourceVRFilename); +}; + + +#if 0 +// Global entry points +S_API intptr_t VR_InitInternal( EVRInitError *peError, EVRApplicationType eType ); +S_API void VR_ShutdownInternal(); +S_API bool VR_IsHmdPresent(); +S_API intptr_t VR_GetGenericInterface( const char *pchInterfaceVersion, EVRInitError *peError ); +S_API bool VR_IsRuntimeInstalled(); +S_API const char * VR_GetVRInitErrorAsSymbol( EVRInitError error ); +S_API const char * VR_GetVRInitErrorAsEnglishDescription( EVRInitError error ); +#endif + +#endif // __OPENVR_API_FLAT_H__ + + diff --git a/examples/ThirdPartyLibs/openvr/headers/openvr_driver.h b/examples/ThirdPartyLibs/openvr/headers/openvr_driver.h new file mode 100644 index 000000000..3f2a21df4 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/headers/openvr_driver.h @@ -0,0 +1,1829 @@ +#pragma once + +// openvr_driver.h +//========= Copyright Valve Corporation ============// +// Dynamically generated file. Do not modify this file directly. + +#ifndef _OPENVR_DRIVER_API +#define _OPENVR_DRIVER_API + +#include + + + +// vrtypes.h +#ifndef _INCLUDE_VRTYPES_H +#define _INCLUDE_VRTYPES_H + +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +typedef void* glSharedTextureHandle_t; +typedef int32_t glInt_t; +typedef uint32_t glUInt_t; + +// right-handed system +// +y is up +// +x is to the right +// -z is going away from you +// Distance unit is meters +struct HmdMatrix34_t +{ + float m[3][4]; +}; + +struct HmdMatrix44_t +{ + float m[4][4]; +}; + +struct HmdVector3_t +{ + float v[3]; +}; + +struct HmdVector4_t +{ + float v[4]; +}; + +struct HmdVector3d_t +{ + double v[3]; +}; + +struct HmdVector2_t +{ + float v[2]; +}; + +struct HmdQuaternion_t +{ + double w, x, y, z; +}; + +struct HmdColor_t +{ + float r, g, b, a; +}; + +struct HmdQuad_t +{ + HmdVector3_t vCorners[ 4 ]; +}; + +struct HmdRect2_t +{ + HmdVector2_t vTopLeft; + HmdVector2_t vBottomRight; +}; + +/** Used to return the post-distortion UVs for each color channel. +* UVs range from 0 to 1 with 0,0 in the upper left corner of the +* source render target. The 0,0 to 1,1 range covers a single eye. */ +struct DistortionCoordinates_t +{ + float rfRed[2]; + float rfGreen[2]; + float rfBlue[2]; +}; + +enum EVREye +{ + Eye_Left = 0, + Eye_Right = 1 +}; + +enum EGraphicsAPIConvention +{ + API_DirectX = 0, // Normalized Z goes from 0 at the viewer to 1 at the far clip plane + API_OpenGL = 1, // Normalized Z goes from 1 at the viewer to -1 at the far clip plane +}; + +enum EColorSpace +{ + ColorSpace_Auto = 0, // Assumes 'gamma' for 8-bit per component formats, otherwise 'linear'. This mirrors the DXGI formats which have _SRGB variants. 
+ ColorSpace_Gamma = 1, // Texture data can be displayed directly on the display without any conversion (a.k.a. display native format). + ColorSpace_Linear = 2, // Same as gamma but has been converted to a linear representation using DXGI's sRGB conversion algorithm. +}; + +struct Texture_t +{ + void* handle; // Native d3d texture pointer or GL texture id. + EGraphicsAPIConvention eType; + EColorSpace eColorSpace; +}; + +enum ETrackingResult +{ + TrackingResult_Uninitialized = 1, + + TrackingResult_Calibrating_InProgress = 100, + TrackingResult_Calibrating_OutOfRange = 101, + + TrackingResult_Running_OK = 200, + TrackingResult_Running_OutOfRange = 201, +}; + +static const uint32_t k_unTrackingStringSize = 32; +static const uint32_t k_unMaxDriverDebugResponseSize = 32768; + +/** Used to pass device IDs to API calls */ +typedef uint32_t TrackedDeviceIndex_t; +static const uint32_t k_unTrackedDeviceIndex_Hmd = 0; +static const uint32_t k_unMaxTrackedDeviceCount = 16; +static const uint32_t k_unTrackedDeviceIndexOther = 0xFFFFFFFE; +static const uint32_t k_unTrackedDeviceIndexInvalid = 0xFFFFFFFF; + +/** Describes what kind of object is being tracked at a given ID */ +enum ETrackedDeviceClass +{ + TrackedDeviceClass_Invalid = 0, // the ID was not valid. + TrackedDeviceClass_HMD = 1, // Head-Mounted Displays + TrackedDeviceClass_Controller = 2, // Tracked controllers + TrackedDeviceClass_TrackingReference = 4, // Camera and base stations that serve as tracking reference points + + TrackedDeviceClass_Other = 1000, +}; + + +/** Describes what specific role associated with a tracked device */ +enum ETrackedControllerRole +{ + TrackedControllerRole_Invalid = 0, // Invalid value for controller type + TrackedControllerRole_LeftHand = 1, // Tracked device associated with the left hand + TrackedControllerRole_RightHand = 2, // Tracked device associated with the right hand +}; + + +/** describes a single pose for a tracked object */ +struct TrackedDevicePose_t +{ + HmdMatrix34_t mDeviceToAbsoluteTracking; + HmdVector3_t vVelocity; // velocity in tracker space in m/s + HmdVector3_t vAngularVelocity; // angular velocity in radians/s (?) + ETrackingResult eTrackingResult; + bool bPoseIsValid; + + // This indicates that there is a device connected for this spot in the pose array. + // It could go from true to false if the user unplugs the device. + bool bDeviceIsConnected; +}; + +/** Identifies which style of tracking origin the application wants to use +* for the poses it is requesting */ +enum ETrackingUniverseOrigin +{ + TrackingUniverseSeated = 0, // Poses are provided relative to the seated zero pose + TrackingUniverseStanding = 1, // Poses are provided relative to the safe bounds configured by the user + TrackingUniverseRawAndUncalibrated = 2, // Poses are provided in the coordinate system defined by the driver. You probably don't want this one. +}; + + +/** Each entry in this enum represents a property that can be retrieved about a +* tracked device. Many fields are only valid for one ETrackedDeviceClass. 
*/ +enum ETrackedDeviceProperty +{ + // general properties that apply to all device classes + Prop_TrackingSystemName_String = 1000, + Prop_ModelNumber_String = 1001, + Prop_SerialNumber_String = 1002, + Prop_RenderModelName_String = 1003, + Prop_WillDriftInYaw_Bool = 1004, + Prop_ManufacturerName_String = 1005, + Prop_TrackingFirmwareVersion_String = 1006, + Prop_HardwareRevision_String = 1007, + Prop_AllWirelessDongleDescriptions_String = 1008, + Prop_ConnectedWirelessDongle_String = 1009, + Prop_DeviceIsWireless_Bool = 1010, + Prop_DeviceIsCharging_Bool = 1011, + Prop_DeviceBatteryPercentage_Float = 1012, // 0 is empty, 1 is full + Prop_StatusDisplayTransform_Matrix34 = 1013, + Prop_Firmware_UpdateAvailable_Bool = 1014, + Prop_Firmware_ManualUpdate_Bool = 1015, + Prop_Firmware_ManualUpdateURL_String = 1016, + Prop_HardwareRevision_Uint64 = 1017, + Prop_FirmwareVersion_Uint64 = 1018, + Prop_FPGAVersion_Uint64 = 1019, + Prop_VRCVersion_Uint64 = 1020, + Prop_RadioVersion_Uint64 = 1021, + Prop_DongleVersion_Uint64 = 1022, + Prop_BlockServerShutdown_Bool = 1023, + Prop_CanUnifyCoordinateSystemWithHmd_Bool = 1024, + Prop_ContainsProximitySensor_Bool = 1025, + Prop_DeviceProvidesBatteryStatus_Bool = 1026, + Prop_DeviceCanPowerOff_Bool = 1027, + Prop_Firmware_ProgrammingTarget_String = 1028, + Prop_DeviceClass_Int32 = 1029, + Prop_HasCamera_Bool = 1030, + Prop_DriverVersion_String = 1031, + Prop_Firmware_ForceUpdateRequired_Bool = 1032, + + // Properties that are unique to TrackedDeviceClass_HMD + Prop_ReportsTimeSinceVSync_Bool = 2000, + Prop_SecondsFromVsyncToPhotons_Float = 2001, + Prop_DisplayFrequency_Float = 2002, + Prop_UserIpdMeters_Float = 2003, + Prop_CurrentUniverseId_Uint64 = 2004, + Prop_PreviousUniverseId_Uint64 = 2005, + Prop_DisplayFirmwareVersion_Uint64 = 2006, + Prop_IsOnDesktop_Bool = 2007, + Prop_DisplayMCType_Int32 = 2008, + Prop_DisplayMCOffset_Float = 2009, + Prop_DisplayMCScale_Float = 2010, + Prop_EdidVendorID_Int32 = 2011, + Prop_DisplayMCImageLeft_String = 2012, + Prop_DisplayMCImageRight_String = 2013, + Prop_DisplayGCBlackClamp_Float = 2014, + Prop_EdidProductID_Int32 = 2015, + Prop_CameraToHeadTransform_Matrix34 = 2016, + Prop_DisplayGCType_Int32 = 2017, + Prop_DisplayGCOffset_Float = 2018, + Prop_DisplayGCScale_Float = 2019, + Prop_DisplayGCPrescale_Float = 2020, + Prop_DisplayGCImage_String = 2021, + Prop_LensCenterLeftU_Float = 2022, + Prop_LensCenterLeftV_Float = 2023, + Prop_LensCenterRightU_Float = 2024, + Prop_LensCenterRightV_Float = 2025, + Prop_UserHeadToEyeDepthMeters_Float = 2026, + Prop_CameraFirmwareVersion_Uint64 = 2027, + Prop_CameraFirmwareDescription_String = 2028, + Prop_DisplayFPGAVersion_Uint64 = 2029, + Prop_DisplayBootloaderVersion_Uint64 = 2030, + Prop_DisplayHardwareVersion_Uint64 = 2031, + Prop_AudioFirmwareVersion_Uint64 = 2032, + Prop_CameraCompatibilityMode_Int32 = 2033, + Prop_ScreenshotHorizontalFieldOfViewDegrees_Float = 2034, + Prop_ScreenshotVerticalFieldOfViewDegrees_Float = 2035, + Prop_DisplaySuppressed_Bool = 2036, + + // Properties that are unique to TrackedDeviceClass_Controller + Prop_AttachedDeviceId_String = 3000, + Prop_SupportedButtons_Uint64 = 3001, + Prop_Axis0Type_Int32 = 3002, // Return value is of type EVRControllerAxisType + Prop_Axis1Type_Int32 = 3003, // Return value is of type EVRControllerAxisType + Prop_Axis2Type_Int32 = 3004, // Return value is of type EVRControllerAxisType + Prop_Axis3Type_Int32 = 3005, // Return value is of type EVRControllerAxisType + Prop_Axis4Type_Int32 = 3006, // Return value is of type 
EVRControllerAxisType + + // Properties that are unique to TrackedDeviceClass_TrackingReference + Prop_FieldOfViewLeftDegrees_Float = 4000, + Prop_FieldOfViewRightDegrees_Float = 4001, + Prop_FieldOfViewTopDegrees_Float = 4002, + Prop_FieldOfViewBottomDegrees_Float = 4003, + Prop_TrackingRangeMinimumMeters_Float = 4004, + Prop_TrackingRangeMaximumMeters_Float = 4005, + Prop_ModeLabel_String = 4006, + + // Vendors are free to expose private debug data in this reserved region + Prop_VendorSpecific_Reserved_Start = 10000, + Prop_VendorSpecific_Reserved_End = 10999, +}; + +/** No string property will ever be longer than this length */ +static const uint32_t k_unMaxPropertyStringSize = 32 * 1024; + +/** Used to return errors that occur when reading properties. */ +enum ETrackedPropertyError +{ + TrackedProp_Success = 0, + TrackedProp_WrongDataType = 1, + TrackedProp_WrongDeviceClass = 2, + TrackedProp_BufferTooSmall = 3, + TrackedProp_UnknownProperty = 4, + TrackedProp_InvalidDevice = 5, + TrackedProp_CouldNotContactServer = 6, + TrackedProp_ValueNotProvidedByDevice = 7, + TrackedProp_StringExceedsMaximumLength = 8, + TrackedProp_NotYetAvailable = 9, // The property value isn't known yet, but is expected soon. Call again later. +}; + +/** Allows the application to control what part of the provided texture will be used in the +* frame buffer. */ +struct VRTextureBounds_t +{ + float uMin, vMin; + float uMax, vMax; +}; + + +/** Allows the application to control how scene textures are used by the compositor when calling Submit. */ +enum EVRSubmitFlags +{ + // Simple render path. App submits rendered left and right eye images with no lens distortion correction applied. + Submit_Default = 0x00, + + // App submits final left and right eye images with lens distortion already applied (lens distortion makes the images appear + // barrel distorted with chromatic aberration correction applied). The app would have used the data returned by + // vr::IVRSystem::ComputeDistortion() to apply the correct distortion to the rendered images before calling Submit(). + Submit_LensDistortionAlreadyApplied = 0x01, + + // If the texture pointer passed in is actually a renderbuffer (e.g. for MSAA in OpenGL) then set this flag. 
+ Submit_GlRenderBuffer = 0x02, +}; + + +/** Status of the overall system or tracked objects */ +enum EVRState +{ + VRState_Undefined = -1, + VRState_Off = 0, + VRState_Searching = 1, + VRState_Searching_Alert = 2, + VRState_Ready = 3, + VRState_Ready_Alert = 4, + VRState_NotReady = 5, + VRState_Standby = 6, +}; + +/** The types of events that could be posted (and what the parameters mean for each event type) */ +enum EVREventType +{ + VREvent_None = 0, + + VREvent_TrackedDeviceActivated = 100, + VREvent_TrackedDeviceDeactivated = 101, + VREvent_TrackedDeviceUpdated = 102, + VREvent_TrackedDeviceUserInteractionStarted = 103, + VREvent_TrackedDeviceUserInteractionEnded = 104, + VREvent_IpdChanged = 105, + VREvent_EnterStandbyMode = 106, + VREvent_LeaveStandbyMode = 107, + VREvent_TrackedDeviceRoleChanged = 108, + + VREvent_ButtonPress = 200, // data is controller + VREvent_ButtonUnpress = 201, // data is controller + VREvent_ButtonTouch = 202, // data is controller + VREvent_ButtonUntouch = 203, // data is controller + + VREvent_MouseMove = 300, // data is mouse + VREvent_MouseButtonDown = 301, // data is mouse + VREvent_MouseButtonUp = 302, // data is mouse + VREvent_FocusEnter = 303, // data is overlay + VREvent_FocusLeave = 304, // data is overlay + VREvent_Scroll = 305, // data is mouse + VREvent_TouchPadMove = 306, // data is mouse + + VREvent_InputFocusCaptured = 400, // data is process DEPRECATED + VREvent_InputFocusReleased = 401, // data is process DEPRECATED + VREvent_SceneFocusLost = 402, // data is process + VREvent_SceneFocusGained = 403, // data is process + VREvent_SceneApplicationChanged = 404, // data is process - The App actually drawing the scene changed (usually to or from the compositor) + VREvent_SceneFocusChanged = 405, // data is process - New app got access to draw the scene + VREvent_InputFocusChanged = 406, // data is process + VREvent_SceneApplicationSecondaryRenderingStarted = 407, // data is process + + VREvent_HideRenderModels = 410, // Sent to the scene application to request hiding render models temporarily + VREvent_ShowRenderModels = 411, // Sent to the scene application to request restoring render model visibility + + VREvent_OverlayShown = 500, + VREvent_OverlayHidden = 501, + VREvent_DashboardActivated = 502, + VREvent_DashboardDeactivated = 503, + VREvent_DashboardThumbSelected = 504, // Sent to the overlay manager - data is overlay + VREvent_DashboardRequested = 505, // Sent to the overlay manager - data is overlay + VREvent_ResetDashboard = 506, // Send to the overlay manager + VREvent_RenderToast = 507, // Send to the dashboard to render a toast - data is the notification ID + VREvent_ImageLoaded = 508, // Sent to overlays when a SetOverlayRaw or SetOverlayFromFile call finishes loading + VREvent_ShowKeyboard = 509, // Sent to keyboard renderer in the dashboard to invoke it + VREvent_HideKeyboard = 510, // Sent to keyboard renderer in the dashboard to hide it + VREvent_OverlayGamepadFocusGained = 511, // Sent to an overlay when IVROverlay::SetFocusOverlay is called on it + VREvent_OverlayGamepadFocusLost = 512, // Send to an overlay when it previously had focus and IVROverlay::SetFocusOverlay is called on something else + VREvent_OverlaySharedTextureChanged = 513, + VREvent_DashboardGuideButtonDown = 514, + VREvent_DashboardGuideButtonUp = 515, + VREvent_ScreenshotTriggered = 516, // Screenshot button combo was pressed, Dashboard should request a screenshot + VREvent_ImageFailed = 517, // Sent to overlays when a SetOverlayRaw or SetOverlayfromFail 
fails to load + + // Screenshot API + VREvent_RequestScreenshot = 520, // Sent by vrclient application to compositor to take a screenshot + VREvent_ScreenshotTaken = 521, // Sent by compositor to the application that the screenshot has been taken + VREvent_ScreenshotFailed = 522, // Sent by compositor to the application that the screenshot failed to be taken + VREvent_SubmitScreenshotToDashboard = 523, // Sent by compositor to the dashboard that a completed screenshot was submitted + + VREvent_Notification_Shown = 600, + VREvent_Notification_Hidden = 601, + VREvent_Notification_BeginInteraction = 602, + VREvent_Notification_Destroyed = 603, + + VREvent_Quit = 700, // data is process + VREvent_ProcessQuit = 701, // data is process + VREvent_QuitAborted_UserPrompt = 702, // data is process + VREvent_QuitAcknowledged = 703, // data is process + VREvent_DriverRequestedQuit = 704, // The driver has requested that SteamVR shut down + + VREvent_ChaperoneDataHasChanged = 800, + VREvent_ChaperoneUniverseHasChanged = 801, + VREvent_ChaperoneTempDataHasChanged = 802, + VREvent_ChaperoneSettingsHaveChanged = 803, + VREvent_SeatedZeroPoseReset = 804, + + VREvent_AudioSettingsHaveChanged = 820, + + VREvent_BackgroundSettingHasChanged = 850, + VREvent_CameraSettingsHaveChanged = 851, + VREvent_ReprojectionSettingHasChanged = 852, + VREvent_ModelSkinSettingsHaveChanged = 853, + VREvent_EnvironmentSettingsHaveChanged = 854, + + VREvent_StatusUpdate = 900, + + VREvent_MCImageUpdated = 1000, + + VREvent_FirmwareUpdateStarted = 1100, + VREvent_FirmwareUpdateFinished = 1101, + + VREvent_KeyboardClosed = 1200, + VREvent_KeyboardCharInput = 1201, + VREvent_KeyboardDone = 1202, // Sent when DONE button clicked on keyboard + + VREvent_ApplicationTransitionStarted = 1300, + VREvent_ApplicationTransitionAborted = 1301, + VREvent_ApplicationTransitionNewAppStarted = 1302, + VREvent_ApplicationListUpdated = 1303, + + VREvent_Compositor_MirrorWindowShown = 1400, + VREvent_Compositor_MirrorWindowHidden = 1401, + VREvent_Compositor_ChaperoneBoundsShown = 1410, + VREvent_Compositor_ChaperoneBoundsHidden = 1411, + + VREvent_TrackedCamera_StartVideoStream = 1500, + VREvent_TrackedCamera_StopVideoStream = 1501, + VREvent_TrackedCamera_PauseVideoStream = 1502, + VREvent_TrackedCamera_ResumeVideoStream = 1503, + + VREvent_PerformanceTest_EnableCapture = 1600, + VREvent_PerformanceTest_DisableCapture = 1601, + VREvent_PerformanceTest_FidelityLevel = 1602, + + // Vendors are free to expose private events in this reserved region + VREvent_VendorSpecific_Reserved_Start = 10000, + VREvent_VendorSpecific_Reserved_End = 19999, +}; + + +/** Level of Hmd activity */ +enum EDeviceActivityLevel +{ + k_EDeviceActivityLevel_Unknown = -1, + k_EDeviceActivityLevel_Idle = 0, + k_EDeviceActivityLevel_UserInteraction = 1, + k_EDeviceActivityLevel_UserInteraction_Timeout = 2, + k_EDeviceActivityLevel_Standby = 3, +}; + + +/** VR controller button and axis IDs */ +enum EVRButtonId +{ + k_EButton_System = 0, + k_EButton_ApplicationMenu = 1, + k_EButton_Grip = 2, + k_EButton_DPad_Left = 3, + k_EButton_DPad_Up = 4, + k_EButton_DPad_Right = 5, + k_EButton_DPad_Down = 6, + k_EButton_A = 7, + + k_EButton_Axis0 = 32, + k_EButton_Axis1 = 33, + k_EButton_Axis2 = 34, + k_EButton_Axis3 = 35, + k_EButton_Axis4 = 36, + + // aliases for well known controllers + k_EButton_SteamVR_Touchpad = k_EButton_Axis0, + k_EButton_SteamVR_Trigger = k_EButton_Axis1, + + k_EButton_Dashboard_Back = k_EButton_Grip, + + k_EButton_Max = 64 +}; + +inline uint64_t 
ButtonMaskFromId( EVRButtonId id ) { return 1ull << id; } + +/** used for controller button events */ +struct VREvent_Controller_t +{ + uint32_t button; // EVRButtonId enum +}; + + +/** used for simulated mouse events in overlay space */ +enum EVRMouseButton +{ + VRMouseButton_Left = 0x0001, + VRMouseButton_Right = 0x0002, + VRMouseButton_Middle = 0x0004, +}; + + +/** used for simulated mouse events in overlay space */ +struct VREvent_Mouse_t +{ + float x, y; // co-ords are in GL space, bottom left of the texture is 0,0 + uint32_t button; // EVRMouseButton enum +}; + +/** used for simulated mouse wheel scroll in overlay space */ +struct VREvent_Scroll_t +{ + float xdelta, ydelta; // movement in fraction of the pad traversed since last delta, 1.0 for a full swipe + uint32_t repeatCount; +}; + +/** when in mouse input mode you can receive data from the touchpad, these events are only sent if the users finger + is on the touchpad (or just released from it) +**/ +struct VREvent_TouchPadMove_t +{ + // true if the users finger is detected on the touch pad + bool bFingerDown; + + // How long the finger has been down in seconds + float flSecondsFingerDown; + + // These values indicate the starting finger position (so you can do some basic swipe stuff) + float fValueXFirst; + float fValueYFirst; + + // This is the raw sampled coordinate without deadzoning + float fValueXRaw; + float fValueYRaw; +}; + +/** notification related events. Details will still change at this point */ +struct VREvent_Notification_t +{ + uint64_t ulUserValue; + uint32_t notificationId; +}; + +/** Used for events about processes */ +struct VREvent_Process_t +{ + uint32_t pid; + uint32_t oldPid; + bool bForced; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Overlay_t +{ + uint64_t overlayHandle; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Status_t +{ + uint32_t statusState; // EVRState enum +}; + +/** Used for keyboard events **/ +struct VREvent_Keyboard_t +{ + char cNewInput[8]; // Up to 11 bytes of new input + uint64_t uUserValue; // Possible flags about the new input +}; + +struct VREvent_Ipd_t +{ + float ipdMeters; +}; + +struct VREvent_Chaperone_t +{ + uint64_t m_nPreviousUniverse; + uint64_t m_nCurrentUniverse; +}; + +/** Not actually used for any events */ +struct VREvent_Reserved_t +{ + uint64_t reserved0; + uint64_t reserved1; +}; + +struct VREvent_PerformanceTest_t +{ + uint32_t m_nFidelityLevel; +}; + +struct VREvent_SeatedZeroPoseReset_t +{ + bool bResetBySystemMenu; +}; + +struct VREvent_Screenshot_t +{ + uint32_t handle; + uint32_t type; +}; + +/** If you change this you must manually update openvr_interop.cs.py */ +typedef union +{ + VREvent_Reserved_t reserved; + VREvent_Controller_t controller; + VREvent_Mouse_t mouse; + VREvent_Scroll_t scroll; + VREvent_Process_t process; + VREvent_Notification_t notification; + VREvent_Overlay_t overlay; + VREvent_Status_t status; + VREvent_Keyboard_t keyboard; + VREvent_Ipd_t ipd; + VREvent_Chaperone_t chaperone; + VREvent_PerformanceTest_t performanceTest; + VREvent_TouchPadMove_t touchPadMove; + VREvent_SeatedZeroPoseReset_t seatedZeroPoseReset; + VREvent_Screenshot_t screenshot; +} VREvent_Data_t; + +/** An event posted by the server to all running applications */ +struct VREvent_t +{ + uint32_t eventType; // EVREventType enum + TrackedDeviceIndex_t trackedDeviceIndex; + float eventAgeSeconds; + // event data must be the end of the struct as its size is variable + VREvent_Data_t data; +}; + + +/** The mesh to draw into the 
stencil (or depth) buffer to perform +* early stencil (or depth) kills of pixels that will never appear on the HMD. +* This mesh draws on all the pixels that will be hidden after distortion. +* +* If the HMD does not provide a visible area mesh pVertexData will be +* NULL and unTriangleCount will be 0. */ +struct HiddenAreaMesh_t +{ + const HmdVector2_t *pVertexData; + uint32_t unTriangleCount; +}; + + +/** Identifies what kind of axis is on the controller at index n. Read this type +* with pVRSystem->Get( nControllerDeviceIndex, Prop_Axis0Type_Int32 + n ); +*/ +enum EVRControllerAxisType +{ + k_eControllerAxis_None = 0, + k_eControllerAxis_TrackPad = 1, + k_eControllerAxis_Joystick = 2, + k_eControllerAxis_Trigger = 3, // Analog trigger data is in the X axis +}; + + +/** contains information about one axis on the controller */ +struct VRControllerAxis_t +{ + float x; // Ranges from -1.0 to 1.0 for joysticks and track pads. Ranges from 0.0 to 1.0 for triggers were 0 is fully released. + float y; // Ranges from -1.0 to 1.0 for joysticks and track pads. Is always 0.0 for triggers. +}; + + +/** the number of axes in the controller state */ +static const uint32_t k_unControllerStateAxisCount = 5; + + +/** Holds all the state of a controller at one moment in time. */ +struct VRControllerState001_t +{ + // If packet num matches that on your prior call, then the controller state hasn't been changed since + // your last call and there is no need to process it + uint32_t unPacketNum; + + // bit flags for each of the buttons. Use ButtonMaskFromId to turn an ID into a mask + uint64_t ulButtonPressed; + uint64_t ulButtonTouched; + + // Axis data for the controller's analog inputs + VRControllerAxis_t rAxis[ k_unControllerStateAxisCount ]; +}; + + +typedef VRControllerState001_t VRControllerState_t; + + +/** determines how to provide output to the application of various event processing functions. 
*/ +enum EVRControllerEventOutputType +{ + ControllerEventOutput_OSEvents = 0, + ControllerEventOutput_VREvents = 1, +}; + + + +/** Collision Bounds Style */ +enum ECollisionBoundsStyle +{ + COLLISION_BOUNDS_STYLE_BEGINNER = 0, + COLLISION_BOUNDS_STYLE_INTERMEDIATE, + COLLISION_BOUNDS_STYLE_SQUARES, + COLLISION_BOUNDS_STYLE_ADVANCED, + COLLISION_BOUNDS_STYLE_NONE, + + COLLISION_BOUNDS_STYLE_COUNT +}; + +/** Allows the application to customize how the overlay appears in the compositor */ +struct Compositor_OverlaySettings +{ + uint32_t size; // sizeof(Compositor_OverlaySettings) + bool curved, antialias; + float scale, distance, alpha; + float uOffset, vOffset, uScale, vScale; + float gridDivs, gridWidth, gridScale; + HmdMatrix44_t transform; +}; + +/** used to refer to a single VR overlay */ +typedef uint64_t VROverlayHandle_t; + +static const VROverlayHandle_t k_ulOverlayHandleInvalid = 0; + +/** Errors that can occur around VR overlays */ +enum EVROverlayError +{ + VROverlayError_None = 0, + + VROverlayError_UnknownOverlay = 10, + VROverlayError_InvalidHandle = 11, + VROverlayError_PermissionDenied = 12, + VROverlayError_OverlayLimitExceeded = 13, // No more overlays could be created because the maximum number already exist + VROverlayError_WrongVisibilityType = 14, + VROverlayError_KeyTooLong = 15, + VROverlayError_NameTooLong = 16, + VROverlayError_KeyInUse = 17, + VROverlayError_WrongTransformType = 18, + VROverlayError_InvalidTrackedDevice = 19, + VROverlayError_InvalidParameter = 20, + VROverlayError_ThumbnailCantBeDestroyed = 21, + VROverlayError_ArrayTooSmall = 22, + VROverlayError_RequestFailed = 23, + VROverlayError_InvalidTexture = 24, + VROverlayError_UnableToLoadFile = 25, + VROVerlayError_KeyboardAlreadyInUse = 26, + VROverlayError_NoNeighbor = 27, +}; + +/** enum values to pass in to VR_Init to identify whether the application will +* draw a 3D scene. */ +enum EVRApplicationType +{ + VRApplication_Other = 0, // Some other kind of application that isn't covered by the other entries + VRApplication_Scene = 1, // Application will submit 3D frames + VRApplication_Overlay = 2, // Application only interacts with overlays + VRApplication_Background = 3, // Application should not start SteamVR if it's not already running, and should not + // keep it running if everything else quits. + VRApplication_Utility = 4, // Init should not try to load any drivers. The application needs access to utility + // interfaces (like IVRSettings and IVRApplications) but not hardware. 
+ VRApplication_VRMonitor = 5, // Reserved for vrmonitor +}; + + +/** error codes for firmware */ +enum EVRFirmwareError +{ + VRFirmwareError_None = 0, + VRFirmwareError_Success = 1, + VRFirmwareError_Fail = 2, +}; + + +/** error codes for notifications */ +enum EVRNotificationError +{ + VRNotificationError_OK = 0, + VRNotificationError_InvalidNotificationId = 100, + VRNotificationError_NotificationQueueFull = 101, + VRNotificationError_InvalidOverlayHandle = 102, + VRNotificationError_SystemWithUserValueAlreadyExists = 103, +}; + + +/** error codes returned by Vr_Init */ + +// Please add adequate error description to https://developer.valvesoftware.com/w/index.php?title=Category:SteamVRHelp +enum EVRInitError +{ + VRInitError_None = 0, + VRInitError_Unknown = 1, + + VRInitError_Init_InstallationNotFound = 100, + VRInitError_Init_InstallationCorrupt = 101, + VRInitError_Init_VRClientDLLNotFound = 102, + VRInitError_Init_FileNotFound = 103, + VRInitError_Init_FactoryNotFound = 104, + VRInitError_Init_InterfaceNotFound = 105, + VRInitError_Init_InvalidInterface = 106, + VRInitError_Init_UserConfigDirectoryInvalid = 107, + VRInitError_Init_HmdNotFound = 108, + VRInitError_Init_NotInitialized = 109, + VRInitError_Init_PathRegistryNotFound = 110, + VRInitError_Init_NoConfigPath = 111, + VRInitError_Init_NoLogPath = 112, + VRInitError_Init_PathRegistryNotWritable = 113, + VRInitError_Init_AppInfoInitFailed = 114, + VRInitError_Init_Retry = 115, // Used internally to cause retries to vrserver + VRInitError_Init_InitCanceledByUser = 116, // The calling application should silently exit. The user canceled app startup + VRInitError_Init_AnotherAppLaunching = 117, + VRInitError_Init_SettingsInitFailed = 118, + VRInitError_Init_ShuttingDown = 119, + VRInitError_Init_TooManyObjects = 120, + VRInitError_Init_NoServerForBackgroundApp = 121, + VRInitError_Init_NotSupportedWithCompositor = 122, + VRInitError_Init_NotAvailableToUtilityApps = 123, + VRInitError_Init_Internal = 124, + + VRInitError_Driver_Failed = 200, + VRInitError_Driver_Unknown = 201, + VRInitError_Driver_HmdUnknown = 202, + VRInitError_Driver_NotLoaded = 203, + VRInitError_Driver_RuntimeOutOfDate = 204, + VRInitError_Driver_HmdInUse = 205, + VRInitError_Driver_NotCalibrated = 206, + VRInitError_Driver_CalibrationInvalid = 207, + VRInitError_Driver_HmdDisplayNotFound = 208, + + VRInitError_IPC_ServerInitFailed = 300, + VRInitError_IPC_ConnectFailed = 301, + VRInitError_IPC_SharedStateInitFailed = 302, + VRInitError_IPC_CompositorInitFailed = 303, + VRInitError_IPC_MutexInitFailed = 304, + VRInitError_IPC_Failed = 305, + + VRInitError_Compositor_Failed = 400, + VRInitError_Compositor_D3D11HardwareRequired = 401, + VRInitError_Compositor_FirmwareRequiresUpdate = 402, + VRInitError_Compositor_OverlayInitFailed = 403, + VRInitError_Compositor_ScreenshotsInitFailed = 404, + + VRInitError_VendorSpecific_UnableToConnectToOculusRuntime = 1000, + + VRInitError_VendorSpecific_HmdFound_CantOpenDevice = 1101, + VRInitError_VendorSpecific_HmdFound_UnableToRequestConfigStart = 1102, + VRInitError_VendorSpecific_HmdFound_NoStoredConfig = 1103, + VRInitError_VendorSpecific_HmdFound_ConfigTooBig = 1104, + VRInitError_VendorSpecific_HmdFound_ConfigTooSmall = 1105, + VRInitError_VendorSpecific_HmdFound_UnableToInitZLib = 1106, + VRInitError_VendorSpecific_HmdFound_CantReadFirmwareVersion = 1107, + VRInitError_VendorSpecific_HmdFound_UnableToSendUserDataStart = 1108, + VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataStart = 1109, + 
VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataNext = 1110, + VRInitError_VendorSpecific_HmdFound_UserDataAddressRange = 1111, + VRInitError_VendorSpecific_HmdFound_UserDataError = 1112, + VRInitError_VendorSpecific_HmdFound_ConfigFailedSanityCheck = 1113, + + VRInitError_Steam_SteamInstallationNotFound = 2000, +}; + +enum EVRScreenshotType +{ + VRScreenshotType_None = 0, + VRScreenshotType_Mono = 1, // left eye only + VRScreenshotType_Stereo = 2, + VRScreenshotType_Cubemap = 3, + VRScreenshotType_MonoPanorama = 4, + VRScreenshotType_StereoPanorama = 5 +}; + +enum EVRScreenshotPropertyFilenames +{ + VRScreenshotPropertyFilenames_Preview = 0, + VRScreenshotPropertyFilenames_VR = 1, +}; + +enum EVRTrackedCameraError +{ + VRTrackedCameraError_None = 0, + VRTrackedCameraError_OperationFailed = 100, + VRTrackedCameraError_InvalidHandle = 101, + VRTrackedCameraError_InvalidFrameHeaderVersion = 102, + VRTrackedCameraError_OutOfHandles = 103, + VRTrackedCameraError_IPCFailure = 104, + VRTrackedCameraError_NotSupportedForThisDevice = 105, + VRTrackedCameraError_SharedMemoryFailure = 106, + VRTrackedCameraError_FrameBufferingFailure = 107, + VRTrackedCameraError_StreamSetupFailure = 108, + VRTrackedCameraError_InvalidGLTextureId = 109, + VRTrackedCameraError_InvalidSharedTextureHandle = 110, + VRTrackedCameraError_FailedToGetGLTextureId = 111, + VRTrackedCameraError_SharedTextureFailure = 112, + VRTrackedCameraError_NoFrameAvailable = 113, + VRTrackedCameraError_InvalidArgument = 114, + VRTrackedCameraError_InvalidFrameBufferSize = 115, +}; + +enum EVRTrackedCameraFrameType +{ + VRTrackedCameraFrameType_Distorted = 0, // This is the camera video frame size in pixels, still distorted. + VRTrackedCameraFrameType_Undistorted, // In pixels, an undistorted inscribed rectangle region without invalid regions. This size is subject to changes shortly. + VRTrackedCameraFrameType_MaximumUndistorted, // In pixels, maximum undistorted with invalid regions. Non zero alpha component identifies valid regions. + MAX_CAMERA_FRAME_TYPES +}; + +typedef uint64_t TrackedCameraHandle_t; +#define INVALID_TRACKED_CAMERA_HANDLE ((vr::TrackedCameraHandle_t)0) + +struct CameraVideoStreamFrameHeader_t +{ + EVRTrackedCameraFrameType eFrameType; + + uint32_t nWidth; + uint32_t nHeight; + uint32_t nBytesPerPixel; + + uint32_t nFrameSequence; + + TrackedDevicePose_t standingTrackedDevicePose; +}; + +// Screenshot types +typedef uint32_t ScreenshotHandle_t; + +static const uint32_t k_unScreenshotHandleInvalid = 0; + +#pragma pack( pop ) + +// figure out how to import from the VR API dll +#if defined(_WIN32) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __declspec( dllexport ) +#else +#define VR_INTERFACE extern "C" __declspec( dllimport ) +#endif + +#elif defined(GNUC) || defined(COMPILER_GCC) || defined(__APPLE__) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __attribute__((visibility("default"))) +#else +#define VR_INTERFACE extern "C" +#endif + +#else +#error "Unsupported Platform." +#endif + + +#if defined( _WIN32 ) +#define VR_CALLTYPE __cdecl +#else +#define VR_CALLTYPE +#endif + +} // namespace vr + +#endif // _INCLUDE_VRTYPES_H + + +// vrtrackedcameratypes.h +#ifndef _VRTRACKEDCAMERATYPES_H +#define _VRTRACKEDCAMERATYPES_H + +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. 
+ // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +enum ECameraVideoStreamFormat +{ + CVS_FORMAT_UNKNOWN = 0, + CVS_FORMAT_RAW10 = 1, // 10 bits per pixel + CVS_FORMAT_NV12 = 2, // 12 bits per pixel + CVS_FORMAT_RGB24 = 3, // 24 bits per pixel + CVS_MAX_FORMATS +}; + +enum ECameraCompatibilityMode +{ + CAMERA_COMPAT_MODE_BULK_DEFAULT = 0, + CAMERA_COMPAT_MODE_BULK_64K_DMA, + CAMERA_COMPAT_MODE_BULK_16K_DMA, + CAMERA_COMPAT_MODE_BULK_8K_DMA, + CAMERA_COMPAT_MODE_ISO_52FPS, + CAMERA_COMPAT_MODE_ISO_50FPS, + CAMERA_COMPAT_MODE_ISO_48FPS, + CAMERA_COMPAT_MODE_ISO_46FPS, + CAMERA_COMPAT_MODE_ISO_44FPS, + CAMERA_COMPAT_MODE_ISO_42FPS, + CAMERA_COMPAT_MODE_ISO_40FPS, + CAMERA_COMPAT_MODE_ISO_35FPS, + CAMERA_COMPAT_MODE_ISO_30FPS, + MAX_CAMERA_COMPAT_MODES +}; + +#ifdef _MSC_VER +#define VR_CAMERA_DECL_ALIGN( x ) __declspec( align( x ) ) +#else +#define VR_CAMERA_DECL_ALIGN( x ) // +#endif + +#define MAX_CAMERA_FRAME_SHARED_HANDLES 4 + +VR_CAMERA_DECL_ALIGN( 8 ) struct CameraVideoStreamFrame_t +{ + ECameraVideoStreamFormat m_nStreamFormat; + + uint32_t m_nWidth; + uint32_t m_nHeight; + + uint32_t m_nImageDataSize; // Based on stream format, width, height + + uint32_t m_nFrameSequence; // Starts from 0 when stream starts. + + uint32_t m_nBufferIndex; // Identifies which buffer the image data is hosted + uint32_t m_nBufferCount; // Total number of configured buffers + + uint32_t m_nExposureTime; + + uint32_t m_nISPFrameTimeStamp; // Driver provided time stamp per driver centric time base + uint32_t m_nISPReferenceTimeStamp; + uint32_t m_nSyncCounter; + + uint32_t m_nCamSyncEvents; + uint32_t m_nISPSyncEvents; + + double m_flReferenceCamSyncTime; + + double m_flFrameElapsedTime; // Starts from 0 when stream starts. In seconds. + double m_flFrameDeliveryRate; + + double m_flFrameCaptureTime_DriverAbsolute; // In USB time, via AuxEvent + double m_flFrameCaptureTime_ServerRelative; // In System time within the server + uint64_t m_nFrameCaptureTicks_ServerAbsolute; // In system ticks within the server + double m_flFrameCaptureTime_ClientRelative; // At the client, relative to when the frame was exposed/captured. 
+ + double m_flSyncMarkerError; + + TrackedDevicePose_t m_StandingTrackedDevicePose; // Supplied by HMD layer when used as a tracked camera + + uint64_t m_pImageData; +}; + +#pragma pack( pop ) + +} + +#endif // _VRTRACKEDCAMERATYPES_H +// ivrsettings.h +namespace vr +{ + enum EVRSettingsError + { + VRSettingsError_None = 0, + VRSettingsError_IPCFailed = 1, + VRSettingsError_WriteFailed = 2, + VRSettingsError_ReadFailed = 3, + }; + + // The maximum length of a settings key + static const uint32_t k_unMaxSettingsKeyLength = 128; + + class IVRSettings + { + public: + virtual const char *GetSettingsErrorNameFromEnum( EVRSettingsError eError ) = 0; + + // Returns true if file sync occurred (force or settings dirty) + virtual bool Sync( bool bForce = false, EVRSettingsError *peError = nullptr ) = 0; + + virtual bool GetBool( const char *pchSection, const char *pchSettingsKey, bool bDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetBool( const char *pchSection, const char *pchSettingsKey, bool bValue, EVRSettingsError *peError = nullptr ) = 0; + virtual int32_t GetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nValue, EVRSettingsError *peError = nullptr ) = 0; + virtual float GetFloat( const char *pchSection, const char *pchSettingsKey, float flDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetFloat( const char *pchSection, const char *pchSettingsKey, float flValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void GetString( const char *pchSection, const char *pchSettingsKey, char *pchValue, uint32_t unValueLen, const char *pchDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetString( const char *pchSection, const char *pchSettingsKey, const char *pchValue, EVRSettingsError *peError = nullptr ) = 0; + + virtual void RemoveSection( const char *pchSection, EVRSettingsError *peError = nullptr ) = 0; + virtual void RemoveKeyInSection( const char *pchSection, const char *pchSettingsKey, EVRSettingsError *peError = nullptr ) = 0; + }; + + //----------------------------------------------------------------------------- + static const char * const IVRSettings_Version = "IVRSettings_001"; + + //----------------------------------------------------------------------------- + // steamvr keys + + static const char * const k_pch_SteamVR_Section = "steamvr"; + static const char * const k_pch_SteamVR_RequireHmd_String = "requireHmd"; + static const char * const k_pch_SteamVR_ForcedDriverKey_String = "forcedDriver"; + static const char * const k_pch_SteamVR_ForcedHmdKey_String = "forcedHmd"; + static const char * const k_pch_SteamVR_DisplayDebug_Bool = "displayDebug"; + static const char * const k_pch_SteamVR_DebugProcessPipe_String = "debugProcessPipe"; + static const char * const k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion"; + static const char * const k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX"; + static const char * const k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY"; + static const char * const k_pch_SteamVR_SendSystemButtonToAllApps_Bool= "sendSystemButtonToAllApps"; + static const char * const k_pch_SteamVR_LogLevel_Int32 = "loglevel"; + static const char * const k_pch_SteamVR_IPD_Float = "ipd"; + static const char * const k_pch_SteamVR_Background_String = "background"; + static const char * const k_pch_SteamVR_BackgroundCameraHeight_Float 
= "backgroundCameraHeight"; + static const char * const k_pch_SteamVR_BackgroundDomeRadius_Float = "backgroundDomeRadius"; + static const char * const k_pch_SteamVR_Environment_String = "environment"; + static const char * const k_pch_SteamVR_GridColor_String = "gridColor"; + static const char * const k_pch_SteamVR_PlayAreaColor_String = "playAreaColor"; + static const char * const k_pch_SteamVR_ShowStage_Bool = "showStage"; + static const char * const k_pch_SteamVR_ActivateMultipleDrivers_Bool = "activateMultipleDrivers"; + static const char * const k_pch_SteamVR_PowerOffOnExit_Bool = "powerOffOnExit"; + static const char * const k_pch_SteamVR_StandbyAppRunningTimeout_Float = "standbyAppRunningTimeout"; + static const char * const k_pch_SteamVR_StandbyNoAppTimeout_Float = "standbyNoAppTimeout"; + static const char * const k_pch_SteamVR_DirectMode_Bool = "directMode"; + static const char * const k_pch_SteamVR_DirectModeEdidVid_Int32 = "directModeEdidVid"; + static const char * const k_pch_SteamVR_DirectModeEdidPid_Int32 = "directModeEdidPid"; + static const char * const k_pch_SteamVR_UsingSpeakers_Bool = "usingSpeakers"; + static const char * const k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees"; + static const char * const k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement"; + static const char * const k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses"; + static const char * const k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier"; + static const char * const k_pch_SteamVR_AllowReprojection_Bool = "allowReprojection"; + static const char * const k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection"; + static const char * const k_pch_SteamVR_ForceFadeOnBadTracking_Bool = "forceFadeOnBadTracking"; + static const char * const k_pch_SteamVR_DefaultMirrorView_Int32 = "defaultMirrorView"; + static const char * const k_pch_SteamVR_ShowMirrorView_Bool = "showMirrorView"; + + //----------------------------------------------------------------------------- + // lighthouse keys + + static const char * const k_pch_Lighthouse_Section = "driver_lighthouse"; + static const char * const k_pch_Lighthouse_DisableIMU_Bool = "disableimu"; + static const char * const k_pch_Lighthouse_UseDisambiguation_String = "usedisambiguation"; + static const char * const k_pch_Lighthouse_DisambiguationDebug_Int32 = "disambiguationdebug"; + + static const char * const k_pch_Lighthouse_PrimaryBasestation_Int32 = "primarybasestation"; + static const char * const k_pch_Lighthouse_LighthouseName_String = "lighthousename"; + static const char * const k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float = "maxincidenceangledegrees"; + static const char * const k_pch_Lighthouse_UseLighthouseDirect_Bool = "uselighthousedirect"; + static const char * const k_pch_Lighthouse_DBHistory_Bool = "dbhistory"; + + //----------------------------------------------------------------------------- + // null keys + + static const char * const k_pch_Null_Section = "driver_null"; + static const char * const k_pch_Null_EnableNullDriver_Bool = "enable"; + static const char * const k_pch_Null_SerialNumber_String = "serialNumber"; + static const char * const k_pch_Null_ModelNumber_String = "modelNumber"; + static const char * const k_pch_Null_WindowX_Int32 = "windowX"; + static const char * const k_pch_Null_WindowY_Int32 = "windowY"; + static const char * const k_pch_Null_WindowWidth_Int32 = "windowWidth"; + static const char * const 
k_pch_Null_WindowHeight_Int32 = "windowHeight"; + static const char * const k_pch_Null_RenderWidth_Int32 = "renderWidth"; + static const char * const k_pch_Null_RenderHeight_Int32 = "renderHeight"; + static const char * const k_pch_Null_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons"; + static const char * const k_pch_Null_DisplayFrequency_Float = "displayFrequency"; + + //----------------------------------------------------------------------------- + // user interface keys + static const char * const k_pch_UserInterface_Section = "userinterface"; + static const char * const k_pch_UserInterface_StatusAlwaysOnTop_Bool = "StatusAlwaysOnTop"; + static const char * const k_pch_UserInterface_EnableScreenshots_Bool = "EnableScreenshots"; + + //----------------------------------------------------------------------------- + // notification keys + static const char * const k_pch_Notifications_Section = "notifications"; + static const char * const k_pch_Notifications_DoNotDisturb_Bool = "DoNotDisturb"; + + //----------------------------------------------------------------------------- + // keyboard keys + static const char * const k_pch_Keyboard_Section = "keyboard"; + static const char * const k_pch_Keyboard_TutorialCompletions = "TutorialCompletions"; + static const char * const k_pch_Keyboard_ScaleX = "ScaleX"; + static const char * const k_pch_Keyboard_ScaleY = "ScaleY"; + static const char * const k_pch_Keyboard_OffsetLeftX = "OffsetLeftX"; + static const char * const k_pch_Keyboard_OffsetRightX = "OffsetRightX"; + static const char * const k_pch_Keyboard_OffsetY = "OffsetY"; + static const char * const k_pch_Keyboard_Smoothing = "Smoothing"; + + //----------------------------------------------------------------------------- + // perf keys + static const char * const k_pch_Perf_Section = "perfcheck"; + static const char * const k_pch_Perf_HeuristicActive_Bool = "heuristicActive"; + static const char * const k_pch_Perf_NotifyInHMD_Bool = "warnInHMD"; + static const char * const k_pch_Perf_NotifyOnlyOnce_Bool = "warnOnlyOnce"; + static const char * const k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore"; + static const char * const k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit"; + static const char * const k_pch_Perf_TestData_Float = "perfTestData"; + + //----------------------------------------------------------------------------- + // collision bounds keys + static const char * const k_pch_CollisionBounds_Section = "collisionBounds"; + static const char * const k_pch_CollisionBounds_Style_Int32 = "CollisionBoundsStyle"; + static const char * const k_pch_CollisionBounds_GroundPerimeterOn_Bool = "CollisionBoundsGroundPerimeterOn"; + static const char * const k_pch_CollisionBounds_CenterMarkerOn_Bool = "CollisionBoundsCenterMarkerOn"; + static const char * const k_pch_CollisionBounds_PlaySpaceOn_Bool = "CollisionBoundsPlaySpaceOn"; + static const char * const k_pch_CollisionBounds_FadeDistance_Float = "CollisionBoundsFadeDistance"; + static const char * const k_pch_CollisionBounds_ColorGammaR_Int32 = "CollisionBoundsColorGammaR"; + static const char * const k_pch_CollisionBounds_ColorGammaG_Int32 = "CollisionBoundsColorGammaG"; + static const char * const k_pch_CollisionBounds_ColorGammaB_Int32 = "CollisionBoundsColorGammaB"; + static const char * const k_pch_CollisionBounds_ColorGammaA_Int32 = "CollisionBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // camera keys + static const char * const k_pch_Camera_Section 
= "camera"; + static const char * const k_pch_Camera_EnableCamera_Bool = "enableCamera"; + static const char * const k_pch_Camera_EnableCameraInDashboard_Bool = "enableCameraInDashboard"; + static const char * const k_pch_Camera_EnableCameraForCollisionBounds_Bool = "enableCameraForCollisionBounds"; + static const char * const k_pch_Camera_EnableCameraForRoomView_Bool = "enableCameraForRoomView"; + static const char * const k_pch_Camera_BoundsColorGammaR_Int32 = "cameraBoundsColorGammaR"; + static const char * const k_pch_Camera_BoundsColorGammaG_Int32 = "cameraBoundsColorGammaG"; + static const char * const k_pch_Camera_BoundsColorGammaB_Int32 = "cameraBoundsColorGammaB"; + static const char * const k_pch_Camera_BoundsColorGammaA_Int32 = "cameraBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // audio keys + static const char * const k_pch_audio_Section = "audio"; + static const char * const k_pch_audio_OnPlaybackDevice_String = "onPlaybackDevice"; + static const char * const k_pch_audio_OnRecordDevice_String = "onRecordDevice"; + static const char * const k_pch_audio_OnPlaybackMirrorDevice_String = "onPlaybackMirrorDevice"; + static const char * const k_pch_audio_OffPlaybackDevice_String = "offPlaybackDevice"; + static const char * const k_pch_audio_OffRecordDevice_String = "offRecordDevice"; + static const char * const k_pch_audio_VIVEHDMIGain = "viveHDMIGain"; + + //----------------------------------------------------------------------------- + // model skin keys + static const char * const k_pch_modelskin_Section = "modelskins"; + +} // namespace vr + +// iservertrackeddevicedriver.h +namespace vr +{ + + +struct DriverPoseQuaternion_t +{ + double w, x, y, z; +}; + +struct DriverPose_t +{ + /* Time offset of this pose, in seconds from the actual time of the pose, + * relative to the time of the PoseUpdated() call made by the driver. + */ + double poseTimeOffset; + + /* Generally, the pose maintained by a driver + * is in an inertial coordinate system different + * from the world system of x+ right, y+ up, z+ back. + * Also, the driver is not usually tracking the "head" position, + * but instead an internal IMU or another reference point in the HMD. + * The following two transforms transform positions and orientations + * to app world space from driver world space, + * and to HMD head space from driver local body space. + * + * We maintain the driver pose state in its internal coordinate system, + * so we can do the pose prediction math without having to + * use angular acceleration. A driver's angular acceleration is generally not measured, + * and is instead calculated from successive samples of angular velocity. + * This leads to a noisy angular acceleration values, which are also + * lagged due to the filtering required to reduce noise to an acceptable level. + */ + vr::HmdQuaternion_t qWorldFromDriverRotation; + double vecWorldFromDriverTranslation[ 3 ]; + + vr::HmdQuaternion_t qDriverFromHeadRotation; + double vecDriverFromHeadTranslation[ 3 ]; + + /* State of driver pose, in meters and radians. 
*/ + /* Position of the driver tracking reference in driver world space + * +[0] (x) is right + * +[1] (y) is up + * -[2] (z) is forward + */ + double vecPosition[ 3 ]; + + /* Velocity of the pose in meters/second */ + double vecVelocity[ 3 ]; + + /* Acceleration of the pose in meters/second */ + double vecAcceleration[ 3 ]; + + /* Orientation of the tracker, represented as a quaternion */ + vr::HmdQuaternion_t qRotation; + + /* Angular velocity of the pose in axis-angle + * representation. The direction is the angle of + * rotation and the magnitude is the angle around + * that axis in radians/second. */ + double vecAngularVelocity[ 3 ]; + + /* Angular acceleration of the pose in axis-angle + * representation. The direction is the angle of + * rotation and the magnitude is the angle around + * that axis in radians/second^2. */ + double vecAngularAcceleration[ 3 ]; + + ETrackingResult result; + + bool poseIsValid; + bool willDriftInYaw; + bool shouldApplyHeadModel; + bool deviceIsConnected; +}; + + +// ---------------------------------------------------------------------------------------------- +// Purpose: Represents a single tracked device in a driver +// ---------------------------------------------------------------------------------------------- +class ITrackedDeviceServerDriver +{ +public: + + // ------------------------------------ + // Management Methods + // ------------------------------------ + /** This is called before an HMD is returned to the application. It will always be + * called before any display or tracking methods. Memory and processor use by the + * ITrackedDeviceServerDriver object should be kept to a minimum until it is activated. + * The pose listener is guaranteed to be valid until Deactivate is called, but + * should not be used after that point. */ + virtual EVRInitError Activate( uint32_t unObjectId ) = 0; + + /** This is called when The VR system is switching from this Hmd being the active display + * to another Hmd being the active display. The driver should clean whatever memory + * and thread use it can when it is deactivated */ + virtual void Deactivate() = 0; + + /** Handles a request from the system to power off this device */ + virtual void PowerOff() = 0; + + /** Requests a component interface of the driver for device-specific functionality. The driver should return NULL + * if the requested interface or version is not supported. */ + virtual void *GetComponent( const char *pchComponentNameAndVersion ) = 0; + + /** A VR Client has made this debug request of the driver. The set of valid requests is entirely + * up to the driver and the client to figure out, as is the format of the response. Responses that + * exceed the length of the supplied buffer should be truncated and null terminated */ + virtual void DebugRequest( const char *pchRequest, char *pchResponseBuffer, uint32_t unResponseBufferSize ) = 0; + + // ------------------------------------ + // Tracking Methods + // ------------------------------------ + virtual DriverPose_t GetPose() = 0; + + // ------------------------------------ + // Property Methods + // ------------------------------------ + + /** Returns a bool property. If the property is not available this function will return false. */ + virtual bool GetBoolTrackedDeviceProperty( ETrackedDeviceProperty prop, ETrackedPropertyError *pError ) = 0; + + /** Returns a float property. If the property is not available this function will return 0. 
*/ + virtual float GetFloatTrackedDeviceProperty( ETrackedDeviceProperty prop, ETrackedPropertyError *pError ) = 0; + + /** Returns an int property. If the property is not available this function will return 0. */ + virtual int32_t GetInt32TrackedDeviceProperty( ETrackedDeviceProperty prop, ETrackedPropertyError *pError ) = 0; + + /** Returns a uint64 property. If the property is not available this function will return 0. */ + virtual uint64_t GetUint64TrackedDeviceProperty( ETrackedDeviceProperty prop, ETrackedPropertyError *pError ) = 0; + + /** Returns a matrix property. If the device index is not valid or the property is not a matrix type, this function will return identity. */ + virtual HmdMatrix34_t GetMatrix34TrackedDeviceProperty( ETrackedDeviceProperty prop, ETrackedPropertyError *pError ) = 0; + + /** Returns a string property. If the property is not available this function will return 0 and pError will be + * set to an error. Otherwise it returns the length of the number of bytes necessary to hold this string including + * the trailing null. If the buffer is too small the error will be TrackedProp_BufferTooSmall. Strings will + * generally fit in buffers of k_unTrackingStringSize characters. Drivers may not return strings longer than + * k_unMaxPropertyStringSize. */ + virtual uint32_t GetStringTrackedDeviceProperty( ETrackedDeviceProperty prop, char *pchValue, uint32_t unBufferSize, ETrackedPropertyError *pError ) = 0; + +}; + + + +static const char *ITrackedDeviceServerDriver_Version = "ITrackedDeviceServerDriver_004"; + +} +// ivrdisplaycomponent.h +namespace vr +{ + + + // ---------------------------------------------------------------------------------------------- + // Purpose: The display component on a single tracked device + // ---------------------------------------------------------------------------------------------- + class IVRDisplayComponent + { + public: + + // ------------------------------------ + // Display Methods + // ------------------------------------ + + /** Size and position that the window needs to be on the VR display. */ + virtual void GetWindowBounds( int32_t *pnX, int32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** Returns true if the display is extending the desktop. */ + virtual bool IsDisplayOnDesktop( ) = 0; + + /** Returns true if the display is real and not a fictional display. */ + virtual bool IsDisplayRealDisplay( ) = 0; + + /** Suggested size for the intermediate render target that the distortion pulls from. */ + virtual void GetRecommendedRenderTargetSize( uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** Gets the viewport in the frame buffer to draw the output of the distortion into */ + virtual void GetEyeOutputViewport( EVREye eEye, uint32_t *pnX, uint32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** The components necessary to build your own projection matrix in case your + * application is doing something fancy like infinite Z */ + virtual void GetProjectionRaw( EVREye eEye, float *pfLeft, float *pfRight, float *pfTop, float *pfBottom ) = 0; + + /** Returns the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in + * the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport. 
*/ + virtual DistortionCoordinates_t ComputeDistortion( EVREye eEye, float fU, float fV ) = 0; + + }; + + static const char *IVRDisplayComponent_Version = "IVRDisplayComponent_002"; + +} + +// ivrdriverdirectmodecomponent.h +namespace vr +{ + + + // ---------------------------------------------------------------------------------------------- + // Purpose: This component is used for drivers that implement direct mode entirely on their own + // without allowing the VR Compositor to own the window/device. Chances are you don't + // need to implement this component in your driver. + // ---------------------------------------------------------------------------------------------- + class IVRDriverDirectModeComponent + { + public: + + // ----------------------------------- + // Direct mode methods + // ----------------------------------- + + /** Specific to Oculus compositor support, textures supplied must be created using this method. */ + virtual void CreateSwapTextureSet( uint32_t unPid, uint32_t unFormat, uint32_t unWidth, uint32_t unHeight, void *(*pSharedTextureHandles)[3] ) {} + + /** Used to textures created using CreateSwapTextureSet. Only one of the set's handles needs to be used to destroy the entire set. */ + virtual void DestroySwapTextureSet( void *pSharedTextureHandle ) {} + + /** Used to purge all texture sets for a given process. */ + virtual void DestroyAllSwapTextureSets( uint32_t unPid ) {} + + /** After Present returns, calls this to get the next index to use for rendering. */ + virtual void GetNextSwapTextureSetIndex( void *pSharedTextureHandles[ 2 ], uint32_t( *pIndices )[ 2 ] ) {} + + /** Call once per layer to draw for this frame. One shared texture handle per eye. Textures must be created + * using CreateSwapTextureSet and should be alternated per frame. Call Present once all layers have been submitted. */ + virtual void SubmitLayer( void *pSharedTextureHandles[ 2 ], const vr::VRTextureBounds_t( &bounds )[ 2 ], const vr::HmdMatrix34_t *pPose ) {} + + /** Submits queued layers for display. */ + virtual void Present( void *hSyncTexture ) {} + + }; + + static const char *IVRDriverDirectModeComponent_Version = "IVRDriverDirectModeComponent_001"; + +} + +// ivrcontrollercomponent.h +namespace vr +{ + + + // ---------------------------------------------------------------------------------------------- + // Purpose: Controller access on a single tracked device. + // ---------------------------------------------------------------------------------------------- + class IVRControllerComponent + { + public: + + // ------------------------------------ + // Controller Methods + // ------------------------------------ + + /** Gets the current state of a controller. */ + virtual VRControllerState_t GetControllerState( ) = 0; + + /** Returns a uint64 property. If the property is not available this function will return 0. 
*/ + virtual bool TriggerHapticPulse( uint32_t unAxisId, uint16_t usPulseDurationMicroseconds ) = 0; + + }; + + + + static const char *IVRControllerComponent_Version = "IVRControllerComponent_001"; + +} +// ivrcameracomponent.h +namespace vr +{ + + //----------------------------------------------------------------------------- + //----------------------------------------------------------------------------- + class ICameraVideoSinkCallback + { + public: + virtual void OnCameraVideoSinkCallback() = 0; + }; + + // ---------------------------------------------------------------------------------------------- + // Purpose: The camera on a single tracked device + // ---------------------------------------------------------------------------------------------- + class IVRCameraComponent + { + public: + // ------------------------------------ + // Camera Methods + // ------------------------------------ + virtual bool HasCamera() = 0; + virtual bool GetCameraFirmwareDescription( char *pBuffer, uint32_t nBufferLen ) = 0; + virtual bool GetCameraFrameDimensions( vr::ECameraVideoStreamFormat nVideoStreamFormat, uint32_t *pWidth, uint32_t *pHeight ) = 0; + virtual bool GetCameraFrameBufferingRequirements( int *pDefaultFrameQueueSize, uint32_t *pFrameBufferDataSize ) = 0; + virtual bool SetCameraFrameBuffering( int nFrameBufferCount, void **ppFrameBuffers, uint32_t nFrameBufferDataSize ) = 0; + virtual bool SetCameraVideoStreamFormat( vr::ECameraVideoStreamFormat nVideoStreamFormat ) = 0; + virtual vr::ECameraVideoStreamFormat GetCameraVideoStreamFormat() = 0; + virtual bool StartVideoStream() = 0; + virtual void StopVideoStream() = 0; + virtual bool IsVideoStreamActive() = 0; + virtual float GetVideoStreamElapsedTime() = 0; + virtual const vr::CameraVideoStreamFrame_t *GetVideoStreamFrame() = 0; + virtual void ReleaseVideoStreamFrame( const vr::CameraVideoStreamFrame_t *pFrameImage ) = 0; + virtual bool SetAutoExposure( bool bEnable ) = 0; + virtual bool PauseVideoStream() = 0; + virtual bool ResumeVideoStream() = 0; + virtual bool IsVideoStreamPaused() = 0; + virtual bool GetCameraDistortion( float flInputU, float flInputV, float *pflOutputU, float *pflOutputV ) = 0; + virtual bool GetCameraProjection( float flWidthPixels, float flHeightPixels, float flZNear, float flZFar, vr::HmdMatrix44_t *pProjection ) = 0; + virtual bool GetRecommendedCameraUndistortion( uint32_t *pUndistortionWidthPixels, uint32_t *pUndistortionHeightPixels ) = 0; + virtual bool SetCameraUndistortion( uint32_t nUndistortionWidthPixels, uint32_t nUndistortionHeightPixels ) = 0; + virtual bool GetCameraFirmwareVersion( uint64_t *pFirmwareVersion ) = 0; + virtual bool SetFrameRate( int nISPFrameRate, int nSensorFrameRate ) = 0; + virtual bool SetCameraVideoSinkCallback( vr::ICameraVideoSinkCallback *pCameraVideoSinkCallback ) = 0; + virtual bool GetCameraCompatibilityMode( vr::ECameraCompatibilityMode *pCameraCompatibilityMode ) = 0; + virtual bool SetCameraCompatibilityMode( vr::ECameraCompatibilityMode nCameraCompatibilityMode ) = 0; + virtual bool GetCameraFrameBounds( vr::EVRTrackedCameraFrameType eFrameType, uint32_t *pLeft, uint32_t *pTop, uint32_t *pWidth, uint32_t *pHeight ) = 0; + virtual bool GetCameraIntrinsics( vr::EVRTrackedCameraFrameType eFrameType, HmdVector2_t *pFocalLength, HmdVector2_t *pCenter ) = 0; + }; + + static const char *IVRCameraComponent_Version = "IVRCameraComponent_001"; +} +// itrackeddevicedriverprovider.h +namespace vr +{ + +class ITrackedDeviceServerDriver; +struct TrackedDeviceDriverInfo_t; 
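// ----------------------------------------------------------------------------------------------
// [Editor's note -- not part of the OpenVR SDK or of this patch] A minimal sketch of how a
// driver might fill in the DriverPose_t documented above, assuming the driver already tracks in
// app world space (identity world-from-driver / driver-from-head transforms) and reports a
// stationary pose; the helper name and the hard-coded standing height are illustrative only.
// ----------------------------------------------------------------------------------------------
inline DriverPose_t MakeExampleDriverPose()
{
	DriverPose_t pose = {};                   // zero-initialize every field first

	pose.poseTimeOffset = 0.0;                // the pose describes "now"

	// Both correction transforms are the identity quaternion (w=1, x=y=z=0):
	// driver space == app world space, tracked reference point == head point.
	pose.qWorldFromDriverRotation.w = 1.0;
	pose.qDriverFromHeadRotation.w = 1.0;

	// Position in driver world space: +x right, +y up, -z forward (see vecPosition above).
	pose.vecPosition[0] = 0.0;
	pose.vecPosition[1] = 1.6;                // roughly standing eye height, in meters
	pose.vecPosition[2] = 0.0;

	pose.qRotation.w = 1.0;                   // identity orientation: facing forward

	// Velocities and accelerations stay zero: this example pose is stationary.
	pose.result = TrackingResult_Running_OK;
	pose.poseIsValid = true;
	pose.deviceIsConnected = true;
	pose.willDriftInYaw = false;
	pose.shouldApplyHeadModel = false;

	return pose;
}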
+struct DriverPose_t; + +class IDriverLog +{ +public: + /** Writes a log message to the log file prefixed with the driver name */ + virtual void Log( const char *pchLogMessage ) = 0; +}; + +/** This interface is provided by vrserver to allow the driver to notify +* the system when something changes about a device. These changes must +* not change the serial number or class of the device because those values +* are permanently associated with the device's index. */ +class IServerDriverHost +{ +public: + /** Notifies the server that a tracked device has been added. If this function returns true + * the server will call Activate on the device. If it returns false some kind of error + * has occurred and the device will not be activated. */ + virtual bool TrackedDeviceAdded( const char *pchDeviceSerialNumber ) = 0; + + /** Notifies the server that a tracked device's pose has been updated */ + virtual void TrackedDevicePoseUpdated( uint32_t unWhichDevice, const DriverPose_t & newPose ) = 0; + + /** Notifies the server that the property cache for the specified device should be invalidated */ + virtual void TrackedDevicePropertiesChanged( uint32_t unWhichDevice ) = 0; + + /** Notifies the server that vsync has occurred on the the display attached to the device. This is + * only permitted on devices of the HMD class. */ + virtual void VsyncEvent( double vsyncTimeOffsetSeconds ) = 0; + + /** notifies the server that the button was pressed */ + virtual void TrackedDeviceButtonPressed( uint32_t unWhichDevice, EVRButtonId eButtonId, double eventTimeOffset ) = 0; + + /** notifies the server that the button was unpressed */ + virtual void TrackedDeviceButtonUnpressed( uint32_t unWhichDevice, EVRButtonId eButtonId, double eventTimeOffset ) = 0; + + /** notifies the server that the button was pressed */ + virtual void TrackedDeviceButtonTouched( uint32_t unWhichDevice, EVRButtonId eButtonId, double eventTimeOffset ) = 0; + + /** notifies the server that the button was unpressed */ + virtual void TrackedDeviceButtonUntouched( uint32_t unWhichDevice, EVRButtonId eButtonId, double eventTimeOffset ) = 0; + + /** notifies the server than a controller axis changed */ + virtual void TrackedDeviceAxisUpdated( uint32_t unWhichDevice, uint32_t unWhichAxis, const VRControllerAxis_t & axisState ) = 0; + + /** Notifies the server that the MC image has been updated for the display attached to the device. This is + * only permitted on devices of the HMD class. */ + virtual void MCImageUpdated() = 0; + + /** always returns a pointer to a valid interface pointer of IVRSettings */ + virtual IVRSettings *GetSettings( const char *pchInterfaceVersion ) = 0; + + /** Notifies the server that the physical IPD adjustment has been moved on the HMD */ + virtual void PhysicalIpdSet( uint32_t unWhichDevice, float fPhysicalIpdMeters ) = 0; + + /** Notifies the server that the proximity sensor on the specified device */ + virtual void ProximitySensorState( uint32_t unWhichDevice, bool bProximitySensorTriggered ) = 0; + + /** Sends a vendor specific event (VREvent_VendorSpecific_Reserved_Start..VREvent_VendorSpecific_Reserved_End */ + virtual void VendorSpecificEvent( uint32_t unWhichDevice, vr::EVREventType eventType, const VREvent_Data_t & eventData, double eventTimeOffset ) = 0; + + /** Returns true if SteamVR is exiting */ + virtual bool IsExiting() = 0; +}; + + +/** This interface must be implemented in each driver. It will be loaded in vrserver.exe */ +class IServerTrackedDeviceProvider +{ +public: + /** initializes the driver. 
This will be called before any other methods are called. + * If Init returns anything other than VRInitError_None the driver DLL will be unloaded. + * + * pDriverHost will never be NULL, and will always be a pointer to a IServerDriverHost interface + * + * pchUserDriverConfigDir - The absolute path of the directory where the driver should store user + * config files. + * pchDriverInstallDir - The absolute path of the root directory for the driver. + */ + virtual EVRInitError Init( IDriverLog *pDriverLog, vr::IServerDriverHost *pDriverHost, const char *pchUserDriverConfigDir, const char *pchDriverInstallDir ) = 0; + + /** cleans up the driver right before it is unloaded */ + virtual void Cleanup() = 0; + + /** Returns the version of the ITrackedDeviceServerDriver interface used by this driver */ + virtual const char * const *GetInterfaceVersions() = 0; + + /** returns the number of HMDs that this driver manages that are physically connected. */ + virtual uint32_t GetTrackedDeviceCount() = 0; + + /** returns a single HMD */ + virtual ITrackedDeviceServerDriver *GetTrackedDeviceDriver( uint32_t unWhich ) = 0; + + /** returns a single HMD by ID */ + virtual ITrackedDeviceServerDriver* FindTrackedDeviceDriver( const char *pchId ) = 0; + + /** Allows the driver do to some work in the main loop of the server. */ + virtual void RunFrame() = 0; + + + // ------------ Power State Functions ----------------------- // + + /** Returns true if the driver wants to block Standby mode. */ + virtual bool ShouldBlockStandbyMode() = 0; + + /** Called when the system is entering Standby mode. The driver should switch itself into whatever sort of low-power + * state it has. */ + virtual void EnterStandby() = 0; + + /** Called when the system is leaving Standby mode. The driver should switch itself back to + full operation. */ + virtual void LeaveStandby() = 0; + +}; + + +static const char *IServerTrackedDeviceProvider_Version = "IServerTrackedDeviceProvider_003"; + + +/** This interface is provided by vrclient to allow the driver call back and query various information */ +class IClientDriverHost +{ +public: + /** Returns the device class of a tracked device. If there has not been a device connected in this slot + * since the application started this function will return TrackedDevice_Invalid. For previous detected + * devices the function will return the previously observed device class. + * + * To determine which devices exist on the system, just loop from 0 to k_unMaxTrackedDeviceCount and check + * the device class. Every device with something other than TrackedDevice_Invalid is associated with an + * actual tracked device. */ + virtual ETrackedDeviceClass GetTrackedDeviceClass( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns true if there is a device connected in this slot. */ + virtual bool IsTrackedDeviceConnected( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false. */ + virtual bool GetBoolTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a float property. If the device index is not valid or the property is not a float type this function will return 0. */ + virtual float GetFloatTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns an int property. 
If the device index is not valid or the property is not a int type this function will return 0. */ + virtual int32_t GetInt32TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a uint64 property. If the device index is not valid or the property is not a uint64 type this function will return 0. */ + virtual uint64_t GetUint64TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a string property. If the device index is not valid or the property is not a float type this function will + * return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing + * null. Strings will generally fit in buffers of k_unTrackingStringSize characters. */ + virtual uint32_t GetStringTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, char *pchValue, uint32_t unBufferSize, ETrackedPropertyError *pError = 0L ) = 0; + + /** always returns a pointer to a valid interface pointer of IVRSettings */ + virtual IVRSettings *GetSettings( const char *pchInterfaceVersion ) = 0; +}; + + + +/** This interface must be implemented in each driver. It will be loaded in vrclient.dll */ +class IClientTrackedDeviceProvider +{ +public: + /** initializes the driver. This will be called before any other methods are called, + * except BIsHmdPresent(). BIsHmdPresent is called outside of the Init/Cleanup pair. + * If Init returns anything other than VRInitError_None the driver DLL will be unloaded. + * + * pDriverHost will never be NULL, and will always be a pointer to a IClientDriverHost interface + * + * pchUserDriverConfigDir - The absolute path of the directory where the driver should store user + * config files. + * pchDriverInstallDir - The absolute path of the root directory for the driver. + */ + virtual EVRInitError Init( IDriverLog *pDriverLog, vr::IClientDriverHost *pDriverHost, const char *pchUserDriverConfigDir, const char *pchDriverInstallDir ) = 0; + + /** cleans up the driver right before it is unloaded */ + virtual void Cleanup() = 0; + + /** Called when the client needs to inform an application if an HMD is attached that uses + * this driver. This method should be as lightweight as possible and should have no side effects + * such as hooking process functions or leaving resources loaded. Init will not be called before + * this method and Cleanup will not be called after it. + */ + virtual bool BIsHmdPresent( const char *pchUserConfigDir ) = 0; + + /** called when the client inits an HMD to let the client driver know which one is in use */ + virtual EVRInitError SetDisplayId( const char *pchDisplayId ) = 0; + + /** Returns the stencil mesh information for the current HMD. If this HMD does not have a stencil mesh the vertex data and count will be + * NULL and 0 respectively. This mesh is meant to be rendered into the stencil buffer (or into the depth buffer setting nearz) before rendering + * each eye's view. The pixels covered by this mesh will never be seen by the user after the lens distortion is applied and based on visibility to the panels. + * This will improve perf by letting the GPU early-reject pixels the user will never see before running the pixel shader. + * NOTE: Render this mesh with backface culling disabled since the winding order of the vertices can be different per-HMD or per-eye. 
+ */ + virtual HiddenAreaMesh_t GetHiddenAreaMesh( EVREye eEye ) = 0; + + /** Get the MC image for the current HMD. + * Returns the size in bytes of the buffer required to hold the specified resource. */ + virtual uint32_t GetMCImage( uint32_t *pImgWidth, uint32_t *pImgHeight, uint32_t *pChannels, void *pDataBuffer, uint32_t unBufferLen ) = 0; +}; + +static const char *IClientTrackedDeviceProvider_Version = "IClientTrackedDeviceProvider_003"; + +} + + + + +namespace vr +{ + static const char * const k_InterfaceVersions[] = + { + IVRSettings_Version, + ITrackedDeviceServerDriver_Version, + IVRDisplayComponent_Version, + IVRDriverDirectModeComponent_Version, + IVRControllerComponent_Version, + IVRCameraComponent_Version, + IServerTrackedDeviceProvider_Version, + IClientTrackedDeviceProvider_Version, + nullptr + }; +} +// End + +#endif // _OPENVR_DRIVER_API + + diff --git a/examples/ThirdPartyLibs/openvr/lib/linux32/libopenvr_api.so b/examples/ThirdPartyLibs/openvr/lib/linux32/libopenvr_api.so new file mode 100644 index 000000000..27da01cbe Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/lib/linux32/libopenvr_api.so differ diff --git a/examples/ThirdPartyLibs/openvr/lib/linux64/libopenvr_api.so b/examples/ThirdPartyLibs/openvr/lib/linux64/libopenvr_api.so new file mode 100644 index 000000000..52fd2271b Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/lib/linux64/libopenvr_api.so differ diff --git a/examples/ThirdPartyLibs/openvr/lib/osx32/libopenvr_api.dylib b/examples/ThirdPartyLibs/openvr/lib/osx32/libopenvr_api.dylib new file mode 100644 index 000000000..89cad7710 Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/lib/osx32/libopenvr_api.dylib differ diff --git a/examples/ThirdPartyLibs/openvr/lib/win32/openvr_api.lib b/examples/ThirdPartyLibs/openvr/lib/win32/openvr_api.lib new file mode 100644 index 000000000..45a41aeb0 Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/lib/win32/openvr_api.lib differ diff --git a/examples/ThirdPartyLibs/openvr/lib/win64/openvr_api.lib b/examples/ThirdPartyLibs/openvr/lib/win64/openvr_api.lib new file mode 100644 index 000000000..12344317a Binary files /dev/null and b/examples/ThirdPartyLibs/openvr/lib/win64/openvr_api.lib differ diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.cpp b/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.cpp new file mode 100644 index 000000000..582b2854e --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.cpp @@ -0,0 +1,581 @@ +/////////////////////////////////////////////////////////////////////////////// +// Matrice.cpp +// =========== +// NxN Matrix Math classes +// +// The elements of the matrix are stored as column major order. 
+// | 0 2 | | 0 3 6 | | 0 4 8 12 | +// | 1 3 | | 1 4 7 | | 1 5 9 13 | +// | 2 5 8 | | 2 6 10 14 | +// | 3 7 11 15 | +// +// AUTHOR: Song Ho Ahn (song.ahn@gmail.com) +// CREATED: 2005-06-24 +// UPDATED: 2014-09-21 +// +// Copyright (C) 2005 Song Ho Ahn +/////////////////////////////////////////////////////////////////////////////// + +#include +#include +#include "Matrices.h" + +const float DEG2RAD = 3.141593f / 180; +const float EPSILON = 0.00001f; + + + +/////////////////////////////////////////////////////////////////////////////// +// transpose 2x2 matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix2& Matrix2::transpose() +{ + std::swap(m[1], m[2]); + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// return the determinant of 2x2 matrix +/////////////////////////////////////////////////////////////////////////////// +float Matrix2::getDeterminant() +{ + return m[0] * m[3] - m[1] * m[2]; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// inverse of 2x2 matrix +// If cannot find inverse, set identity matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix2& Matrix2::invert() +{ + float determinant = getDeterminant(); + if(fabs(determinant) <= EPSILON) + { + return identity(); + } + + float tmp = m[0]; // copy the first element + float invDeterminant = 1.0f / determinant; + m[0] = invDeterminant * m[3]; + m[1] = -invDeterminant * m[1]; + m[2] = -invDeterminant * m[2]; + m[3] = invDeterminant * tmp; + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// transpose 3x3 matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix3& Matrix3::transpose() +{ + std::swap(m[1], m[3]); + std::swap(m[2], m[6]); + std::swap(m[5], m[7]); + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// return determinant of 3x3 matrix +/////////////////////////////////////////////////////////////////////////////// +float Matrix3::getDeterminant() +{ + return m[0] * (m[4] * m[8] - m[5] * m[7]) - + m[1] * (m[3] * m[8] - m[5] * m[6]) + + m[2] * (m[3] * m[7] - m[4] * m[6]); +} + + + +/////////////////////////////////////////////////////////////////////////////// +// inverse 3x3 matrix +// If cannot find inverse, set identity matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix3& Matrix3::invert() +{ + float determinant, invDeterminant; + float tmp[9]; + + tmp[0] = m[4] * m[8] - m[5] * m[7]; + tmp[1] = m[2] * m[7] - m[1] * m[8]; + tmp[2] = m[1] * m[5] - m[2] * m[4]; + tmp[3] = m[5] * m[6] - m[3] * m[8]; + tmp[4] = m[0] * m[8] - m[2] * m[6]; + tmp[5] = m[2] * m[3] - m[0] * m[5]; + tmp[6] = m[3] * m[7] - m[4] * m[6]; + tmp[7] = m[1] * m[6] - m[0] * m[7]; + tmp[8] = m[0] * m[4] - m[1] * m[3]; + + // check determinant if it is 0 + determinant = m[0] * tmp[0] + m[1] * tmp[3] + m[2] * tmp[6]; + if(fabs(determinant) <= EPSILON) + { + return identity(); // cannot inverse, make it idenety matrix + } + + // divide by the determinant + invDeterminant = 1.0f / determinant; + m[0] = invDeterminant * tmp[0]; + m[1] = invDeterminant * tmp[1]; + m[2] = invDeterminant * tmp[2]; + m[3] = invDeterminant * tmp[3]; + m[4] = invDeterminant * tmp[4]; + m[5] = invDeterminant * tmp[5]; + m[6] = invDeterminant * tmp[6]; + m[7] = invDeterminant * tmp[7]; + m[8] = 
invDeterminant * tmp[8]; + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// transpose 4x4 matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::transpose() +{ + std::swap(m[1], m[4]); + std::swap(m[2], m[8]); + std::swap(m[3], m[12]); + std::swap(m[6], m[9]); + std::swap(m[7], m[13]); + std::swap(m[11], m[14]); + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// inverse 4x4 matrix +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::invert() +{ + // If the 4th row is [0,0,0,1] then it is affine matrix and + // it has no projective transformation. + if(m[3] == 0 && m[7] == 0 && m[11] == 0 && m[15] == 1) + this->invertAffine(); + else + { + this->invertGeneral(); + /*@@ invertProjective() is not optimized (slower than generic one) + if(fabs(m[0]*m[5] - m[1]*m[4]) > EPSILON) + this->invertProjective(); // inverse using matrix partition + else + this->invertGeneral(); // generalized inverse + */ + } + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// compute the inverse of 4x4 Euclidean transformation matrix +// +// Euclidean transformation is translation, rotation, and reflection. +// With Euclidean transform, only the position and orientation of the object +// will be changed. Euclidean transform does not change the shape of an object +// (no scaling). Length and angle are reserved. +// +// Use inverseAffine() if the matrix has scale and shear transformation. +// +// M = [ R | T ] +// [ --+-- ] (R denotes 3x3 rotation/reflection matrix) +// [ 0 | 1 ] (T denotes 1x3 translation matrix) +// +// y = M*x -> y = R*x + T -> x = R^-1*(y - T) -> x = R^T*y - R^T*T +// (R is orthogonal, R^-1 = R^T) +// +// [ R | T ]-1 [ R^T | -R^T * T ] (R denotes 3x3 rotation matrix) +// [ --+-- ] = [ ----+--------- ] (T denotes 1x3 translation) +// [ 0 | 1 ] [ 0 | 1 ] (R^T denotes R-transpose) +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::invertEuclidean() +{ + // transpose 3x3 rotation matrix part + // | R^T | 0 | + // | ----+-- | + // | 0 | 1 | + float tmp; + tmp = m[1]; m[1] = m[4]; m[4] = tmp; + tmp = m[2]; m[2] = m[8]; m[8] = tmp; + tmp = m[6]; m[6] = m[9]; m[9] = tmp; + + // compute translation part -R^T * T + // | 0 | -R^T x | + // | --+------- | + // | 0 | 0 | + float x = m[12]; + float y = m[13]; + float z = m[14]; + m[12] = -(m[0] * x + m[4] * y + m[8] * z); + m[13] = -(m[1] * x + m[5] * y + m[9] * z); + m[14] = -(m[2] * x + m[6] * y + m[10]* z); + + // last row should be unchanged (0,0,0,1) + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// compute the inverse of a 4x4 affine transformation matrix +// +// Affine transformations are generalizations of Euclidean transformations. +// Affine transformation includes translation, rotation, reflection, scaling, +// and shearing. Length and angle are NOT preserved. 
+// M = [ R | T ] +// [ --+-- ] (R denotes 3x3 rotation/scale/shear matrix) +// [ 0 | 1 ] (T denotes 1x3 translation matrix) +// +// y = M*x -> y = R*x + T -> x = R^-1*(y - T) -> x = R^-1*y - R^-1*T +// +// [ R | T ]-1 [ R^-1 | -R^-1 * T ] +// [ --+-- ] = [ -----+---------- ] +// [ 0 | 1 ] [ 0 + 1 ] +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::invertAffine() +{ + // R^-1 + Matrix3 r(m[0],m[1],m[2], m[4],m[5],m[6], m[8],m[9],m[10]); + r.invert(); + m[0] = r[0]; m[1] = r[1]; m[2] = r[2]; + m[4] = r[3]; m[5] = r[4]; m[6] = r[5]; + m[8] = r[6]; m[9] = r[7]; m[10]= r[8]; + + // -R^-1 * T + float x = m[12]; + float y = m[13]; + float z = m[14]; + m[12] = -(r[0] * x + r[3] * y + r[6] * z); + m[13] = -(r[1] * x + r[4] * y + r[7] * z); + m[14] = -(r[2] * x + r[5] * y + r[8] * z); + + // last row should be unchanged (0,0,0,1) + //m[3] = m[7] = m[11] = 0.0f; + //m[15] = 1.0f; + + return * this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// inverse matrix using matrix partitioning (blockwise inverse) +// It devides a 4x4 matrix into 4 of 2x2 matrices. It works in case of where +// det(A) != 0. If not, use the generic inverse method +// inverse formula. +// M = [ A | B ] A, B, C, D are 2x2 matrix blocks +// [ --+-- ] det(M) = |A| * |D - ((C * A^-1) * B)| +// [ C | D ] +// +// M^-1 = [ A' | B' ] A' = A^-1 - (A^-1 * B) * C' +// [ ---+--- ] B' = (A^-1 * B) * -D' +// [ C' | D' ] C' = -D' * (C * A^-1) +// D' = (D - ((C * A^-1) * B))^-1 +// +// NOTE: I wrap with () if it it used more than once. +// The matrix is invertable even if det(A)=0, so must check det(A) before +// calling this function, and use invertGeneric() instead. +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::invertProjective() +{ + // partition + Matrix2 a(m[0], m[1], m[4], m[5]); + Matrix2 b(m[8], m[9], m[12], m[13]); + Matrix2 c(m[2], m[3], m[6], m[7]); + Matrix2 d(m[10], m[11], m[14], m[15]); + + // pre-compute repeated parts + a.invert(); // A^-1 + Matrix2 ab = a * b; // A^-1 * B + Matrix2 ca = c * a; // C * A^-1 + Matrix2 cab = ca * b; // C * A^-1 * B + Matrix2 dcab = d - cab; // D - C * A^-1 * B + + // check determinant if |D - C * A^-1 * B| = 0 + //NOTE: this function assumes det(A) is already checked. if |A|=0 then, + // cannot use this function. 
+ float determinant = dcab[0] * dcab[3] - dcab[1] * dcab[2]; + if(fabs(determinant) <= EPSILON) + { + return identity(); + } + + // compute D' and -D' + Matrix2 d1 = dcab; // (D - C * A^-1 * B) + d1.invert(); // (D - C * A^-1 * B)^-1 + Matrix2 d2 = -d1; // -(D - C * A^-1 * B)^-1 + + // compute C' + Matrix2 c1 = d2 * ca; // -D' * (C * A^-1) + + // compute B' + Matrix2 b1 = ab * d2; // (A^-1 * B) * -D' + + // compute A' + Matrix2 a1 = a - (ab * c1); // A^-1 - (A^-1 * B) * C' + + // assemble inverse matrix + m[0] = a1[0]; m[4] = a1[2]; /*|*/ m[8] = b1[0]; m[12]= b1[2]; + m[1] = a1[1]; m[5] = a1[3]; /*|*/ m[9] = b1[1]; m[13]= b1[3]; + /*-----------------------------+-----------------------------*/ + m[2] = c1[0]; m[6] = c1[2]; /*|*/ m[10]= d1[0]; m[14]= d1[2]; + m[3] = c1[1]; m[7] = c1[3]; /*|*/ m[11]= d1[1]; m[15]= d1[3]; + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// compute the inverse of a general 4x4 matrix using Cramer's Rule +// If cannot find inverse, return indentity matrix +// M^-1 = adj(M) / det(M) +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::invertGeneral() +{ + // get cofactors of minor matrices + float cofactor0 = getCofactor(m[5],m[6],m[7], m[9],m[10],m[11], m[13],m[14],m[15]); + float cofactor1 = getCofactor(m[4],m[6],m[7], m[8],m[10],m[11], m[12],m[14],m[15]); + float cofactor2 = getCofactor(m[4],m[5],m[7], m[8],m[9], m[11], m[12],m[13],m[15]); + float cofactor3 = getCofactor(m[4],m[5],m[6], m[8],m[9], m[10], m[12],m[13],m[14]); + + // get determinant + float determinant = m[0] * cofactor0 - m[1] * cofactor1 + m[2] * cofactor2 - m[3] * cofactor3; + if(fabs(determinant) <= EPSILON) + { + return identity(); + } + + // get rest of cofactors for adj(M) + float cofactor4 = getCofactor(m[1],m[2],m[3], m[9],m[10],m[11], m[13],m[14],m[15]); + float cofactor5 = getCofactor(m[0],m[2],m[3], m[8],m[10],m[11], m[12],m[14],m[15]); + float cofactor6 = getCofactor(m[0],m[1],m[3], m[8],m[9], m[11], m[12],m[13],m[15]); + float cofactor7 = getCofactor(m[0],m[1],m[2], m[8],m[9], m[10], m[12],m[13],m[14]); + + float cofactor8 = getCofactor(m[1],m[2],m[3], m[5],m[6], m[7], m[13],m[14],m[15]); + float cofactor9 = getCofactor(m[0],m[2],m[3], m[4],m[6], m[7], m[12],m[14],m[15]); + float cofactor10= getCofactor(m[0],m[1],m[3], m[4],m[5], m[7], m[12],m[13],m[15]); + float cofactor11= getCofactor(m[0],m[1],m[2], m[4],m[5], m[6], m[12],m[13],m[14]); + + float cofactor12= getCofactor(m[1],m[2],m[3], m[5],m[6], m[7], m[9], m[10],m[11]); + float cofactor13= getCofactor(m[0],m[2],m[3], m[4],m[6], m[7], m[8], m[10],m[11]); + float cofactor14= getCofactor(m[0],m[1],m[3], m[4],m[5], m[7], m[8], m[9], m[11]); + float cofactor15= getCofactor(m[0],m[1],m[2], m[4],m[5], m[6], m[8], m[9], m[10]); + + // build inverse matrix = adj(M) / det(M) + // adjugate of M is the transpose of the cofactor matrix of M + float invDeterminant = 1.0f / determinant; + m[0] = invDeterminant * cofactor0; + m[1] = -invDeterminant * cofactor4; + m[2] = invDeterminant * cofactor8; + m[3] = -invDeterminant * cofactor12; + + m[4] = -invDeterminant * cofactor1; + m[5] = invDeterminant * cofactor5; + m[6] = -invDeterminant * cofactor9; + m[7] = invDeterminant * cofactor13; + + m[8] = invDeterminant * cofactor2; + m[9] = -invDeterminant * cofactor6; + m[10]= invDeterminant * cofactor10; + m[11]= -invDeterminant * cofactor14; + + m[12]= -invDeterminant * cofactor3; + m[13]= invDeterminant * cofactor7; + m[14]= -invDeterminant * 
cofactor11; + m[15]= invDeterminant * cofactor15; + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// return determinant of 4x4 matrix +/////////////////////////////////////////////////////////////////////////////// +float Matrix4::getDeterminant() +{ + return m[0] * getCofactor(m[5],m[6],m[7], m[9],m[10],m[11], m[13],m[14],m[15]) - + m[1] * getCofactor(m[4],m[6],m[7], m[8],m[10],m[11], m[12],m[14],m[15]) + + m[2] * getCofactor(m[4],m[5],m[7], m[8],m[9], m[11], m[12],m[13],m[15]) - + m[3] * getCofactor(m[4],m[5],m[6], m[8],m[9], m[10], m[12],m[13],m[14]); +} + + + +/////////////////////////////////////////////////////////////////////////////// +// compute cofactor of 3x3 minor matrix without sign +// input params are 9 elements of the minor matrix +// NOTE: The caller must know its sign. +/////////////////////////////////////////////////////////////////////////////// +float Matrix4::getCofactor(float m0, float m1, float m2, + float m3, float m4, float m5, + float m6, float m7, float m8) +{ + return m0 * (m4 * m8 - m5 * m7) - + m1 * (m3 * m8 - m5 * m6) + + m2 * (m3 * m7 - m4 * m6); +} + + + +/////////////////////////////////////////////////////////////////////////////// +// translate this matrix by (x, y, z) +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::translate(const Vector3& v) +{ + return translate(v.x, v.y, v.z); +} + +Matrix4& Matrix4::translate(float x, float y, float z) +{ + m[0] += m[3] * x; m[4] += m[7] * x; m[8] += m[11]* x; m[12]+= m[15]* x; + m[1] += m[3] * y; m[5] += m[7] * y; m[9] += m[11]* y; m[13]+= m[15]* y; + m[2] += m[3] * z; m[6] += m[7] * z; m[10]+= m[11]* z; m[14]+= m[15]* z; + + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// uniform scale +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::scale(float s) +{ + return scale(s, s, s); +} + +Matrix4& Matrix4::scale(float x, float y, float z) +{ + m[0] *= x; m[4] *= x; m[8] *= x; m[12] *= x; + m[1] *= y; m[5] *= y; m[9] *= y; m[13] *= y; + m[2] *= z; m[6] *= z; m[10]*= z; m[14] *= z; + return *this; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// build a rotation matrix with given angle(degree) and rotation axis, then +// multiply it with this object +/////////////////////////////////////////////////////////////////////////////// +Matrix4& Matrix4::rotate(float angle, const Vector3& axis) +{ + return rotate(angle, axis.x, axis.y, axis.z); +} + +Matrix4& Matrix4::rotate(float angle, float x, float y, float z) +{ + float c = cosf(angle * DEG2RAD); // cosine + float s = sinf(angle * DEG2RAD); // sine + float c1 = 1.0f - c; // 1 - c + float m0 = m[0], m4 = m[4], m8 = m[8], m12= m[12], + m1 = m[1], m5 = m[5], m9 = m[9], m13= m[13], + m2 = m[2], m6 = m[6], m10= m[10], m14= m[14]; + + // build rotation matrix + float r0 = x * x * c1 + c; + float r1 = x * y * c1 + z * s; + float r2 = x * z * c1 - y * s; + float r4 = x * y * c1 - z * s; + float r5 = y * y * c1 + c; + float r6 = y * z * c1 + x * s; + float r8 = x * z * c1 + y * s; + float r9 = y * z * c1 - x * s; + float r10= z * z * c1 + c; + + // multiply rotation matrix + m[0] = r0 * m0 + r4 * m1 + r8 * m2; + m[1] = r1 * m0 + r5 * m1 + r9 * m2; + m[2] = r2 * m0 + r6 * m1 + r10* m2; + m[4] = r0 * m4 + r4 * m5 + r8 * m6; + m[5] = r1 * m4 + r5 * m5 + r9 * m6; + m[6] = r2 * m4 + r6 * m5 + r10* m6; + m[8] = r0 * m8 + r4 
* m9 + r8 * m10; + m[9] = r1 * m8 + r5 * m9 + r9 * m10; + m[10]= r2 * m8 + r6 * m9 + r10* m10; + m[12]= r0 * m12+ r4 * m13+ r8 * m14; + m[13]= r1 * m12+ r5 * m13+ r9 * m14; + m[14]= r2 * m12+ r6 * m13+ r10* m14; + + return *this; +} + +Matrix4& Matrix4::rotateX(float angle) +{ + float c = cosf(angle * DEG2RAD); + float s = sinf(angle * DEG2RAD); + float m1 = m[1], m2 = m[2], + m5 = m[5], m6 = m[6], + m9 = m[9], m10= m[10], + m13= m[13], m14= m[14]; + + m[1] = m1 * c + m2 *-s; + m[2] = m1 * s + m2 * c; + m[5] = m5 * c + m6 *-s; + m[6] = m5 * s + m6 * c; + m[9] = m9 * c + m10*-s; + m[10]= m9 * s + m10* c; + m[13]= m13* c + m14*-s; + m[14]= m13* s + m14* c; + + return *this; +} + +Matrix4& Matrix4::rotateY(float angle) +{ + float c = cosf(angle * DEG2RAD); + float s = sinf(angle * DEG2RAD); + float m0 = m[0], m2 = m[2], + m4 = m[4], m6 = m[6], + m8 = m[8], m10= m[10], + m12= m[12], m14= m[14]; + + m[0] = m0 * c + m2 * s; + m[2] = m0 *-s + m2 * c; + m[4] = m4 * c + m6 * s; + m[6] = m4 *-s + m6 * c; + m[8] = m8 * c + m10* s; + m[10]= m8 *-s + m10* c; + m[12]= m12* c + m14* s; + m[14]= m12*-s + m14* c; + + return *this; +} + +Matrix4& Matrix4::rotateZ(float angle) +{ + float c = cosf(angle * DEG2RAD); + float s = sinf(angle * DEG2RAD); + float m0 = m[0], m1 = m[1], + m4 = m[4], m5 = m[5], + m8 = m[8], m9 = m[9], + m12= m[12], m13= m[13]; + + m[0] = m0 * c + m1 *-s; + m[1] = m0 * s + m1 * c; + m[4] = m4 * c + m5 *-s; + m[5] = m4 * s + m5 * c; + m[8] = m8 * c + m9 *-s; + m[9] = m8 * s + m9 * c; + m[12]= m12* c + m13*-s; + m[13]= m12* s + m13* c; + + return *this; +} diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.h b/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.h new file mode 100644 index 000000000..3515f546d --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/Matrices.h @@ -0,0 +1,909 @@ +/////////////////////////////////////////////////////////////////////////////// +// Matrice.h +// ========= +// NxN Matrix Math classes +// +// The elements of the matrix are stored as column major order. 
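// (Column major here means element m[i] holds row i % N of column i / N of an NxN matrix, as the
// index diagrams below show; in the 4x4 case, for example, the translation occupies m[12], m[13]
// and m[14].)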
+// | 0 2 | | 0 3 6 | | 0 4 8 12 | +// | 1 3 | | 1 4 7 | | 1 5 9 13 | +// | 2 5 8 | | 2 6 10 14 | +// | 3 7 11 15 | +// +// AUTHOR: Song Ho Ahn (song.ahn@gmail.com) +// CREATED: 2005-06-24 +// UPDATED: 2013-09-30 +// +// Copyright (C) 2005 Song Ho Ahn +/////////////////////////////////////////////////////////////////////////////// + +#ifndef MATH_MATRICES_H +#define MATH_MATRICES_H + +#include +#include +#include "Vectors.h" + +/////////////////////////////////////////////////////////////////////////// +// 2x2 matrix +/////////////////////////////////////////////////////////////////////////// +class Matrix2 +{ +public: + // constructors + Matrix2(); // init with identity + Matrix2(const float src[4]); + Matrix2(float m0, float m1, float m2, float m3); + + void set(const float src[4]); + void set(float m0, float m1, float m2, float m3); + void setRow(int index, const float row[2]); + void setRow(int index, const Vector2& v); + void setColumn(int index, const float col[2]); + void setColumn(int index, const Vector2& v); + + const float* get() const; + float getDeterminant(); + + Matrix2& identity(); + Matrix2& transpose(); // transpose itself and return reference + Matrix2& invert(); + + // operators + Matrix2 operator+(const Matrix2& rhs) const; // add rhs + Matrix2 operator-(const Matrix2& rhs) const; // subtract rhs + Matrix2& operator+=(const Matrix2& rhs); // add rhs and update this object + Matrix2& operator-=(const Matrix2& rhs); // subtract rhs and update this object + Vector2 operator*(const Vector2& rhs) const; // multiplication: v' = M * v + Matrix2 operator*(const Matrix2& rhs) const; // multiplication: M3 = M1 * M2 + Matrix2& operator*=(const Matrix2& rhs); // multiplication: M1' = M1 * M2 + bool operator==(const Matrix2& rhs) const; // exact compare, no epsilon + bool operator!=(const Matrix2& rhs) const; // exact compare, no epsilon + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript operator v[0], v[1] + + friend Matrix2 operator-(const Matrix2& m); // unary operator (-) + friend Matrix2 operator*(float scalar, const Matrix2& m); // pre-multiplication + friend Vector2 operator*(const Vector2& vec, const Matrix2& m); // pre-multiplication + friend std::ostream& operator<<(std::ostream& os, const Matrix2& m); + +protected: + +private: + float m[4]; + +}; + + + +/////////////////////////////////////////////////////////////////////////// +// 3x3 matrix +/////////////////////////////////////////////////////////////////////////// +class Matrix3 +{ +public: + // constructors + Matrix3(); // init with identity + Matrix3(const float src[9]); + Matrix3(float m0, float m1, float m2, // 1st column + float m3, float m4, float m5, // 2nd column + float m6, float m7, float m8); // 3rd column + + void set(const float src[9]); + void set(float m0, float m1, float m2, // 1st column + float m3, float m4, float m5, // 2nd column + float m6, float m7, float m8); // 3rd column + void setRow(int index, const float row[3]); + void setRow(int index, const Vector3& v); + void setColumn(int index, const float col[3]); + void setColumn(int index, const Vector3& v); + + const float* get() const; + float getDeterminant(); + + Matrix3& identity(); + Matrix3& transpose(); // transpose itself and return reference + Matrix3& invert(); + + // operators + Matrix3 operator+(const Matrix3& rhs) const; // add rhs + Matrix3 operator-(const Matrix3& rhs) const; // subtract rhs + Matrix3& operator+=(const Matrix3& rhs); // add rhs and update this object + 
Matrix3& operator-=(const Matrix3& rhs); // subtract rhs and update this object + Vector3 operator*(const Vector3& rhs) const; // multiplication: v' = M * v + Matrix3 operator*(const Matrix3& rhs) const; // multiplication: M3 = M1 * M2 + Matrix3& operator*=(const Matrix3& rhs); // multiplication: M1' = M1 * M2 + bool operator==(const Matrix3& rhs) const; // exact compare, no epsilon + bool operator!=(const Matrix3& rhs) const; // exact compare, no epsilon + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript operator v[0], v[1] + + friend Matrix3 operator-(const Matrix3& m); // unary operator (-) + friend Matrix3 operator*(float scalar, const Matrix3& m); // pre-multiplication + friend Vector3 operator*(const Vector3& vec, const Matrix3& m); // pre-multiplication + friend std::ostream& operator<<(std::ostream& os, const Matrix3& m); + +protected: + +private: + float m[9]; + +}; + + + +/////////////////////////////////////////////////////////////////////////// +// 4x4 matrix +/////////////////////////////////////////////////////////////////////////// +class Matrix4 +{ +public: + // constructors + Matrix4(); // init with identity + Matrix4(const float src[16]); + Matrix4(float m00, float m01, float m02, float m03, // 1st column + float m04, float m05, float m06, float m07, // 2nd column + float m08, float m09, float m10, float m11, // 3rd column + float m12, float m13, float m14, float m15);// 4th column + + void set(const float src[16]); + void set(float m00, float m01, float m02, float m03, // 1st column + float m04, float m05, float m06, float m07, // 2nd column + float m08, float m09, float m10, float m11, // 3rd column + float m12, float m13, float m14, float m15);// 4th column + void setRow(int index, const float row[4]); + void setRow(int index, const Vector4& v); + void setRow(int index, const Vector3& v); + void setColumn(int index, const float col[4]); + void setColumn(int index, const Vector4& v); + void setColumn(int index, const Vector3& v); + + const float* get() const; + const float* getTranspose(); // return transposed matrix + float getDeterminant(); + + Matrix4& identity(); + Matrix4& transpose(); // transpose itself and return reference + Matrix4& invert(); // check best inverse method before inverse + Matrix4& invertEuclidean(); // inverse of Euclidean transform matrix + Matrix4& invertAffine(); // inverse of affine transform matrix + Matrix4& invertProjective(); // inverse of projective matrix using partitioning + Matrix4& invertGeneral(); // inverse of generic matrix + + // transform matrix + Matrix4& translate(float x, float y, float z); // translation by (x,y,z) + Matrix4& translate(const Vector3& v); // + Matrix4& rotate(float angle, const Vector3& axis); // rotate angle(degree) along the given axix + Matrix4& rotate(float angle, float x, float y, float z); + Matrix4& rotateX(float angle); // rotate on X-axis with degree + Matrix4& rotateY(float angle); // rotate on Y-axis with degree + Matrix4& rotateZ(float angle); // rotate on Z-axis with degree + Matrix4& scale(float scale); // uniform scale + Matrix4& scale(float sx, float sy, float sz); // scale by (sx, sy, sz) on each axis + + // operators + Matrix4 operator+(const Matrix4& rhs) const; // add rhs + Matrix4 operator-(const Matrix4& rhs) const; // subtract rhs + Matrix4& operator+=(const Matrix4& rhs); // add rhs and update this object + Matrix4& operator-=(const Matrix4& rhs); // subtract rhs and update this object + Vector4 operator*(const Vector4& 
rhs) const; // multiplication: v' = M * v + Vector3 operator*(const Vector3& rhs) const; // multiplication: v' = M * v + Matrix4 operator*(const Matrix4& rhs) const; // multiplication: M3 = M1 * M2 + Matrix4& operator*=(const Matrix4& rhs); // multiplication: M1' = M1 * M2 + bool operator==(const Matrix4& rhs) const; // exact compare, no epsilon + bool operator!=(const Matrix4& rhs) const; // exact compare, no epsilon + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript operator v[0], v[1] + + friend Matrix4 operator-(const Matrix4& m); // unary operator (-) + friend Matrix4 operator*(float scalar, const Matrix4& m); // pre-multiplication + friend Vector3 operator*(const Vector3& vec, const Matrix4& m); // pre-multiplication + friend Vector4 operator*(const Vector4& vec, const Matrix4& m); // pre-multiplication + friend std::ostream& operator<<(std::ostream& os, const Matrix4& m); + +protected: + +private: + float getCofactor(float m0, float m1, float m2, + float m3, float m4, float m5, + float m6, float m7, float m8); + + float m[16]; + float tm[16]; // transpose m + +}; + + + +/////////////////////////////////////////////////////////////////////////// +// inline functions for Matrix2 +/////////////////////////////////////////////////////////////////////////// +inline Matrix2::Matrix2() +{ + // initially identity matrix + identity(); +} + + + +inline Matrix2::Matrix2(const float src[4]) +{ + set(src); +} + + + +inline Matrix2::Matrix2(float m0, float m1, float m2, float m3) +{ + set(m0, m1, m2, m3); +} + + + +inline void Matrix2::set(const float src[4]) +{ + m[0] = src[0]; m[1] = src[1]; m[2] = src[2]; m[3] = src[3]; +} + + + +inline void Matrix2::set(float m0, float m1, float m2, float m3) +{ + m[0]= m0; m[1] = m1; m[2] = m2; m[3]= m3; +} + + + +inline void Matrix2::setRow(int index, const float row[2]) +{ + m[index] = row[0]; m[index + 2] = row[1]; +} + + + +inline void Matrix2::setRow(int index, const Vector2& v) +{ + m[index] = v.x; m[index + 2] = v.y; +} + + + +inline void Matrix2::setColumn(int index, const float col[2]) +{ + m[index*2] = col[0]; m[index*2 + 1] = col[1]; +} + + + +inline void Matrix2::setColumn(int index, const Vector2& v) +{ + m[index*2] = v.x; m[index*2 + 1] = v.y; +} + + + +inline const float* Matrix2::get() const +{ + return m; +} + + + +inline Matrix2& Matrix2::identity() +{ + m[0] = m[3] = 1.0f; + m[1] = m[2] = 0.0f; + return *this; +} + + + +inline Matrix2 Matrix2::operator+(const Matrix2& rhs) const +{ + return Matrix2(m[0]+rhs[0], m[1]+rhs[1], m[2]+rhs[2], m[3]+rhs[3]); +} + + + +inline Matrix2 Matrix2::operator-(const Matrix2& rhs) const +{ + return Matrix2(m[0]-rhs[0], m[1]-rhs[1], m[2]-rhs[2], m[3]-rhs[3]); +} + + + +inline Matrix2& Matrix2::operator+=(const Matrix2& rhs) +{ + m[0] += rhs[0]; m[1] += rhs[1]; m[2] += rhs[2]; m[3] += rhs[3]; + return *this; +} + + + +inline Matrix2& Matrix2::operator-=(const Matrix2& rhs) +{ + m[0] -= rhs[0]; m[1] -= rhs[1]; m[2] -= rhs[2]; m[3] -= rhs[3]; + return *this; +} + + + +inline Vector2 Matrix2::operator*(const Vector2& rhs) const +{ + return Vector2(m[0]*rhs.x + m[2]*rhs.y, m[1]*rhs.x + m[3]*rhs.y); +} + + + +inline Matrix2 Matrix2::operator*(const Matrix2& rhs) const +{ + return Matrix2(m[0]*rhs[0] + m[2]*rhs[1], m[1]*rhs[0] + m[3]*rhs[1], + m[0]*rhs[2] + m[2]*rhs[3], m[1]*rhs[2] + m[3]*rhs[3]); +} + + + +inline Matrix2& Matrix2::operator*=(const Matrix2& rhs) +{ + *this = *this * rhs; + return *this; +} + + + +inline bool Matrix2::operator==(const 
Matrix2& rhs) const +{ + return (m[0] == rhs[0]) && (m[1] == rhs[1]) && (m[2] == rhs[2]) && (m[3] == rhs[3]); +} + + + +inline bool Matrix2::operator!=(const Matrix2& rhs) const +{ + return (m[0] != rhs[0]) || (m[1] != rhs[1]) || (m[2] != rhs[2]) || (m[3] != rhs[3]); +} + + + +inline float Matrix2::operator[](int index) const +{ + return m[index]; +} + + + +inline float& Matrix2::operator[](int index) +{ + return m[index]; +} + + + +inline Matrix2 operator-(const Matrix2& rhs) +{ + return Matrix2(-rhs[0], -rhs[1], -rhs[2], -rhs[3]); +} + + + +inline Matrix2 operator*(float s, const Matrix2& rhs) +{ + return Matrix2(s*rhs[0], s*rhs[1], s*rhs[2], s*rhs[3]); +} + + + +inline Vector2 operator*(const Vector2& v, const Matrix2& rhs) +{ + return Vector2(v.x*rhs[0] + v.y*rhs[1], v.x*rhs[2] + v.y*rhs[3]); +} + + + +inline std::ostream& operator<<(std::ostream& os, const Matrix2& m) +{ + os << std::fixed << std::setprecision(5); + os << "[" << std::setw(10) << m[0] << " " << std::setw(10) << m[2] << "]\n" + << "[" << std::setw(10) << m[1] << " " << std::setw(10) << m[3] << "]\n"; + os << std::resetiosflags(std::ios_base::fixed | std::ios_base::floatfield); + return os; +} +// END OF MATRIX2 INLINE ////////////////////////////////////////////////////// + + + + +/////////////////////////////////////////////////////////////////////////// +// inline functions for Matrix3 +/////////////////////////////////////////////////////////////////////////// +inline Matrix3::Matrix3() +{ + // initially identity matrix + identity(); +} + + + +inline Matrix3::Matrix3(const float src[9]) +{ + set(src); +} + + + +inline Matrix3::Matrix3(float m0, float m1, float m2, + float m3, float m4, float m5, + float m6, float m7, float m8) +{ + set(m0, m1, m2, m3, m4, m5, m6, m7, m8); +} + + + +inline void Matrix3::set(const float src[9]) +{ + m[0] = src[0]; m[1] = src[1]; m[2] = src[2]; + m[3] = src[3]; m[4] = src[4]; m[5] = src[5]; + m[6] = src[6]; m[7] = src[7]; m[8] = src[8]; +} + + + +inline void Matrix3::set(float m0, float m1, float m2, + float m3, float m4, float m5, + float m6, float m7, float m8) +{ + m[0] = m0; m[1] = m1; m[2] = m2; + m[3] = m3; m[4] = m4; m[5] = m5; + m[6] = m6; m[7] = m7; m[8] = m8; +} + + + +inline void Matrix3::setRow(int index, const float row[3]) +{ + m[index] = row[0]; m[index + 3] = row[1]; m[index + 6] = row[2]; +} + + + +inline void Matrix3::setRow(int index, const Vector3& v) +{ + m[index] = v.x; m[index + 3] = v.y; m[index + 6] = v.z; +} + + + +inline void Matrix3::setColumn(int index, const float col[3]) +{ + m[index*3] = col[0]; m[index*3 + 1] = col[1]; m[index*3 + 2] = col[2]; +} + + + +inline void Matrix3::setColumn(int index, const Vector3& v) +{ + m[index*3] = v.x; m[index*3 + 1] = v.y; m[index*3 + 2] = v.z; +} + + + +inline const float* Matrix3::get() const +{ + return m; +} + + + +inline Matrix3& Matrix3::identity() +{ + m[0] = m[4] = m[8] = 1.0f; + m[1] = m[2] = m[3] = m[5] = m[6] = m[7] = 0.0f; + return *this; +} + + + +inline Matrix3 Matrix3::operator+(const Matrix3& rhs) const +{ + return Matrix3(m[0]+rhs[0], m[1]+rhs[1], m[2]+rhs[2], + m[3]+rhs[3], m[4]+rhs[4], m[5]+rhs[5], + m[6]+rhs[6], m[7]+rhs[7], m[8]+rhs[8]); +} + + + +inline Matrix3 Matrix3::operator-(const Matrix3& rhs) const +{ + return Matrix3(m[0]-rhs[0], m[1]-rhs[1], m[2]-rhs[2], + m[3]-rhs[3], m[4]-rhs[4], m[5]-rhs[5], + m[6]-rhs[6], m[7]-rhs[7], m[8]-rhs[8]); +} + + + +inline Matrix3& Matrix3::operator+=(const Matrix3& rhs) +{ + m[0] += rhs[0]; m[1] += rhs[1]; m[2] += rhs[2]; + m[3] += rhs[3]; m[4] += rhs[4]; m[5] 
+= rhs[5]; + m[6] += rhs[6]; m[7] += rhs[7]; m[8] += rhs[8]; + return *this; +} + + + +inline Matrix3& Matrix3::operator-=(const Matrix3& rhs) +{ + m[0] -= rhs[0]; m[1] -= rhs[1]; m[2] -= rhs[2]; + m[3] -= rhs[3]; m[4] -= rhs[4]; m[5] -= rhs[5]; + m[6] -= rhs[6]; m[7] -= rhs[7]; m[8] -= rhs[8]; + return *this; +} + + + +inline Vector3 Matrix3::operator*(const Vector3& rhs) const +{ + return Vector3(m[0]*rhs.x + m[3]*rhs.y + m[6]*rhs.z, + m[1]*rhs.x + m[4]*rhs.y + m[7]*rhs.z, + m[2]*rhs.x + m[5]*rhs.y + m[8]*rhs.z); +} + + + +inline Matrix3 Matrix3::operator*(const Matrix3& rhs) const +{ + return Matrix3(m[0]*rhs[0] + m[3]*rhs[1] + m[6]*rhs[2], m[1]*rhs[0] + m[4]*rhs[1] + m[7]*rhs[2], m[2]*rhs[0] + m[5]*rhs[1] + m[8]*rhs[2], + m[0]*rhs[3] + m[3]*rhs[4] + m[6]*rhs[5], m[1]*rhs[3] + m[4]*rhs[4] + m[7]*rhs[5], m[2]*rhs[3] + m[5]*rhs[4] + m[8]*rhs[5], + m[0]*rhs[6] + m[3]*rhs[7] + m[6]*rhs[8], m[1]*rhs[6] + m[4]*rhs[7] + m[7]*rhs[8], m[2]*rhs[6] + m[5]*rhs[7] + m[8]*rhs[8]); +} + + + +inline Matrix3& Matrix3::operator*=(const Matrix3& rhs) +{ + *this = *this * rhs; + return *this; +} + + + +inline bool Matrix3::operator==(const Matrix3& rhs) const +{ + return (m[0] == rhs[0]) && (m[1] == rhs[1]) && (m[2] == rhs[2]) && + (m[3] == rhs[3]) && (m[4] == rhs[4]) && (m[5] == rhs[5]) && + (m[6] == rhs[6]) && (m[7] == rhs[7]) && (m[8] == rhs[8]); +} + + + +inline bool Matrix3::operator!=(const Matrix3& rhs) const +{ + return (m[0] != rhs[0]) || (m[1] != rhs[1]) || (m[2] != rhs[2]) || + (m[3] != rhs[3]) || (m[4] != rhs[4]) || (m[5] != rhs[5]) || + (m[6] != rhs[6]) || (m[7] != rhs[7]) || (m[8] != rhs[8]); +} + + + +inline float Matrix3::operator[](int index) const +{ + return m[index]; +} + + + +inline float& Matrix3::operator[](int index) +{ + return m[index]; +} + + + +inline Matrix3 operator-(const Matrix3& rhs) +{ + return Matrix3(-rhs[0], -rhs[1], -rhs[2], -rhs[3], -rhs[4], -rhs[5], -rhs[6], -rhs[7], -rhs[8]); +} + + + +inline Matrix3 operator*(float s, const Matrix3& rhs) +{ + return Matrix3(s*rhs[0], s*rhs[1], s*rhs[2], s*rhs[3], s*rhs[4], s*rhs[5], s*rhs[6], s*rhs[7], s*rhs[8]); +} + + + +inline Vector3 operator*(const Vector3& v, const Matrix3& m) +{ + return Vector3(v.x*m[0] + v.y*m[1] + v.z*m[2], v.x*m[3] + v.y*m[4] + v.z*m[5], v.x*m[6] + v.y*m[7] + v.z*m[8]); +} + + + +inline std::ostream& operator<<(std::ostream& os, const Matrix3& m) +{ + os << std::fixed << std::setprecision(5); + os << "[" << std::setw(10) << m[0] << " " << std::setw(10) << m[3] << " " << std::setw(10) << m[6] << "]\n" + << "[" << std::setw(10) << m[1] << " " << std::setw(10) << m[4] << " " << std::setw(10) << m[7] << "]\n" + << "[" << std::setw(10) << m[2] << " " << std::setw(10) << m[5] << " " << std::setw(10) << m[8] << "]\n"; + os << std::resetiosflags(std::ios_base::fixed | std::ios_base::floatfield); + return os; +} +// END OF MATRIX3 INLINE ////////////////////////////////////////////////////// + + + + +/////////////////////////////////////////////////////////////////////////// +// inline functions for Matrix4 +/////////////////////////////////////////////////////////////////////////// +inline Matrix4::Matrix4() +{ + // initially identity matrix + identity(); +} + + + +inline Matrix4::Matrix4(const float src[16]) +{ + set(src); +} + + + +inline Matrix4::Matrix4(float m00, float m01, float m02, float m03, + float m04, float m05, float m06, float m07, + float m08, float m09, float m10, float m11, + float m12, float m13, float m14, float m15) +{ + set(m00, m01, m02, m03, m04, m05, m06, m07, m08, m09, m10, m11, m12, 
m13, m14, m15); +} + + + +inline void Matrix4::set(const float src[16]) +{ + m[0] = src[0]; m[1] = src[1]; m[2] = src[2]; m[3] = src[3]; + m[4] = src[4]; m[5] = src[5]; m[6] = src[6]; m[7] = src[7]; + m[8] = src[8]; m[9] = src[9]; m[10]= src[10]; m[11]= src[11]; + m[12]= src[12]; m[13]= src[13]; m[14]= src[14]; m[15]= src[15]; +} + + + +inline void Matrix4::set(float m00, float m01, float m02, float m03, + float m04, float m05, float m06, float m07, + float m08, float m09, float m10, float m11, + float m12, float m13, float m14, float m15) +{ + m[0] = m00; m[1] = m01; m[2] = m02; m[3] = m03; + m[4] = m04; m[5] = m05; m[6] = m06; m[7] = m07; + m[8] = m08; m[9] = m09; m[10]= m10; m[11]= m11; + m[12]= m12; m[13]= m13; m[14]= m14; m[15]= m15; +} + + + +inline void Matrix4::setRow(int index, const float row[4]) +{ + m[index] = row[0]; m[index + 4] = row[1]; m[index + 8] = row[2]; m[index + 12] = row[3]; +} + + + +inline void Matrix4::setRow(int index, const Vector4& v) +{ + m[index] = v.x; m[index + 4] = v.y; m[index + 8] = v.z; m[index + 12] = v.w; +} + + + +inline void Matrix4::setRow(int index, const Vector3& v) +{ + m[index] = v.x; m[index + 4] = v.y; m[index + 8] = v.z; +} + + + +inline void Matrix4::setColumn(int index, const float col[4]) +{ + m[index*4] = col[0]; m[index*4 + 1] = col[1]; m[index*4 + 2] = col[2]; m[index*4 + 3] = col[3]; +} + + + +inline void Matrix4::setColumn(int index, const Vector4& v) +{ + m[index*4] = v.x; m[index*4 + 1] = v.y; m[index*4 + 2] = v.z; m[index*4 + 3] = v.w; +} + + + +inline void Matrix4::setColumn(int index, const Vector3& v) +{ + m[index*4] = v.x; m[index*4 + 1] = v.y; m[index*4 + 2] = v.z; +} + + + +inline const float* Matrix4::get() const +{ + return m; +} + + + +inline const float* Matrix4::getTranspose() +{ + tm[0] = m[0]; tm[1] = m[4]; tm[2] = m[8]; tm[3] = m[12]; + tm[4] = m[1]; tm[5] = m[5]; tm[6] = m[9]; tm[7] = m[13]; + tm[8] = m[2]; tm[9] = m[6]; tm[10]= m[10]; tm[11]= m[14]; + tm[12]= m[3]; tm[13]= m[7]; tm[14]= m[11]; tm[15]= m[15]; + return tm; +} + + + +inline Matrix4& Matrix4::identity() +{ + m[0] = m[5] = m[10] = m[15] = 1.0f; + m[1] = m[2] = m[3] = m[4] = m[6] = m[7] = m[8] = m[9] = m[11] = m[12] = m[13] = m[14] = 0.0f; + return *this; +} + + + +inline Matrix4 Matrix4::operator+(const Matrix4& rhs) const +{ + return Matrix4(m[0]+rhs[0], m[1]+rhs[1], m[2]+rhs[2], m[3]+rhs[3], + m[4]+rhs[4], m[5]+rhs[5], m[6]+rhs[6], m[7]+rhs[7], + m[8]+rhs[8], m[9]+rhs[9], m[10]+rhs[10], m[11]+rhs[11], + m[12]+rhs[12], m[13]+rhs[13], m[14]+rhs[14], m[15]+rhs[15]); +} + + + +inline Matrix4 Matrix4::operator-(const Matrix4& rhs) const +{ + return Matrix4(m[0]-rhs[0], m[1]-rhs[1], m[2]-rhs[2], m[3]-rhs[3], + m[4]-rhs[4], m[5]-rhs[5], m[6]-rhs[6], m[7]-rhs[7], + m[8]-rhs[8], m[9]-rhs[9], m[10]-rhs[10], m[11]-rhs[11], + m[12]-rhs[12], m[13]-rhs[13], m[14]-rhs[14], m[15]-rhs[15]); +} + + + +inline Matrix4& Matrix4::operator+=(const Matrix4& rhs) +{ + m[0] += rhs[0]; m[1] += rhs[1]; m[2] += rhs[2]; m[3] += rhs[3]; + m[4] += rhs[4]; m[5] += rhs[5]; m[6] += rhs[6]; m[7] += rhs[7]; + m[8] += rhs[8]; m[9] += rhs[9]; m[10]+= rhs[10]; m[11]+= rhs[11]; + m[12]+= rhs[12]; m[13]+= rhs[13]; m[14]+= rhs[14]; m[15]+= rhs[15]; + return *this; +} + + + +inline Matrix4& Matrix4::operator-=(const Matrix4& rhs) +{ + m[0] -= rhs[0]; m[1] -= rhs[1]; m[2] -= rhs[2]; m[3] -= rhs[3]; + m[4] -= rhs[4]; m[5] -= rhs[5]; m[6] -= rhs[6]; m[7] -= rhs[7]; + m[8] -= rhs[8]; m[9] -= rhs[9]; m[10]-= rhs[10]; m[11]-= rhs[11]; + m[12]-= rhs[12]; m[13]-= rhs[13]; m[14]-= rhs[14]; m[15]-= 
rhs[15]; + return *this; +} + + + +inline Vector4 Matrix4::operator*(const Vector4& rhs) const +{ + return Vector4(m[0]*rhs.x + m[4]*rhs.y + m[8]*rhs.z + m[12]*rhs.w, + m[1]*rhs.x + m[5]*rhs.y + m[9]*rhs.z + m[13]*rhs.w, + m[2]*rhs.x + m[6]*rhs.y + m[10]*rhs.z + m[14]*rhs.w, + m[3]*rhs.x + m[7]*rhs.y + m[11]*rhs.z + m[15]*rhs.w); +} + + + +inline Vector3 Matrix4::operator*(const Vector3& rhs) const +{ + return Vector3(m[0]*rhs.x + m[4]*rhs.y + m[8]*rhs.z, + m[1]*rhs.x + m[5]*rhs.y + m[9]*rhs.z, + m[2]*rhs.x + m[6]*rhs.y + m[10]*rhs.z); +} + + + +inline Matrix4 Matrix4::operator*(const Matrix4& n) const +{ + return Matrix4(m[0]*n[0] + m[4]*n[1] + m[8]*n[2] + m[12]*n[3], m[1]*n[0] + m[5]*n[1] + m[9]*n[2] + m[13]*n[3], m[2]*n[0] + m[6]*n[1] + m[10]*n[2] + m[14]*n[3], m[3]*n[0] + m[7]*n[1] + m[11]*n[2] + m[15]*n[3], + m[0]*n[4] + m[4]*n[5] + m[8]*n[6] + m[12]*n[7], m[1]*n[4] + m[5]*n[5] + m[9]*n[6] + m[13]*n[7], m[2]*n[4] + m[6]*n[5] + m[10]*n[6] + m[14]*n[7], m[3]*n[4] + m[7]*n[5] + m[11]*n[6] + m[15]*n[7], + m[0]*n[8] + m[4]*n[9] + m[8]*n[10] + m[12]*n[11], m[1]*n[8] + m[5]*n[9] + m[9]*n[10] + m[13]*n[11], m[2]*n[8] + m[6]*n[9] + m[10]*n[10] + m[14]*n[11], m[3]*n[8] + m[7]*n[9] + m[11]*n[10] + m[15]*n[11], + m[0]*n[12] + m[4]*n[13] + m[8]*n[14] + m[12]*n[15], m[1]*n[12] + m[5]*n[13] + m[9]*n[14] + m[13]*n[15], m[2]*n[12] + m[6]*n[13] + m[10]*n[14] + m[14]*n[15], m[3]*n[12] + m[7]*n[13] + m[11]*n[14] + m[15]*n[15]); +} + + + +inline Matrix4& Matrix4::operator*=(const Matrix4& rhs) +{ + *this = *this * rhs; + return *this; +} + + + +inline bool Matrix4::operator==(const Matrix4& n) const +{ + return (m[0] == n[0]) && (m[1] == n[1]) && (m[2] == n[2]) && (m[3] == n[3]) && + (m[4] == n[4]) && (m[5] == n[5]) && (m[6] == n[6]) && (m[7] == n[7]) && + (m[8] == n[8]) && (m[9] == n[9]) && (m[10]== n[10]) && (m[11]== n[11]) && + (m[12]== n[12]) && (m[13]== n[13]) && (m[14]== n[14]) && (m[15]== n[15]); +} + + + +inline bool Matrix4::operator!=(const Matrix4& n) const +{ + return (m[0] != n[0]) || (m[1] != n[1]) || (m[2] != n[2]) || (m[3] != n[3]) || + (m[4] != n[4]) || (m[5] != n[5]) || (m[6] != n[6]) || (m[7] != n[7]) || + (m[8] != n[8]) || (m[9] != n[9]) || (m[10]!= n[10]) || (m[11]!= n[11]) || + (m[12]!= n[12]) || (m[13]!= n[13]) || (m[14]!= n[14]) || (m[15]!= n[15]); +} + + + +inline float Matrix4::operator[](int index) const +{ + return m[index]; +} + + + +inline float& Matrix4::operator[](int index) +{ + return m[index]; +} + + + +inline Matrix4 operator-(const Matrix4& rhs) +{ + return Matrix4(-rhs[0], -rhs[1], -rhs[2], -rhs[3], -rhs[4], -rhs[5], -rhs[6], -rhs[7], -rhs[8], -rhs[9], -rhs[10], -rhs[11], -rhs[12], -rhs[13], -rhs[14], -rhs[15]); +} + + + +inline Matrix4 operator*(float s, const Matrix4& rhs) +{ + return Matrix4(s*rhs[0], s*rhs[1], s*rhs[2], s*rhs[3], s*rhs[4], s*rhs[5], s*rhs[6], s*rhs[7], s*rhs[8], s*rhs[9], s*rhs[10], s*rhs[11], s*rhs[12], s*rhs[13], s*rhs[14], s*rhs[15]); +} + + + +inline Vector4 operator*(const Vector4& v, const Matrix4& m) +{ + return Vector4(v.x*m[0] + v.y*m[1] + v.z*m[2] + v.w*m[3], v.x*m[4] + v.y*m[5] + v.z*m[6] + v.w*m[7], v.x*m[8] + v.y*m[9] + v.z*m[10] + v.w*m[11], v.x*m[12] + v.y*m[13] + v.z*m[14] + v.w*m[15]); +} + + + +inline Vector3 operator*(const Vector3& v, const Matrix4& m) +{ + return Vector3(v.x*m[0] + v.y*m[1] + v.z*m[2], v.x*m[4] + v.y*m[5] + v.z*m[6], v.x*m[8] + v.y*m[9] + v.z*m[10]); +} + + + +inline std::ostream& operator<<(std::ostream& os, const Matrix4& m) +{ + os << std::fixed << std::setprecision(5); + os << "[" << std::setw(10) << m[0] 
<< " " << std::setw(10) << m[4] << " " << std::setw(10) << m[8] << " " << std::setw(10) << m[12] << "]\n" + << "[" << std::setw(10) << m[1] << " " << std::setw(10) << m[5] << " " << std::setw(10) << m[9] << " " << std::setw(10) << m[13] << "]\n" + << "[" << std::setw(10) << m[2] << " " << std::setw(10) << m[6] << " " << std::setw(10) << m[10] << " " << std::setw(10) << m[14] << "]\n" + << "[" << std::setw(10) << m[3] << " " << std::setw(10) << m[7] << " " << std::setw(10) << m[11] << " " << std::setw(10) << m[15] << "]\n"; + os << std::resetiosflags(std::ios_base::fixed | std::ios_base::floatfield); + return os; +} +// END OF MATRIX4 INLINE ////////////////////////////////////////////////////// +#endif diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/Vectors.h b/examples/ThirdPartyLibs/openvr/samples/shared/Vectors.h new file mode 100644 index 000000000..2e08103c4 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/Vectors.h @@ -0,0 +1,530 @@ +/////////////////////////////////////////////////////////////////////////////// +// Vectors.h +// ========= +// 2D/3D/4D vectors +// +// AUTHOR: Song Ho Ahn (song.ahn@gmail.com) +// CREATED: 2007-02-14 +// UPDATED: 2013-01-20 +// +// Copyright (C) 2007-2013 Song Ho Ahn +/////////////////////////////////////////////////////////////////////////////// + + +#ifndef VECTORS_H_DEF +#define VECTORS_H_DEF + +#include +#include + +/////////////////////////////////////////////////////////////////////////////// +// 2D vector +/////////////////////////////////////////////////////////////////////////////// +struct Vector2 +{ + float x; + float y; + + // ctors + Vector2() : x(0), y(0) {}; + Vector2(float x, float y) : x(x), y(y) {}; + + // utils functions + void set(float x, float y); + float length() const; // + float distance(const Vector2& vec) const; // distance between two vectors + Vector2& normalize(); // + float dot(const Vector2& vec) const; // dot product + bool equal(const Vector2& vec, float e) const; // compare with epsilon + + // operators + Vector2 operator-() const; // unary operator (negate) + Vector2 operator+(const Vector2& rhs) const; // add rhs + Vector2 operator-(const Vector2& rhs) const; // subtract rhs + Vector2& operator+=(const Vector2& rhs); // add rhs and update this object + Vector2& operator-=(const Vector2& rhs); // subtract rhs and update this object + Vector2 operator*(const float scale) const; // scale + Vector2 operator*(const Vector2& rhs) const; // multiply each element + Vector2& operator*=(const float scale); // scale and update this object + Vector2& operator*=(const Vector2& rhs); // multiply each element and update this object + Vector2 operator/(const float scale) const; // inverse scale + Vector2& operator/=(const float scale); // scale and update this object + bool operator==(const Vector2& rhs) const; // exact compare, no epsilon + bool operator!=(const Vector2& rhs) const; // exact compare, no epsilon + bool operator<(const Vector2& rhs) const; // comparison for sort + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript operator v[0], v[1] + + friend Vector2 operator*(const float a, const Vector2 vec); + friend std::ostream& operator<<(std::ostream& os, const Vector2& vec); +}; + + + +/////////////////////////////////////////////////////////////////////////////// +// 3D vector +/////////////////////////////////////////////////////////////////////////////// +struct Vector3 +{ + float x; + float y; + float z; + + // ctors + Vector3() : 
x(0), y(0), z(0) {}; + Vector3(float x, float y, float z) : x(x), y(y), z(z) {}; + + // utils functions + void set(float x, float y, float z); + float length() const; // + float distance(const Vector3& vec) const; // distance between two vectors + Vector3& normalize(); // + float dot(const Vector3& vec) const; // dot product + Vector3 cross(const Vector3& vec) const; // cross product + bool equal(const Vector3& vec, float e) const; // compare with epsilon + + // operators + Vector3 operator-() const; // unary operator (negate) + Vector3 operator+(const Vector3& rhs) const; // add rhs + Vector3 operator-(const Vector3& rhs) const; // subtract rhs + Vector3& operator+=(const Vector3& rhs); // add rhs and update this object + Vector3& operator-=(const Vector3& rhs); // subtract rhs and update this object + Vector3 operator*(const float scale) const; // scale + Vector3 operator*(const Vector3& rhs) const; // multiplay each element + Vector3& operator*=(const float scale); // scale and update this object + Vector3& operator*=(const Vector3& rhs); // product each element and update this object + Vector3 operator/(const float scale) const; // inverse scale + Vector3& operator/=(const float scale); // scale and update this object + bool operator==(const Vector3& rhs) const; // exact compare, no epsilon + bool operator!=(const Vector3& rhs) const; // exact compare, no epsilon + bool operator<(const Vector3& rhs) const; // comparison for sort + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript operator v[0], v[1] + + friend Vector3 operator*(const float a, const Vector3 vec); + friend std::ostream& operator<<(std::ostream& os, const Vector3& vec); +}; + + + +/////////////////////////////////////////////////////////////////////////////// +// 4D vector +/////////////////////////////////////////////////////////////////////////////// +struct Vector4 +{ + float x; + float y; + float z; + float w; + + // ctors + Vector4() : x(0), y(0), z(0), w(0) {}; + Vector4(float x, float y, float z, float w) : x(x), y(y), z(z), w(w) {}; + + // utils functions + void set(float x, float y, float z, float w); + float length() const; // + float distance(const Vector4& vec) const; // distance between two vectors + Vector4& normalize(); // + float dot(const Vector4& vec) const; // dot product + bool equal(const Vector4& vec, float e) const; // compare with epsilon + + // operators + Vector4 operator-() const; // unary operator (negate) + Vector4 operator+(const Vector4& rhs) const; // add rhs + Vector4 operator-(const Vector4& rhs) const; // subtract rhs + Vector4& operator+=(const Vector4& rhs); // add rhs and update this object + Vector4& operator-=(const Vector4& rhs); // subtract rhs and update this object + Vector4 operator*(const float scale) const; // scale + Vector4 operator*(const Vector4& rhs) const; // multiply each element + Vector4& operator*=(const float scale); // scale and update this object + Vector4& operator*=(const Vector4& rhs); // multiply each element and update this object + Vector4 operator/(const float scale) const; // inverse scale + Vector4& operator/=(const float scale); // scale and update this object + bool operator==(const Vector4& rhs) const; // exact compare, no epsilon + bool operator!=(const Vector4& rhs) const; // exact compare, no epsilon + bool operator<(const Vector4& rhs) const; // comparison for sort + float operator[](int index) const; // subscript operator v[0], v[1] + float& operator[](int index); // subscript 
operator v[0], v[1] + + friend Vector4 operator*(const float a, const Vector4 vec); + friend std::ostream& operator<<(std::ostream& os, const Vector4& vec); +}; + + + +// fast math routines from Doom3 SDK +inline float invSqrt(float x) +{ + float xhalf = 0.5f * x; + int i = *(int*)&x; // get bits for floating value + i = 0x5f3759df - (i>>1); // gives initial guess + x = *(float*)&i; // convert bits back to float + x = x * (1.5f - xhalf*x*x); // Newton step + return x; +} + + + +/////////////////////////////////////////////////////////////////////////////// +// inline functions for Vector2 +/////////////////////////////////////////////////////////////////////////////// +inline Vector2 Vector2::operator-() const { + return Vector2(-x, -y); +} + +inline Vector2 Vector2::operator+(const Vector2& rhs) const { + return Vector2(x+rhs.x, y+rhs.y); +} + +inline Vector2 Vector2::operator-(const Vector2& rhs) const { + return Vector2(x-rhs.x, y-rhs.y); +} + +inline Vector2& Vector2::operator+=(const Vector2& rhs) { + x += rhs.x; y += rhs.y; return *this; +} + +inline Vector2& Vector2::operator-=(const Vector2& rhs) { + x -= rhs.x; y -= rhs.y; return *this; +} + +inline Vector2 Vector2::operator*(const float a) const { + return Vector2(x*a, y*a); +} + +inline Vector2 Vector2::operator*(const Vector2& rhs) const { + return Vector2(x*rhs.x, y*rhs.y); +} + +inline Vector2& Vector2::operator*=(const float a) { + x *= a; y *= a; return *this; +} + +inline Vector2& Vector2::operator*=(const Vector2& rhs) { + x *= rhs.x; y *= rhs.y; return *this; +} + +inline Vector2 Vector2::operator/(const float a) const { + return Vector2(x/a, y/a); +} + +inline Vector2& Vector2::operator/=(const float a) { + x /= a; y /= a; return *this; +} + +inline bool Vector2::operator==(const Vector2& rhs) const { + return (x == rhs.x) && (y == rhs.y); +} + +inline bool Vector2::operator!=(const Vector2& rhs) const { + return (x != rhs.x) || (y != rhs.y); +} + +inline bool Vector2::operator<(const Vector2& rhs) const { + if(x < rhs.x) return true; + if(x > rhs.x) return false; + if(y < rhs.y) return true; + if(y > rhs.y) return false; + return false; +} + +inline float Vector2::operator[](int index) const { + return (&x)[index]; +} + +inline float& Vector2::operator[](int index) { + return (&x)[index]; +} + +inline void Vector2::set(float x, float y) { + this->x = x; this->y = y; +} + +inline float Vector2::length() const { + return sqrtf(x*x + y*y); +} + +inline float Vector2::distance(const Vector2& vec) const { + return sqrtf((vec.x-x)*(vec.x-x) + (vec.y-y)*(vec.y-y)); +} + +inline Vector2& Vector2::normalize() { + //@@const float EPSILON = 0.000001f; + float xxyy = x*x + y*y; + //@@if(xxyy < EPSILON) + //@@ return *this; + + //float invLength = invSqrt(xxyy); + float invLength = 1.0f / sqrtf(xxyy); + x *= invLength; + y *= invLength; + return *this; +} + +inline float Vector2::dot(const Vector2& rhs) const { + return (x*rhs.x + y*rhs.y); +} + +inline bool Vector2::equal(const Vector2& rhs, float epsilon) const { + return fabs(x - rhs.x) < epsilon && fabs(y - rhs.y) < epsilon; +} + +inline Vector2 operator*(const float a, const Vector2 vec) { + return Vector2(a*vec.x, a*vec.y); +} + +inline std::ostream& operator<<(std::ostream& os, const Vector2& vec) { + os << "(" << vec.x << ", " << vec.y << ")"; + return os; +} +// END OF VECTOR2 ///////////////////////////////////////////////////////////// + + + + +/////////////////////////////////////////////////////////////////////////////// +// inline functions for Vector3 
+/////////////////////////////////////////////////////////////////////////////// +inline Vector3 Vector3::operator-() const { + return Vector3(-x, -y, -z); +} + +inline Vector3 Vector3::operator+(const Vector3& rhs) const { + return Vector3(x+rhs.x, y+rhs.y, z+rhs.z); +} + +inline Vector3 Vector3::operator-(const Vector3& rhs) const { + return Vector3(x-rhs.x, y-rhs.y, z-rhs.z); +} + +inline Vector3& Vector3::operator+=(const Vector3& rhs) { + x += rhs.x; y += rhs.y; z += rhs.z; return *this; +} + +inline Vector3& Vector3::operator-=(const Vector3& rhs) { + x -= rhs.x; y -= rhs.y; z -= rhs.z; return *this; +} + +inline Vector3 Vector3::operator*(const float a) const { + return Vector3(x*a, y*a, z*a); +} + +inline Vector3 Vector3::operator*(const Vector3& rhs) const { + return Vector3(x*rhs.x, y*rhs.y, z*rhs.z); +} + +inline Vector3& Vector3::operator*=(const float a) { + x *= a; y *= a; z *= a; return *this; +} + +inline Vector3& Vector3::operator*=(const Vector3& rhs) { + x *= rhs.x; y *= rhs.y; z *= rhs.z; return *this; +} + +inline Vector3 Vector3::operator/(const float a) const { + return Vector3(x/a, y/a, z/a); +} + +inline Vector3& Vector3::operator/=(const float a) { + x /= a; y /= a; z /= a; return *this; +} + +inline bool Vector3::operator==(const Vector3& rhs) const { + return (x == rhs.x) && (y == rhs.y) && (z == rhs.z); +} + +inline bool Vector3::operator!=(const Vector3& rhs) const { + return (x != rhs.x) || (y != rhs.y) || (z != rhs.z); +} + +inline bool Vector3::operator<(const Vector3& rhs) const { + if(x < rhs.x) return true; + if(x > rhs.x) return false; + if(y < rhs.y) return true; + if(y > rhs.y) return false; + if(z < rhs.z) return true; + if(z > rhs.z) return false; + return false; +} + +inline float Vector3::operator[](int index) const { + return (&x)[index]; +} + +inline float& Vector3::operator[](int index) { + return (&x)[index]; +} + +inline void Vector3::set(float x, float y, float z) { + this->x = x; this->y = y; this->z = z; +} + +inline float Vector3::length() const { + return sqrtf(x*x + y*y + z*z); +} + +inline float Vector3::distance(const Vector3& vec) const { + return sqrtf((vec.x-x)*(vec.x-x) + (vec.y-y)*(vec.y-y) + (vec.z-z)*(vec.z-z)); +} + +inline Vector3& Vector3::normalize() { + //@@const float EPSILON = 0.000001f; + float xxyyzz = x*x + y*y + z*z; + //@@if(xxyyzz < EPSILON) + //@@ return *this; // do nothing if it is ~zero vector + + //float invLength = invSqrt(xxyyzz); + float invLength = 1.0f / sqrtf(xxyyzz); + x *= invLength; + y *= invLength; + z *= invLength; + return *this; +} + +inline float Vector3::dot(const Vector3& rhs) const { + return (x*rhs.x + y*rhs.y + z*rhs.z); +} + +inline Vector3 Vector3::cross(const Vector3& rhs) const { + return Vector3(y*rhs.z - z*rhs.y, z*rhs.x - x*rhs.z, x*rhs.y - y*rhs.x); +} + +inline bool Vector3::equal(const Vector3& rhs, float epsilon) const { + return fabs(x - rhs.x) < epsilon && fabs(y - rhs.y) < epsilon && fabs(z - rhs.z) < epsilon; +} + +inline Vector3 operator*(const float a, const Vector3 vec) { + return Vector3(a*vec.x, a*vec.y, a*vec.z); +} + +inline std::ostream& operator<<(std::ostream& os, const Vector3& vec) { + os << "(" << vec.x << ", " << vec.y << ", " << vec.z << ")"; + return os; +} +// END OF VECTOR3 ///////////////////////////////////////////////////////////// + + + +/////////////////////////////////////////////////////////////////////////////// +// inline functions for Vector4 +/////////////////////////////////////////////////////////////////////////////// +inline Vector4 
Vector4::operator-() const { + return Vector4(-x, -y, -z, -w); +} + +inline Vector4 Vector4::operator+(const Vector4& rhs) const { + return Vector4(x+rhs.x, y+rhs.y, z+rhs.z, w+rhs.w); +} + +inline Vector4 Vector4::operator-(const Vector4& rhs) const { + return Vector4(x-rhs.x, y-rhs.y, z-rhs.z, w-rhs.w); +} + +inline Vector4& Vector4::operator+=(const Vector4& rhs) { + x += rhs.x; y += rhs.y; z += rhs.z; w += rhs.w; return *this; +} + +inline Vector4& Vector4::operator-=(const Vector4& rhs) { + x -= rhs.x; y -= rhs.y; z -= rhs.z; w -= rhs.w; return *this; +} + +inline Vector4 Vector4::operator*(const float a) const { + return Vector4(x*a, y*a, z*a, w*a); +} + +inline Vector4 Vector4::operator*(const Vector4& rhs) const { + return Vector4(x*rhs.x, y*rhs.y, z*rhs.z, w*rhs.w); +} + +inline Vector4& Vector4::operator*=(const float a) { + x *= a; y *= a; z *= a; w *= a; return *this; +} + +inline Vector4& Vector4::operator*=(const Vector4& rhs) { + x *= rhs.x; y *= rhs.y; z *= rhs.z; w *= rhs.w; return *this; +} + +inline Vector4 Vector4::operator/(const float a) const { + return Vector4(x/a, y/a, z/a, w/a); +} + +inline Vector4& Vector4::operator/=(const float a) { + x /= a; y /= a; z /= a; w /= a; return *this; +} + +inline bool Vector4::operator==(const Vector4& rhs) const { + return (x == rhs.x) && (y == rhs.y) && (z == rhs.z) && (w == rhs.w); +} + +inline bool Vector4::operator!=(const Vector4& rhs) const { + return (x != rhs.x) || (y != rhs.y) || (z != rhs.z) || (w != rhs.w); +} + +inline bool Vector4::operator<(const Vector4& rhs) const { + if(x < rhs.x) return true; + if(x > rhs.x) return false; + if(y < rhs.y) return true; + if(y > rhs.y) return false; + if(z < rhs.z) return true; + if(z > rhs.z) return false; + if(w < rhs.w) return true; + if(w > rhs.w) return false; + return false; +} + +inline float Vector4::operator[](int index) const { + return (&x)[index]; +} + +inline float& Vector4::operator[](int index) { + return (&x)[index]; +} + +inline void Vector4::set(float x, float y, float z, float w) { + this->x = x; this->y = y; this->z = z; this->w = w; +} + +inline float Vector4::length() const { + return sqrtf(x*x + y*y + z*z + w*w); +} + +inline float Vector4::distance(const Vector4& vec) const { + return sqrtf((vec.x-x)*(vec.x-x) + (vec.y-y)*(vec.y-y) + (vec.z-z)*(vec.z-z) + (vec.w-w)*(vec.w-w)); +} + +inline Vector4& Vector4::normalize() { + //NOTE: leave w-component untouched + //@@const float EPSILON = 0.000001f; + float xxyyzz = x*x + y*y + z*z; + //@@if(xxyyzz < EPSILON) + //@@ return *this; // do nothing if it is zero vector + + //float invLength = invSqrt(xxyyzz); + float invLength = 1.0f / sqrtf(xxyyzz); + x *= invLength; + y *= invLength; + z *= invLength; + return *this; +} + +inline float Vector4::dot(const Vector4& rhs) const { + return (x*rhs.x + y*rhs.y + z*rhs.z + w*rhs.w); +} + +inline bool Vector4::equal(const Vector4& rhs, float epsilon) const { + return fabs(x - rhs.x) < epsilon && fabs(y - rhs.y) < epsilon && + fabs(z - rhs.z) < epsilon && fabs(w - rhs.w) < epsilon; +} + +inline Vector4 operator*(const float a, const Vector4 vec) { + return Vector4(a*vec.x, a*vec.y, a*vec.z, a*vec.w); +} + +inline std::ostream& operator<<(std::ostream& os, const Vector4& vec) { + os << "(" << vec.x << ", " << vec.y << ", " << vec.z << ", " << vec.w << ")"; + return os; +} +// END OF VECTOR4 ///////////////////////////////////////////////////////////// + +#endif diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.cpp 
b/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.cpp new file mode 100644 index 000000000..d57a9d945 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.cpp @@ -0,0 +1,6104 @@ +/* +LodePNG version 20140823 + +Copyright (c) 2005-2014 Lode Vandevenne + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source + distribution. +*/ + +/* +The manual and changelog are in the header file "lodepng.h" +Rename this file to lodepng.cpp to use it for C++, or to lodepng.c to use it for C. +*/ + +#include "lodepng.h" + +#include +#include + +#ifdef LODEPNG_COMPILE_CPP +#include +#endif /*LODEPNG_COMPILE_CPP*/ + +#define VERSION_STRING "20140823" + +#if defined(_MSC_VER) && (_MSC_VER >= 1310) /*Visual Studio: A few warning types are not desired here.*/ +#pragma warning( disable : 4244 ) /*implicit conversions: not warned by gcc -Wall -Wextra and requires too much casts*/ +#pragma warning( disable : 4996 ) /*VS does not like fopen, but fopen_s is not standard C so unusable here*/ +#endif /*_MSC_VER */ + +/* +This source file is built up in the following large parts. The code sections +with the "LODEPNG_COMPILE_" #defines divide this up further in an intermixed way. +-Tools for C and common code for PNG and Zlib +-C Code for Zlib (huffman, deflate, ...) +-C Code for PNG (file format chunks, adam7, PNG filters, color conversions, ...) +-The C++ wrapper around all of the above +*/ + +/*The malloc, realloc and free functions defined here with "lodepng_" in front +of the name, so that you can easily change them to others related to your +platform if needed. Everything else in the code calls these. Pass +-DLODEPNG_NO_COMPILE_ALLOCATORS to the compiler, or comment out +#define LODEPNG_COMPILE_ALLOCATORS in the header, to disable the ones here and +define them in your own project's source files without needing to change +lodepng source code. Don't forget to remove "static" if you copypaste them +from here.*/ + +#ifdef LODEPNG_COMPILE_ALLOCATORS +static void* lodepng_malloc(size_t size) +{ + return malloc(size); +} + +static void* lodepng_realloc(void* ptr, size_t new_size) +{ + return realloc(ptr, new_size); +} + +static void lodepng_free(void* ptr) +{ + free(ptr); +} +#else /*LODEPNG_COMPILE_ALLOCATORS*/ +void* lodepng_malloc(size_t size); +void* lodepng_realloc(void* ptr, size_t new_size); +void lodepng_free(void* ptr); +#endif /*LODEPNG_COMPILE_ALLOCATORS*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* // Tools for C, and common code for PNG and Zlib. 
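The allocator comment above spells out the override mechanism: build with -DLODEPNG_NO_COMPILE_ALLOCATORS (or comment out LODEPNG_COMPILE_ALLOCATORS in the header) and provide non-static functions with the same signatures somewhere else in the project. A minimal sketch of such an override, assuming that define is set; the byte counter is purely illustrative and not part of lodepng:

#include <cstdlib>

static std::size_t g_lodepngBytesRequested = 0;  // illustrative bookkeeping only

// Same signatures lodepng declares when its built-in allocators are compiled
// out (and, as the comment above warns, not "static").
void* lodepng_malloc(std::size_t size)
{
    g_lodepngBytesRequested += size;
    return std::malloc(size);
}

void* lodepng_realloc(void* ptr, std::size_t new_size)
{
    g_lodepngBytesRequested += new_size;
    return std::realloc(ptr, new_size);
}

void lodepng_free(void* ptr)
{
    std::free(ptr);
}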
// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/* +Often in case of an error a value is assigned to a variable and then it breaks +out of a loop (to go to the cleanup phase of a function). This macro does that. +It makes the error handling code shorter and more readable. + +Example: if(!uivector_resizev(&frequencies_ll, 286, 0)) ERROR_BREAK(83); +*/ +#define CERROR_BREAK(errorvar, code)\ +{\ + errorvar = code;\ + break;\ +} + +/*version of CERROR_BREAK that assumes the common case where the error variable is named "error"*/ +#define ERROR_BREAK(code) CERROR_BREAK(error, code) + +/*Set error var to the error code, and return it.*/ +#define CERROR_RETURN_ERROR(errorvar, code)\ +{\ + errorvar = code;\ + return code;\ +} + +/*Try the code, if it returns error, also return the error.*/ +#define CERROR_TRY_RETURN(call)\ +{\ + unsigned error = call;\ + if(error) return error;\ +} + +/* +About uivector, ucvector and string: +-All of them wrap dynamic arrays or text strings in a similar way. +-LodePNG was originally written in C++. The vectors replace the std::vectors that were used in the C++ version. +-The string tools are made to avoid problems with compilers that declare things like strncat as deprecated. +-They're not used in the interface, only internally in this file as static functions. +-As with many other structs in this file, the init and cleanup functions serve as ctor and dtor. +*/ + +#ifdef LODEPNG_COMPILE_ZLIB +/*dynamic vector of unsigned ints*/ +typedef struct uivector +{ + unsigned* data; + size_t size; /*size in number of unsigned longs*/ + size_t allocsize; /*allocated size in bytes*/ +} uivector; + +static void uivector_cleanup(void* p) +{ + ((uivector*)p)->size = ((uivector*)p)->allocsize = 0; + lodepng_free(((uivector*)p)->data); + ((uivector*)p)->data = NULL; +} + +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned uivector_reserve(uivector* p, size_t allocsize) +{ + if(allocsize > p->allocsize) + { + size_t newsize = (allocsize > p->allocsize * 2) ? 
allocsize : (allocsize * 3 / 2); + void* data = lodepng_realloc(p->data, newsize); + if(data) + { + p->allocsize = newsize; + p->data = (unsigned*)data; + } + else return 0; /*error: not enough memory*/ + } + return 1; +} + +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned uivector_resize(uivector* p, size_t size) +{ + if(!uivector_reserve(p, size * sizeof(unsigned))) return 0; + p->size = size; + return 1; /*success*/ +} + +/*resize and give all new elements the value*/ +static unsigned uivector_resizev(uivector* p, size_t size, unsigned value) +{ + size_t oldsize = p->size, i; + if(!uivector_resize(p, size)) return 0; + for(i = oldsize; i < size; i++) p->data[i] = value; + return 1; +} + +static void uivector_init(uivector* p) +{ + p->data = NULL; + p->size = p->allocsize = 0; +} + +#ifdef LODEPNG_COMPILE_ENCODER +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned uivector_push_back(uivector* p, unsigned c) +{ + if(!uivector_resize(p, p->size + 1)) return 0; + p->data[p->size - 1] = c; + return 1; +} + +/*copy q to p, returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned uivector_copy(uivector* p, const uivector* q) +{ + size_t i; + if(!uivector_resize(p, q->size)) return 0; + for(i = 0; i < q->size; i++) p->data[i] = q->data[i]; + return 1; +} +#endif /*LODEPNG_COMPILE_ENCODER*/ +#endif /*LODEPNG_COMPILE_ZLIB*/ + +/* /////////////////////////////////////////////////////////////////////////// */ + +/*dynamic vector of unsigned chars*/ +typedef struct ucvector +{ + unsigned char* data; + size_t size; /*used size*/ + size_t allocsize; /*allocated size*/ +} ucvector; + +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned ucvector_reserve(ucvector* p, size_t allocsize) +{ + if(allocsize > p->allocsize) + { + size_t newsize = (allocsize > p->allocsize * 2) ? allocsize : (allocsize * 3 / 2); + void* data = lodepng_realloc(p->data, newsize); + if(data) + { + p->allocsize = newsize; + p->data = (unsigned char*)data; + } + else return 0; /*error: not enough memory*/ + } + return 1; +} + +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned ucvector_resize(ucvector* p, size_t size) +{ + if(!ucvector_reserve(p, size * sizeof(unsigned char))) return 0; + p->size = size; + return 1; /*success*/ +} + +#ifdef LODEPNG_COMPILE_PNG + +static void ucvector_cleanup(void* p) +{ + ((ucvector*)p)->size = ((ucvector*)p)->allocsize = 0; + lodepng_free(((ucvector*)p)->data); + ((ucvector*)p)->data = NULL; +} + +static void ucvector_init(ucvector* p) +{ + p->data = NULL; + p->size = p->allocsize = 0; +} + +#ifdef LODEPNG_COMPILE_DECODER +/*resize and give all new elements the value*/ +static unsigned ucvector_resizev(ucvector* p, size_t size, unsigned char value) +{ + size_t oldsize = p->size, i; + if(!ucvector_resize(p, size)) return 0; + for(i = oldsize; i < size; i++) p->data[i] = value; + return 1; +} +#endif /*LODEPNG_COMPILE_DECODER*/ +#endif /*LODEPNG_COMPILE_PNG*/ + +#ifdef LODEPNG_COMPILE_ZLIB +/*you can both convert from vector to buffer&size and vica versa. 
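The reserve helpers above grow geometrically: a request that more than doubles the current allocation is honored exactly, anything smaller is rounded up to 1.5x the requested size, so repeated one-element push_backs cause only a logarithmic number of reallocations. A small standalone sketch of that policy; the names are illustrative, not lodepng API:

#include <cstdio>
#include <cstddef>

// Mirrors the rule in uivector_reserve/ucvector_reserve above.
static std::size_t grow(std::size_t currentAlloc, std::size_t requested)
{
    if (requested <= currentAlloc) return currentAlloc;               // nothing to do
    return (requested > currentAlloc * 2) ? requested : requested * 3 / 2;
}

int main()
{
    std::size_t alloc = 0;
    for (std::size_t used = 1; used <= 1000; ++used)                  // simulate push_back
    {
        std::size_t wanted = used * sizeof(unsigned);                 // bytes, as uivector_resize passes them
        std::size_t next = grow(alloc, wanted);
        if (next != alloc)
        {
            std::printf("element %4zu: realloc %zu -> %zu bytes\n", used, alloc, next);
            alloc = next;
        }
    }
    return 0;
}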
If you use +init_buffer to take over a buffer and size, it is not needed to use cleanup*/ +static void ucvector_init_buffer(ucvector* p, unsigned char* buffer, size_t size) +{ + p->data = buffer; + p->allocsize = p->size = size; +} +#endif /*LODEPNG_COMPILE_ZLIB*/ + +#if (defined(LODEPNG_COMPILE_PNG) && defined(LODEPNG_COMPILE_ANCILLARY_CHUNKS)) || defined(LODEPNG_COMPILE_ENCODER) +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned ucvector_push_back(ucvector* p, unsigned char c) +{ + if(!ucvector_resize(p, p->size + 1)) return 0; + p->data[p->size - 1] = c; + return 1; +} +#endif /*defined(LODEPNG_COMPILE_PNG) || defined(LODEPNG_COMPILE_ENCODER)*/ + + +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_PNG +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS +/*returns 1 if success, 0 if failure ==> nothing done*/ +static unsigned string_resize(char** out, size_t size) +{ + char* data = (char*)lodepng_realloc(*out, size + 1); + if(data) + { + data[size] = 0; /*null termination char*/ + *out = data; + } + return data != 0; +} + +/*init a {char*, size_t} pair for use as string*/ +static void string_init(char** out) +{ + *out = NULL; + string_resize(out, 0); +} + +/*free the above pair again*/ +static void string_cleanup(char** out) +{ + lodepng_free(*out); + *out = NULL; +} + +static void string_set(char** out, const char* in) +{ + size_t insize = strlen(in), i = 0; + if(string_resize(out, insize)) + { + for(i = 0; i < insize; i++) + { + (*out)[i] = in[i]; + } + } +} +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +#endif /*LODEPNG_COMPILE_PNG*/ + +/* ////////////////////////////////////////////////////////////////////////// */ + +unsigned lodepng_read32bitInt(const unsigned char* buffer) +{ + return (unsigned)((buffer[0] << 24) | (buffer[1] << 16) | (buffer[2] << 8) | buffer[3]); +} + +#if defined(LODEPNG_COMPILE_PNG) || defined(LODEPNG_COMPILE_ENCODER) +/*buffer must have at least 4 allocated bytes available*/ +static void lodepng_set32bitInt(unsigned char* buffer, unsigned value) +{ + buffer[0] = (unsigned char)((value >> 24) & 0xff); + buffer[1] = (unsigned char)((value >> 16) & 0xff); + buffer[2] = (unsigned char)((value >> 8) & 0xff); + buffer[3] = (unsigned char)((value ) & 0xff); +} +#endif /*defined(LODEPNG_COMPILE_PNG) || defined(LODEPNG_COMPILE_ENCODER)*/ + +#ifdef LODEPNG_COMPILE_ENCODER +static void lodepng_add32bitInt(ucvector* buffer, unsigned value) +{ + ucvector_resize(buffer, buffer->size + 4); /*todo: give error if resize failed*/ + lodepng_set32bitInt(&buffer->data[buffer->size - 4], value); +} +#endif /*LODEPNG_COMPILE_ENCODER*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / File IO / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_DISK + +unsigned lodepng_load_file(unsigned char** out, size_t* outsize, const char* filename) +{ + FILE* file; + long size; + + /*provide some proper output values if error will happen*/ + *out = 0; + *outsize = 0; + + file = fopen(filename, "rb"); + if(!file) return 78; + + /*get filesize:*/ + fseek(file , 0 , SEEK_END); + size = ftell(file); + rewind(file); + + /*read contents of the file into the vector*/ + *outsize = 0; + *out = (unsigned char*)lodepng_malloc((size_t)size); + if(size && (*out)) (*outsize) = fread(*out, 1, (size_t)size, file); + + fclose(file); + if(!(*out) && size) return 83; /*the above malloc failed*/ + return 0; +} + +/*write given buffer to the file, 
overwriting the file, it doesn't append to it.*/ +unsigned lodepng_save_file(const unsigned char* buffer, size_t buffersize, const char* filename) +{ + FILE* file; + file = fopen(filename, "wb" ); + if(!file) return 79; + fwrite((char*)buffer , 1 , buffersize, file); + fclose(file); + return 0; +} + +#endif /*LODEPNG_COMPILE_DISK*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* // End of common code and tools. Begin of Zlib related code. // */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_ZLIB +#ifdef LODEPNG_COMPILE_ENCODER +/*TODO: this ignores potential out of memory errors*/ +#define addBitToStream(/*size_t**/ bitpointer, /*ucvector**/ bitstream, /*unsigned char*/ bit)\ +{\ + /*add a new byte at the end*/\ + if(((*bitpointer) & 7) == 0) ucvector_push_back(bitstream, (unsigned char)0);\ + /*earlier bit of huffman code is in a lesser significant bit of an earlier byte*/\ + (bitstream->data[bitstream->size - 1]) |= (bit << ((*bitpointer) & 0x7));\ + (*bitpointer)++;\ +} + +static void addBitsToStream(size_t* bitpointer, ucvector* bitstream, unsigned value, size_t nbits) +{ + size_t i; + for(i = 0; i < nbits; i++) addBitToStream(bitpointer, bitstream, (unsigned char)((value >> i) & 1)); +} + +static void addBitsToStreamReversed(size_t* bitpointer, ucvector* bitstream, unsigned value, size_t nbits) +{ + size_t i; + for(i = 0; i < nbits; i++) addBitToStream(bitpointer, bitstream, (unsigned char)((value >> (nbits - 1 - i)) & 1)); +} +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#ifdef LODEPNG_COMPILE_DECODER + +#define READBIT(bitpointer, bitstream) ((bitstream[bitpointer >> 3] >> (bitpointer & 0x7)) & (unsigned char)1) + +static unsigned char readBitFromStream(size_t* bitpointer, const unsigned char* bitstream) +{ + unsigned char result = (unsigned char)(READBIT(*bitpointer, bitstream)); + (*bitpointer)++; + return result; +} + +static unsigned readBitsFromStream(size_t* bitpointer, const unsigned char* bitstream, size_t nbits) +{ + unsigned result = 0, i; + for(i = 0; i < nbits; i++) + { + result += ((unsigned)READBIT(*bitpointer, bitstream)) << i; + (*bitpointer)++; + } + return result; +} +#endif /*LODEPNG_COMPILE_DECODER*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Deflate - Huffman / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#define FIRST_LENGTH_CODE_INDEX 257 +#define LAST_LENGTH_CODE_INDEX 285 +/*256 literals, the end code, some length codes, and 2 unused codes*/ +#define NUM_DEFLATE_CODE_SYMBOLS 288 +/*the distance codes have their own symbols, 30 used, 2 unused*/ +#define NUM_DISTANCE_SYMBOLS 32 +/*the code length codes. 
0-15: code lengths, 16: copy previous 3-6 times, 17: 3-10 zeros, 18: 11-138 zeros*/ +#define NUM_CODE_LENGTH_CODES 19 + +/*the base lengths represented by codes 257-285*/ +static const unsigned LENGTHBASE[29] + = {3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, + 67, 83, 99, 115, 131, 163, 195, 227, 258}; + +/*the extra bits used by codes 257-285 (added to base length)*/ +static const unsigned LENGTHEXTRA[29] + = {0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, + 4, 4, 4, 4, 5, 5, 5, 5, 0}; + +/*the base backwards distances (the bits of distance codes appear after length codes and use their own huffman tree)*/ +static const unsigned DISTANCEBASE[30] + = {1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, + 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577}; + +/*the extra bits of backwards distances (added to base)*/ +static const unsigned DISTANCEEXTRA[30] + = {0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, + 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13}; + +/*the order in which "code length alphabet code lengths" are stored, out of this +the huffman tree of the dynamic huffman tree lengths is generated*/ +static const unsigned CLCL_ORDER[NUM_CODE_LENGTH_CODES] + = {16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; + +/* ////////////////////////////////////////////////////////////////////////// */ + +/* +Huffman tree struct, containing multiple representations of the tree +*/ +typedef struct HuffmanTree +{ + unsigned* tree2d; + unsigned* tree1d; + unsigned* lengths; /*the lengths of the codes of the 1d-tree*/ + unsigned maxbitlen; /*maximum number of bits a single code can get*/ + unsigned numcodes; /*number of symbols in the alphabet = number of codes*/ +} HuffmanTree; + +/*function used for debug purposes to draw the tree in ascii art with C++*/ +/* +static void HuffmanTree_draw(HuffmanTree* tree) +{ + std::cout << "tree. length: " << tree->numcodes << " maxbitlen: " << tree->maxbitlen << std::endl; + for(size_t i = 0; i < tree->tree1d.size; i++) + { + if(tree->lengths.data[i]) + std::cout << i << " " << tree->tree1d.data[i] << " " << tree->lengths.data[i] << std::endl; + } + std::cout << std::endl; +}*/ + +static void HuffmanTree_init(HuffmanTree* tree) +{ + tree->tree2d = 0; + tree->tree1d = 0; + tree->lengths = 0; +} + +static void HuffmanTree_cleanup(HuffmanTree* tree) +{ + lodepng_free(tree->tree2d); + lodepng_free(tree->tree1d); + lodepng_free(tree->lengths); +} + +/*the tree representation used by the decoder. return value is error*/ +static unsigned HuffmanTree_make2DTree(HuffmanTree* tree) +{ + unsigned nodefilled = 0; /*up to which node it is filled*/ + unsigned treepos = 0; /*position in the tree (1 of the numcodes columns)*/ + unsigned n, i; + + tree->tree2d = (unsigned*)lodepng_malloc(tree->numcodes * 2 * sizeof(unsigned)); + if(!tree->tree2d) return 83; /*alloc fail*/ + + /* + convert tree1d[] to tree2d[][]. In the 2D array, a value of 32767 means + uninited, a value >= numcodes is an address to another bit, a value < numcodes + is a code. The 2 rows are the 2 possible bit values (0 or 1), there are as + many columns as codes - 1. + A good huffmann tree has N * 2 - 1 nodes, of which N - 1 are internal nodes. + Here, the internal nodes are stored (what their 0 and 1 option point to). 
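Given the conventions described in the comment above (two entries per node for bits 0 and 1, values below numcodes are decoded symbols, values at or above numcodes are child addresses offset by numcodes), a decoder can walk the table one bit at a time. A hedged sketch of that walk with illustrative names, not lodepng's own decode routine; bits are taken least-significant-first from each byte, as DEFLATE requires:

#include <cstdio>
#include <cstddef>

static unsigned decodeSymbol(const unsigned* tree2d, unsigned numcodes,
                             const unsigned char* bitstream, std::size_t* bitpointer)
{
    unsigned treepos = 0;
    for (;;)
    {
        unsigned char bit = (bitstream[*bitpointer >> 3] >> (*bitpointer & 7u)) & 1u;
        ++(*bitpointer);
        unsigned value = tree2d[2 * treepos + bit];
        if (value < numcodes) return value;   // leaf reached: this is the symbol
        treepos = value - numcodes;           // internal node: follow stored address
    }
}

int main()
{
    // Tiny table for two one-bit codes: bit 0 -> symbol 0, bit 1 -> symbol 1.
    const unsigned tree2d[4] = { 0, 1, 0, 0 };
    const unsigned char bits[1] = { 0x02 };   // LSB-first: reads as 0, then 1
    std::size_t bp = 0;
    unsigned a = decodeSymbol(tree2d, 2, bits, &bp);
    unsigned b = decodeSymbol(tree2d, 2, bits, &bp);
    std::printf("decoded %u then %u\n", a, b);
    return 0;
}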
+ There is only memory for such good tree currently, if there are more nodes + (due to too long length codes), error 55 will happen + */ + for(n = 0; n < tree->numcodes * 2; n++) + { + tree->tree2d[n] = 32767; /*32767 here means the tree2d isn't filled there yet*/ + } + + for(n = 0; n < tree->numcodes; n++) /*the codes*/ + { + for(i = 0; i < tree->lengths[n]; i++) /*the bits for this code*/ + { + unsigned char bit = (unsigned char)((tree->tree1d[n] >> (tree->lengths[n] - i - 1)) & 1); + if(treepos > tree->numcodes - 2) return 55; /*oversubscribed, see comment in lodepng_error_text*/ + if(tree->tree2d[2 * treepos + bit] == 32767) /*not yet filled in*/ + { + if(i + 1 == tree->lengths[n]) /*last bit*/ + { + tree->tree2d[2 * treepos + bit] = n; /*put the current code in it*/ + treepos = 0; + } + else + { + /*put address of the next step in here, first that address has to be found of course + (it's just nodefilled + 1)...*/ + nodefilled++; + /*addresses encoded with numcodes added to it*/ + tree->tree2d[2 * treepos + bit] = nodefilled + tree->numcodes; + treepos = nodefilled; + } + } + else treepos = tree->tree2d[2 * treepos + bit] - tree->numcodes; + } + } + + for(n = 0; n < tree->numcodes * 2; n++) + { + if(tree->tree2d[n] == 32767) tree->tree2d[n] = 0; /*remove possible remaining 32767's*/ + } + + return 0; +} + +/* +Second step for the ...makeFromLengths and ...makeFromFrequencies functions. +numcodes, lengths and maxbitlen must already be filled in correctly. return +value is error. +*/ +static unsigned HuffmanTree_makeFromLengths2(HuffmanTree* tree) +{ + uivector blcount; + uivector nextcode; + unsigned bits, n, error = 0; + + uivector_init(&blcount); + uivector_init(&nextcode); + + tree->tree1d = (unsigned*)lodepng_malloc(tree->numcodes * sizeof(unsigned)); + if(!tree->tree1d) error = 83; /*alloc fail*/ + + if(!uivector_resizev(&blcount, tree->maxbitlen + 1, 0) + || !uivector_resizev(&nextcode, tree->maxbitlen + 1, 0)) + error = 83; /*alloc fail*/ + + if(!error) + { + /*step 1: count number of instances of each code length*/ + for(bits = 0; bits < tree->numcodes; bits++) blcount.data[tree->lengths[bits]]++; + /*step 2: generate the nextcode values*/ + for(bits = 1; bits <= tree->maxbitlen; bits++) + { + nextcode.data[bits] = (nextcode.data[bits - 1] + blcount.data[bits - 1]) << 1; + } + /*step 3: generate all the codes*/ + for(n = 0; n < tree->numcodes; n++) + { + if(tree->lengths[n] != 0) tree->tree1d[n] = nextcode.data[tree->lengths[n]]++; + } + } + + uivector_cleanup(&blcount); + uivector_cleanup(&nextcode); + + if(!error) return HuffmanTree_make2DTree(tree); + else return error; +} + +/* +given the code lengths (as stored in the PNG file), generate the tree as defined +by Deflate. maxbitlen is the maximum bits that a code in the tree can have. +return value is error. +*/ +static unsigned HuffmanTree_makeFromLengths(HuffmanTree* tree, const unsigned* bitlen, + size_t numcodes, unsigned maxbitlen) +{ + unsigned i; + tree->lengths = (unsigned*)lodepng_malloc(numcodes * sizeof(unsigned)); + if(!tree->lengths) return 83; /*alloc fail*/ + for(i = 0; i < numcodes; i++) tree->lengths[i] = bitlen[i]; + tree->numcodes = (unsigned)numcodes; /*number of symbols*/ + tree->maxbitlen = maxbitlen; + return HuffmanTree_makeFromLengths2(tree); +} + +#ifdef LODEPNG_COMPILE_ENCODER + +/* +A coin, this is the terminology used for the package-merge algorithm and the +coin collector's problem. This is used to generate the huffman tree. 
+A coin can be multiple coins (when they're merged) +*/ +typedef struct Coin +{ + uivector symbols; + float weight; /*the sum of all weights in this coin*/ +} Coin; + +static void coin_init(Coin* c) +{ + uivector_init(&c->symbols); +} + +/*argument c is void* so that this dtor can be given as function pointer to the vector resize function*/ +static void coin_cleanup(void* c) +{ + uivector_cleanup(&((Coin*)c)->symbols); +} + +static void coin_copy(Coin* c1, const Coin* c2) +{ + c1->weight = c2->weight; + uivector_copy(&c1->symbols, &c2->symbols); +} + +static void add_coins(Coin* c1, const Coin* c2) +{ + size_t i; + for(i = 0; i < c2->symbols.size; i++) uivector_push_back(&c1->symbols, c2->symbols.data[i]); + c1->weight += c2->weight; +} + +static void init_coins(Coin* coins, size_t num) +{ + size_t i; + for(i = 0; i < num; i++) coin_init(&coins[i]); +} + +static void cleanup_coins(Coin* coins, size_t num) +{ + size_t i; + for(i = 0; i < num; i++) coin_cleanup(&coins[i]); +} + +static int coin_compare(const void* a, const void* b) { + float wa = ((const Coin*)a)->weight; + float wb = ((const Coin*)b)->weight; + return wa > wb ? 1 : wa < wb ? -1 : 0; +} + +static unsigned append_symbol_coins(Coin* coins, const unsigned* frequencies, unsigned numcodes, size_t sum) +{ + unsigned i; + unsigned j = 0; /*index of present symbols*/ + for(i = 0; i < numcodes; i++) + { + if(frequencies[i] != 0) /*only include symbols that are present*/ + { + coins[j].weight = frequencies[i] / (float)sum; + uivector_push_back(&coins[j].symbols, i); + j++; + } + } + return 0; +} + +unsigned lodepng_huffman_code_lengths(unsigned* lengths, const unsigned* frequencies, + size_t numcodes, unsigned maxbitlen) +{ + unsigned i, j; + size_t sum = 0, numpresent = 0; + unsigned error = 0; + Coin* coins; /*the coins of the currently calculated row*/ + Coin* prev_row; /*the previous row of coins*/ + size_t numcoins; + size_t coinmem; + + if(numcodes == 0) return 80; /*error: a tree of 0 symbols is not supposed to be made*/ + + for(i = 0; i < numcodes; i++) + { + if(frequencies[i] > 0) + { + numpresent++; + sum += frequencies[i]; + } + } + + for(i = 0; i < numcodes; i++) lengths[i] = 0; + + /*ensure at least two present symbols. There should be at least one symbol + according to RFC 1951 section 3.2.7. To decoders incorrectly require two. To + make these work as well ensure there are at least two symbols. The + Package-Merge code below also doesn't work correctly if there's only one + symbol, it'd give it the theoritical 0 bits but in practice zlib wants 1 bit*/ + if(numpresent == 0) + { + lengths[0] = lengths[1] = 1; /*note that for RFC 1951 section 3.2.7, only lengths[0] = 1 is needed*/ + } + else if(numpresent == 1) + { + for(i = 0; i < numcodes; i++) + { + if(frequencies[i]) + { + lengths[i] = 1; + lengths[i == 0 ? 
1 : 0] = 1; + break; + } + } + } + else + { + /*Package-Merge algorithm represented by coin collector's problem + For every symbol, maxbitlen coins will be created*/ + + coinmem = numpresent * 2; /*max amount of coins needed with the current algo*/ + coins = (Coin*)lodepng_malloc(sizeof(Coin) * coinmem); + prev_row = (Coin*)lodepng_malloc(sizeof(Coin) * coinmem); + if(!coins || !prev_row) + { + lodepng_free(coins); + lodepng_free(prev_row); + return 83; /*alloc fail*/ + } + init_coins(coins, coinmem); + init_coins(prev_row, coinmem); + + /*first row, lowest denominator*/ + error = append_symbol_coins(coins, frequencies, numcodes, sum); + numcoins = numpresent; + qsort(coins, numcoins, sizeof(Coin), coin_compare); + if(!error) + { + unsigned numprev = 0; + for(j = 1; j <= maxbitlen && !error; j++) /*each of the remaining rows*/ + { + unsigned tempnum; + Coin* tempcoins; + /*swap prev_row and coins, and their amounts*/ + tempcoins = prev_row; prev_row = coins; coins = tempcoins; + tempnum = numprev; numprev = numcoins; numcoins = tempnum; + + cleanup_coins(coins, numcoins); + init_coins(coins, numcoins); + + numcoins = 0; + + /*fill in the merged coins of the previous row*/ + for(i = 0; i + 1 < numprev; i += 2) + { + /*merge prev_row[i] and prev_row[i + 1] into new coin*/ + Coin* coin = &coins[numcoins++]; + coin_copy(coin, &prev_row[i]); + add_coins(coin, &prev_row[i + 1]); + } + /*fill in all the original symbols again*/ + if(j < maxbitlen) + { + error = append_symbol_coins(coins + numcoins, frequencies, numcodes, sum); + numcoins += numpresent; + } + qsort(coins, numcoins, sizeof(Coin), coin_compare); + } + } + + if(!error) + { + /*calculate the lenghts of each symbol, as the amount of times a coin of each symbol is used*/ + for(i = 0; i < numpresent - 1; i++) + { + Coin* coin = &coins[i]; + for(j = 0; j < coin->symbols.size; j++) lengths[coin->symbols.data[j]]++; + } + } + + cleanup_coins(coins, coinmem); + lodepng_free(coins); + cleanup_coins(prev_row, coinmem); + lodepng_free(prev_row); + } + + return error; +} + +/*Create the Huffman tree given the symbol frequencies*/ +static unsigned HuffmanTree_makeFromFrequencies(HuffmanTree* tree, const unsigned* frequencies, + size_t mincodes, size_t numcodes, unsigned maxbitlen) +{ + unsigned error = 0; + while(!frequencies[numcodes - 1] && numcodes > mincodes) numcodes--; /*trim zeroes*/ + tree->maxbitlen = maxbitlen; + tree->numcodes = (unsigned)numcodes; /*number of symbols*/ + tree->lengths = (unsigned*)lodepng_realloc(tree->lengths, numcodes * sizeof(unsigned)); + if(!tree->lengths) return 83; /*alloc fail*/ + /*initialize all lengths to 0*/ + memset(tree->lengths, 0, numcodes * sizeof(unsigned)); + + error = lodepng_huffman_code_lengths(tree->lengths, frequencies, numcodes, maxbitlen); + if(!error) error = HuffmanTree_makeFromLengths2(tree); + return error; +} + +static unsigned HuffmanTree_getCode(const HuffmanTree* tree, unsigned index) +{ + return tree->tree1d[index]; +} + +static unsigned HuffmanTree_getLength(const HuffmanTree* tree, unsigned index) +{ + return tree->lengths[index]; +} +#endif /*LODEPNG_COMPILE_ENCODER*/ + +/*get the literal and length code tree of a deflated block with fixed tree, as per the deflate specification*/ +static unsigned generateFixedLitLenTree(HuffmanTree* tree) +{ + unsigned i, error = 0; + unsigned* bitlen = (unsigned*)lodepng_malloc(NUM_DEFLATE_CODE_SYMBOLS * sizeof(unsigned)); + if(!bitlen) return 83; /*alloc fail*/ + + /*288 possible codes: 0-255=literals, 256=endcode, 257-285=lengthcodes, 
286-287=unused*/ + for(i = 0; i <= 143; i++) bitlen[i] = 8; + for(i = 144; i <= 255; i++) bitlen[i] = 9; + for(i = 256; i <= 279; i++) bitlen[i] = 7; + for(i = 280; i <= 287; i++) bitlen[i] = 8; + + error = HuffmanTree_makeFromLengths(tree, bitlen, NUM_DEFLATE_CODE_SYMBOLS, 15); + + lodepng_free(bitlen); + return error; +} + +/*get the distance code tree of a deflated block with fixed tree, as specified in the deflate specification*/ +static unsigned generateFixedDistanceTree(HuffmanTree* tree) +{ + unsigned i, error = 0; + unsigned* bitlen = (unsigned*)lodepng_malloc(NUM_DISTANCE_SYMBOLS * sizeof(unsigned)); + if(!bitlen) return 83; /*alloc fail*/ + + /*there are 32 distance codes, but 30-31 are unused*/ + for(i = 0; i < NUM_DISTANCE_SYMBOLS; i++) bitlen[i] = 5; + error = HuffmanTree_makeFromLengths(tree, bitlen, NUM_DISTANCE_SYMBOLS, 15); + + lodepng_free(bitlen); + return error; +} + +#ifdef LODEPNG_COMPILE_DECODER + +/* +returns the code, or (unsigned)(-1) if error happened +inbitlength is the length of the complete buffer, in bits (so its byte length times 8) +*/ +static unsigned huffmanDecodeSymbol(const unsigned char* in, size_t* bp, + const HuffmanTree* codetree, size_t inbitlength) +{ + unsigned treepos = 0, ct; + for(;;) + { + if(*bp >= inbitlength) return (unsigned)(-1); /*error: end of input memory reached without endcode*/ + /* + decode the symbol from the tree. The "readBitFromStream" code is inlined in + the expression below because this is the biggest bottleneck while decoding + */ + ct = codetree->tree2d[(treepos << 1) + READBIT(*bp, in)]; + (*bp)++; + if(ct < codetree->numcodes) return ct; /*the symbol is decoded, return it*/ + else treepos = ct - codetree->numcodes; /*symbol not yet decoded, instead move tree position*/ + + if(treepos >= codetree->numcodes) return (unsigned)(-1); /*error: it appeared outside the codetree*/ + } +} +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_DECODER + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Inflator (Decompressor) / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/*get the tree of a deflated block with fixed tree, as specified in the deflate specification*/ +static void getTreeInflateFixed(HuffmanTree* tree_ll, HuffmanTree* tree_d) +{ + /*TODO: check for out of memory errors*/ + generateFixedLitLenTree(tree_ll); + generateFixedDistanceTree(tree_d); +} + +/*get the tree of a deflated block with dynamic tree, the tree itself is also Huffman compressed with a known tree*/ +static unsigned getTreeInflateDynamic(HuffmanTree* tree_ll, HuffmanTree* tree_d, + const unsigned char* in, size_t* bp, size_t inlength) +{ + /*make sure that length values that aren't filled in will be 0, or a wrong tree will be generated*/ + unsigned error = 0; + unsigned n, HLIT, HDIST, HCLEN, i; + size_t inbitlength = inlength * 8; + + /*see comments in deflateDynamic for explanation of the context and these variables, it is analogous*/ + unsigned* bitlen_ll = 0; /*lit,len code lengths*/ + unsigned* bitlen_d = 0; /*dist code lengths*/ + /*code length code lengths ("clcl"), the bit lengths of the huffman tree used to compress bitlen_ll and bitlen_d*/ + unsigned* bitlen_cl = 0; + HuffmanTree tree_cl; /*the code tree for code length codes (the huffman tree for compressed huffman trees)*/ + + if((*bp) >> 3 >= inlength - 2) return 49; /*error: the bit pointer is or will go past the memory*/ + + /*number of literal/length codes + 257. 
Unlike the spec, the value 257 is added to it here already*/ + HLIT = readBitsFromStream(bp, in, 5) + 257; + /*number of distance codes. Unlike the spec, the value 1 is added to it here already*/ + HDIST = readBitsFromStream(bp, in, 5) + 1; + /*number of code length codes. Unlike the spec, the value 4 is added to it here already*/ + HCLEN = readBitsFromStream(bp, in, 4) + 4; + + HuffmanTree_init(&tree_cl); + + while(!error) + { + /*read the code length codes out of 3 * (amount of code length codes) bits*/ + + bitlen_cl = (unsigned*)lodepng_malloc(NUM_CODE_LENGTH_CODES * sizeof(unsigned)); + if(!bitlen_cl) ERROR_BREAK(83 /*alloc fail*/); + + for(i = 0; i < NUM_CODE_LENGTH_CODES; i++) + { + if(i < HCLEN) bitlen_cl[CLCL_ORDER[i]] = readBitsFromStream(bp, in, 3); + else bitlen_cl[CLCL_ORDER[i]] = 0; /*if not, it must stay 0*/ + } + + error = HuffmanTree_makeFromLengths(&tree_cl, bitlen_cl, NUM_CODE_LENGTH_CODES, 7); + if(error) break; + + /*now we can use this tree to read the lengths for the tree that this function will return*/ + bitlen_ll = (unsigned*)lodepng_malloc(NUM_DEFLATE_CODE_SYMBOLS * sizeof(unsigned)); + bitlen_d = (unsigned*)lodepng_malloc(NUM_DISTANCE_SYMBOLS * sizeof(unsigned)); + if(!bitlen_ll || !bitlen_d) ERROR_BREAK(83 /*alloc fail*/); + for(i = 0; i < NUM_DEFLATE_CODE_SYMBOLS; i++) bitlen_ll[i] = 0; + for(i = 0; i < NUM_DISTANCE_SYMBOLS; i++) bitlen_d[i] = 0; + + /*i is the current symbol we're reading in the part that contains the code lengths of lit/len and dist codes*/ + i = 0; + while(i < HLIT + HDIST) + { + unsigned code = huffmanDecodeSymbol(in, bp, &tree_cl, inbitlength); + if(code <= 15) /*a length code*/ + { + if(i < HLIT) bitlen_ll[i] = code; + else bitlen_d[i - HLIT] = code; + i++; + } + else if(code == 16) /*repeat previous*/ + { + unsigned replength = 3; /*read in the 2 bits that indicate repeat length (3-6)*/ + unsigned value; /*set value to the previous code*/ + + if(*bp >= inbitlength) ERROR_BREAK(50); /*error, bit pointer jumps past memory*/ + if (i == 0) ERROR_BREAK(54); /*can't repeat previous if i is 0*/ + + replength += readBitsFromStream(bp, in, 2); + + if(i < HLIT + 1) value = bitlen_ll[i - 1]; + else value = bitlen_d[i - HLIT - 1]; + /*repeat this value in the next lengths*/ + for(n = 0; n < replength; n++) + { + if(i >= HLIT + HDIST) ERROR_BREAK(13); /*error: i is larger than the amount of codes*/ + if(i < HLIT) bitlen_ll[i] = value; + else bitlen_d[i - HLIT] = value; + i++; + } + } + else if(code == 17) /*repeat "0" 3-10 times*/ + { + unsigned replength = 3; /*read in the bits that indicate repeat length*/ + if(*bp >= inbitlength) ERROR_BREAK(50); /*error, bit pointer jumps past memory*/ + + replength += readBitsFromStream(bp, in, 3); + + /*repeat this value in the next lengths*/ + for(n = 0; n < replength; n++) + { + if(i >= HLIT + HDIST) ERROR_BREAK(14); /*error: i is larger than the amount of codes*/ + + if(i < HLIT) bitlen_ll[i] = 0; + else bitlen_d[i - HLIT] = 0; + i++; + } + } + else if(code == 18) /*repeat "0" 11-138 times*/ + { + unsigned replength = 11; /*read in the bits that indicate repeat length*/ + if(*bp >= inbitlength) ERROR_BREAK(50); /*error, bit pointer jumps past memory*/ + + replength += readBitsFromStream(bp, in, 7); + + /*repeat this value in the next lengths*/ + for(n = 0; n < replength; n++) + { + if(i >= HLIT + HDIST) ERROR_BREAK(15); /*error: i is larger than the amount of codes*/ + + if(i < HLIT) bitlen_ll[i] = 0; + else bitlen_d[i - HLIT] = 0; + i++; + } + } + else /*if(code == (unsigned)(-1))*/ /*huffmanDecodeSymbol 
returns (unsigned)(-1) in case of error*/ + { + if(code == (unsigned)(-1)) + { + /*return error code 10 or 11 depending on the situation that happened in huffmanDecodeSymbol + (10=no endcode, 11=wrong jump outside of tree)*/ + error = (*bp) > inbitlength ? 10 : 11; + } + else error = 16; /*unexisting code, this can never happen*/ + break; + } + } + if(error) break; + + if(bitlen_ll[256] == 0) ERROR_BREAK(64); /*the length of the end code 256 must be larger than 0*/ + + /*now we've finally got HLIT and HDIST, so generate the code trees, and the function is done*/ + error = HuffmanTree_makeFromLengths(tree_ll, bitlen_ll, NUM_DEFLATE_CODE_SYMBOLS, 15); + if(error) break; + error = HuffmanTree_makeFromLengths(tree_d, bitlen_d, NUM_DISTANCE_SYMBOLS, 15); + + break; /*end of error-while*/ + } + + lodepng_free(bitlen_cl); + lodepng_free(bitlen_ll); + lodepng_free(bitlen_d); + HuffmanTree_cleanup(&tree_cl); + + return error; +} + +/*inflate a block with dynamic of fixed Huffman tree*/ +static unsigned inflateHuffmanBlock(ucvector* out, const unsigned char* in, size_t* bp, + size_t* pos, size_t inlength, unsigned btype) +{ + unsigned error = 0; + HuffmanTree tree_ll; /*the huffman tree for literal and length codes*/ + HuffmanTree tree_d; /*the huffman tree for distance codes*/ + size_t inbitlength = inlength * 8; + + HuffmanTree_init(&tree_ll); + HuffmanTree_init(&tree_d); + + if(btype == 1) getTreeInflateFixed(&tree_ll, &tree_d); + else if(btype == 2) error = getTreeInflateDynamic(&tree_ll, &tree_d, in, bp, inlength); + + while(!error) /*decode all symbols until end reached, breaks at end code*/ + { + /*code_ll is literal, length or end code*/ + unsigned code_ll = huffmanDecodeSymbol(in, bp, &tree_ll, inbitlength); + if(code_ll <= 255) /*literal symbol*/ + { + /*ucvector_push_back would do the same, but for some reason the two lines below run 10% faster*/ + if(!ucvector_resize(out, (*pos) + 1)) ERROR_BREAK(83 /*alloc fail*/); + out->data[*pos] = (unsigned char)code_ll; + (*pos)++; + } + else if(code_ll >= FIRST_LENGTH_CODE_INDEX && code_ll <= LAST_LENGTH_CODE_INDEX) /*length code*/ + { + unsigned code_d, distance; + unsigned numextrabits_l, numextrabits_d; /*extra bits for length and distance*/ + size_t start, forward, backward, length; + + /*part 1: get length base*/ + length = LENGTHBASE[code_ll - FIRST_LENGTH_CODE_INDEX]; + + /*part 2: get extra bits and add the value of that to length*/ + numextrabits_l = LENGTHEXTRA[code_ll - FIRST_LENGTH_CODE_INDEX]; + if(*bp >= inbitlength) ERROR_BREAK(51); /*error, bit pointer will jump past memory*/ + length += readBitsFromStream(bp, in, numextrabits_l); + + /*part 3: get distance code*/ + code_d = huffmanDecodeSymbol(in, bp, &tree_d, inbitlength); + if(code_d > 29) + { + if(code_ll == (unsigned)(-1)) /*huffmanDecodeSymbol returns (unsigned)(-1) in case of error*/ + { + /*return error code 10 or 11 depending on the situation that happened in huffmanDecodeSymbol + (10=no endcode, 11=wrong jump outside of tree)*/ + error = (*bp) > inlength * 8 ? 
10 : 11; + } + else error = 18; /*error: invalid distance code (30-31 are never used)*/ + break; + } + distance = DISTANCEBASE[code_d]; + + /*part 4: get extra bits from distance*/ + numextrabits_d = DISTANCEEXTRA[code_d]; + if(*bp >= inbitlength) ERROR_BREAK(51); /*error, bit pointer will jump past memory*/ + + distance += readBitsFromStream(bp, in, numextrabits_d); + + /*part 5: fill in all the out[n] values based on the length and dist*/ + start = (*pos); + if(distance > start) ERROR_BREAK(52); /*too long backward distance*/ + backward = start - distance; + + if(!ucvector_resize(out, (*pos) + length)) ERROR_BREAK(83 /*alloc fail*/); + for(forward = 0; forward < length; forward++) + { + out->data[(*pos)] = out->data[backward]; + (*pos)++; + backward++; + if(backward >= start) backward = start - distance; + } + } + else if(code_ll == 256) + { + break; /*end code, break the loop*/ + } + else /*if(code == (unsigned)(-1))*/ /*huffmanDecodeSymbol returns (unsigned)(-1) in case of error*/ + { + /*return error code 10 or 11 depending on the situation that happened in huffmanDecodeSymbol + (10=no endcode, 11=wrong jump outside of tree)*/ + error = (*bp) > inlength * 8 ? 10 : 11; + break; + } + } + + HuffmanTree_cleanup(&tree_ll); + HuffmanTree_cleanup(&tree_d); + + return error; +} + +static unsigned inflateNoCompression(ucvector* out, const unsigned char* in, size_t* bp, size_t* pos, size_t inlength) +{ + /*go to first boundary of byte*/ + size_t p; + unsigned LEN, NLEN, n, error = 0; + while(((*bp) & 0x7) != 0) (*bp)++; + p = (*bp) / 8; /*byte position*/ + + /*read LEN (2 bytes) and NLEN (2 bytes)*/ + if(p >= inlength - 4) return 52; /*error, bit pointer will jump past memory*/ + LEN = in[p] + 256u * in[p + 1]; p += 2; + NLEN = in[p] + 256u * in[p + 1]; p += 2; + + /*check if 16-bit NLEN is really the one's complement of LEN*/ + if(LEN + NLEN != 65535) return 21; /*error: NLEN is not one's complement of LEN*/ + + if(!ucvector_resize(out, (*pos) + LEN)) return 83; /*alloc fail*/ + + /*read the literal data: LEN bytes are now stored in the out buffer*/ + if(p + LEN > inlength) return 23; /*error: reading outside of in buffer*/ + for(n = 0; n < LEN; n++) out->data[(*pos)++] = in[p++]; + + (*bp) = p * 8; + + return error; +} + +static unsigned lodepng_inflatev(ucvector* out, + const unsigned char* in, size_t insize, + const LodePNGDecompressSettings* settings) +{ + /*bit pointer in the "in" data, current byte is bp >> 3, current bit is bp & 0x7 (from lsb to msb of the byte)*/ + size_t bp = 0; + unsigned BFINAL = 0; + size_t pos = 0; /*byte position in the out buffer*/ + unsigned error = 0; + + (void)settings; + + while(!BFINAL) + { + unsigned BTYPE; + if(bp + 2 >= insize * 8) return 52; /*error, bit pointer will jump past memory*/ + BFINAL = readBitFromStream(&bp, in); + BTYPE = 1u * readBitFromStream(&bp, in); + BTYPE += 2u * readBitFromStream(&bp, in); + + if(BTYPE == 3) return 20; /*error: invalid BTYPE*/ + else if(BTYPE == 0) error = inflateNoCompression(out, in, &bp, &pos, insize); /*no compression*/ + else error = inflateHuffmanBlock(out, in, &bp, &pos, insize, BTYPE); /*compression, BTYPE 01 or 10*/ + + if(error) return error; + } + + return error; +} + +unsigned lodepng_inflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGDecompressSettings* settings) +{ + unsigned error; + ucvector v; + ucvector_init_buffer(&v, *out, *outsize); + error = lodepng_inflatev(&v, in, insize, settings); + *out = v.data; + *outsize = v.size; + return error; +} + 
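For reference, a minimal usage sketch of the raw-inflate entry point defined just above (illustrative only, not part of the lodepng sources vendored by this patch): it assumes lodepng.h declares lodepng_inflate() and lodepng_default_decompress_settings as in stock LodePNG, and that the default allocator is malloc/free so the output can be released with free(). It feeds a hand-built stored block (BTYPE=0), the simplest case, which is handled by inflateNoCompression above.

/* Illustrative sketch only -- not part of the lodepng sources being added here.
   Assumes lodepng.h declares lodepng_inflate() and lodepng_default_decompress_settings
   (as in stock LodePNG) and that the default allocator is malloc/free. */
#include <stdio.h>
#include <stdlib.h>
#include "lodepng.h"

int main(void)
{
    /* A hand-built raw deflate stream: one final stored block (BFINAL=1, BTYPE=00),
       then LEN / NLEN = ~LEN in little endian, then the 5 literal bytes "hello". */
    const unsigned char deflated[] = {
        0x01,             /* BFINAL=1, BTYPE=00, rest of byte is padding */
        0x05, 0x00,       /* LEN  = 5 */
        0xFA, 0xFF,       /* NLEN = 65535 - LEN */
        'h', 'e', 'l', 'l', 'o'
    };

    unsigned char* out = NULL; /* must start as NULL/0: lodepng grows it via realloc */
    size_t outsize = 0;
    unsigned error = lodepng_inflate(&out, &outsize, deflated, sizeof(deflated),
                                     &lodepng_default_decompress_settings);
    if(error)
        printf("inflate error %u\n", error);
    else
        printf("inflated %u bytes: %.*s\n", (unsigned)outsize, (int)outsize, out);

    free(out); /* lodepng's default lodepng_free is free() */
    return 0;
}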
+static unsigned inflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGDecompressSettings* settings) +{ + if(settings->custom_inflate) + { + return settings->custom_inflate(out, outsize, in, insize, settings); + } + else + { + return lodepng_inflate(out, outsize, in, insize, settings); + } +} + +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_ENCODER + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Deflator (Compressor) / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +static const size_t MAX_SUPPORTED_DEFLATE_LENGTH = 258; + +/*bitlen is the size in bits of the code*/ +static void addHuffmanSymbol(size_t* bp, ucvector* compressed, unsigned code, unsigned bitlen) +{ + addBitsToStreamReversed(bp, compressed, code, bitlen); +} + +/*search the index in the array, that has the largest value smaller than or equal to the given value, +given array must be sorted (if no value is smaller, it returns the size of the given array)*/ +static size_t searchCodeIndex(const unsigned* array, size_t array_size, size_t value) +{ + /*linear search implementation*/ + /*for(size_t i = 1; i < array_size; i++) if(array[i] > value) return i - 1; + return array_size - 1;*/ + + /*binary search implementation (not that much faster) (precondition: array_size > 0)*/ + size_t left = 1; + size_t right = array_size - 1; + while(left <= right) + { + size_t mid = (left + right) / 2; + if(array[mid] <= value) left = mid + 1; /*the value to find is more to the right*/ + else if(array[mid - 1] > value) right = mid - 1; /*the value to find is more to the left*/ + else return mid - 1; + } + return array_size - 1; +} + +static void addLengthDistance(uivector* values, size_t length, size_t distance) +{ + /*values in encoded vector are those used by deflate: + 0-255: literal bytes + 256: end + 257-285: length/distance pair (length code, followed by extra length bits, distance code, extra distance bits) + 286-287: invalid*/ + + unsigned length_code = (unsigned)searchCodeIndex(LENGTHBASE, 29, length); + unsigned extra_length = (unsigned)(length - LENGTHBASE[length_code]); + unsigned dist_code = (unsigned)searchCodeIndex(DISTANCEBASE, 30, distance); + unsigned extra_distance = (unsigned)(distance - DISTANCEBASE[dist_code]); + + uivector_push_back(values, length_code + FIRST_LENGTH_CODE_INDEX); + uivector_push_back(values, extra_length); + uivector_push_back(values, dist_code); + uivector_push_back(values, extra_distance); +} + +/*3 bytes of data get encoded into two bytes. The hash cannot use more than 3 +bytes as input because 3 is the minimum match length for deflate*/ +static const unsigned HASH_NUM_VALUES = 65536; +static const unsigned HASH_BIT_MASK = 65535; /*HASH_NUM_VALUES - 1, but C90 does not like that as initializer*/ + +typedef struct Hash +{ + int* head; /*hash value to head circular pos - can be outdated if went around window*/ + /*circular pos to prev circular pos*/ + unsigned short* chain; + int* val; /*circular pos to hash value*/ + + /*TODO: do this not only for zeros but for any repeated byte. 
However for PNG + it's always going to be the zeros that dominate, so not important for PNG*/ + int* headz; /*similar to head, but for chainz*/ + unsigned short* chainz; /*those with same amount of zeros*/ + unsigned short* zeros; /*length of zeros streak, used as a second hash chain*/ +} Hash; + +static unsigned hash_init(Hash* hash, unsigned windowsize) +{ + unsigned i; + hash->head = (int*)lodepng_malloc(sizeof(int) * HASH_NUM_VALUES); + hash->val = (int*)lodepng_malloc(sizeof(int) * windowsize); + hash->chain = (unsigned short*)lodepng_malloc(sizeof(unsigned short) * windowsize); + + hash->zeros = (unsigned short*)lodepng_malloc(sizeof(unsigned short) * windowsize); + hash->headz = (int*)lodepng_malloc(sizeof(int) * (MAX_SUPPORTED_DEFLATE_LENGTH + 1)); + hash->chainz = (unsigned short*)lodepng_malloc(sizeof(unsigned short) * windowsize); + + if(!hash->head || !hash->chain || !hash->val || !hash->headz|| !hash->chainz || !hash->zeros) + { + return 83; /*alloc fail*/ + } + + /*initialize hash table*/ + for(i = 0; i < HASH_NUM_VALUES; i++) hash->head[i] = -1; + for(i = 0; i < windowsize; i++) hash->val[i] = -1; + for(i = 0; i < windowsize; i++) hash->chain[i] = i; /*same value as index indicates uninitialized*/ + + for(i = 0; i <= MAX_SUPPORTED_DEFLATE_LENGTH; i++) hash->headz[i] = -1; + for(i = 0; i < windowsize; i++) hash->chainz[i] = i; /*same value as index indicates uninitialized*/ + + return 0; +} + +static void hash_cleanup(Hash* hash) +{ + lodepng_free(hash->head); + lodepng_free(hash->val); + lodepng_free(hash->chain); + + lodepng_free(hash->zeros); + lodepng_free(hash->headz); + lodepng_free(hash->chainz); +} + + + +static unsigned getHash(const unsigned char* data, size_t size, size_t pos) +{ + unsigned result = 0; + if (pos + 2 < size) + { + /*A simple shift and xor hash is used. Since the data of PNGs is dominated + by zeroes due to the filters, a better hash does not have a significant + effect on speed in traversing the chain, and causes more time spend on + calculating the hash.*/ + result ^= (unsigned)(data[pos + 0] << 0u); + result ^= (unsigned)(data[pos + 1] << 4u); + result ^= (unsigned)(data[pos + 2] << 8u); + } else { + size_t amount, i; + if(pos >= size) return 0; + amount = size - pos; + for(i = 0; i < amount; i++) result ^= (unsigned)(data[pos + i] << (i * 8u)); + } + return result & HASH_BIT_MASK; +} + +static unsigned countZeros(const unsigned char* data, size_t size, size_t pos) +{ + const unsigned char* start = data + pos; + const unsigned char* end = start + MAX_SUPPORTED_DEFLATE_LENGTH; + if(end > data + size) end = data + size; + data = start; + while (data != end && *data == 0) data++; + /*subtracting two addresses returned as 32-bit number (max value is MAX_SUPPORTED_DEFLATE_LENGTH)*/ + return (unsigned)(data - start); +} + +/*wpos = pos & (windowsize - 1)*/ +static void updateHashChain(Hash* hash, size_t wpos, unsigned hashval, unsigned short numzeros) +{ + hash->val[wpos] = (int)hashval; + if(hash->head[hashval] != -1) hash->chain[wpos] = hash->head[hashval]; + hash->head[hashval] = wpos; + + hash->zeros[wpos] = numzeros; + if(hash->headz[numzeros] != -1) hash->chainz[wpos] = hash->headz[numzeros]; + hash->headz[numzeros] = wpos; +} + +/* +LZ77-encode the data. Return value is error code. The input are raw bytes, the output +is in the form of unsigned integers with codes representing for example literal bytes, or +length/distance pairs. +It uses a hash table technique to let it encode faster. 
When doing LZ77 encoding, a +sliding window (of windowsize) is used, and all past bytes in that window can be used as +the "dictionary". A brute force search through all possible distances would be slow, and +this hash technique is one out of several ways to speed this up. +*/ +static unsigned encodeLZ77(uivector* out, Hash* hash, + const unsigned char* in, size_t inpos, size_t insize, unsigned windowsize, + unsigned minmatch, unsigned nicematch, unsigned lazymatching) +{ + size_t pos; + unsigned i, error = 0; + /*for large window lengths, assume the user wants no compression loss. Otherwise, max hash chain length speedup.*/ + unsigned maxchainlength = windowsize >= 8192 ? windowsize : windowsize / 8; + unsigned maxlazymatch = windowsize >= 8192 ? MAX_SUPPORTED_DEFLATE_LENGTH : 64; + + unsigned usezeros = 1; /*not sure if setting it to false for windowsize < 8192 is better or worse*/ + unsigned numzeros = 0; + + unsigned offset; /*the offset represents the distance in LZ77 terminology*/ + unsigned length; + unsigned lazy = 0; + unsigned lazylength = 0, lazyoffset = 0; + unsigned hashval; + unsigned current_offset, current_length; + unsigned prev_offset; + const unsigned char *lastptr, *foreptr, *backptr; + unsigned hashpos; + + if(windowsize <= 0 || windowsize > 32768) return 60; /*error: windowsize smaller/larger than allowed*/ + if((windowsize & (windowsize - 1)) != 0) return 90; /*error: must be power of two*/ + + if(nicematch > MAX_SUPPORTED_DEFLATE_LENGTH) nicematch = MAX_SUPPORTED_DEFLATE_LENGTH; + + for(pos = inpos; pos < insize; pos++) + { + size_t wpos = pos & (windowsize - 1); /*position for in 'circular' hash buffers*/ + unsigned chainlength = 0; + + hashval = getHash(in, insize, pos); + + if(usezeros && hashval == 0) + { + if (numzeros == 0) numzeros = countZeros(in, insize, pos); + else if (pos + numzeros > insize || in[pos + numzeros - 1] != 0) numzeros--; + } + else + { + numzeros = 0; + } + + updateHashChain(hash, wpos, hashval, numzeros); + + /*the length and offset found for the current position*/ + length = 0; + offset = 0; + + hashpos = hash->chain[wpos]; + + lastptr = &in[insize < pos + MAX_SUPPORTED_DEFLATE_LENGTH ? insize : pos + MAX_SUPPORTED_DEFLATE_LENGTH]; + + /*search for the longest string*/ + prev_offset = 0; + for(;;) + { + if(chainlength++ >= maxchainlength) break; + current_offset = hashpos <= wpos ? wpos - hashpos : wpos - hashpos + windowsize; + + if(current_offset < prev_offset) break; /*stop when went completely around the circular buffer*/ + prev_offset = current_offset; + if(current_offset > 0) + { + /*test the next characters*/ + foreptr = &in[pos]; + backptr = &in[pos - current_offset]; + + /*common case in PNGs is lots of zeros. Quickly skip over them as a speedup*/ + if(numzeros >= 3) + { + unsigned skip = hash->zeros[hashpos]; + if(skip > numzeros) skip = numzeros; + backptr += skip; + foreptr += skip; + } + + while(foreptr != lastptr && *backptr == *foreptr) /*maximum supported length by deflate is max length*/ + { + ++backptr; + ++foreptr; + } + current_length = (unsigned)(foreptr - &in[pos]); + + if(current_length > length) + { + length = current_length; /*the longest length*/ + offset = current_offset; /*the offset that is related to this longest length*/ + /*jump out once a length of max length is found (speed gain). 
This also jumps + out if length is MAX_SUPPORTED_DEFLATE_LENGTH*/ + if(current_length >= nicematch) break; + } + } + + if(hashpos == hash->chain[hashpos]) break; + + if(numzeros >= 3 && length > numzeros) { + hashpos = hash->chainz[hashpos]; + if(hash->zeros[hashpos] != numzeros) break; + } else { + hashpos = hash->chain[hashpos]; + /*outdated hash value, happens if particular value was not encountered in whole last window*/ + if(hash->val[hashpos] != (int)hashval) break; + } + } + + if(lazymatching) + { + if(!lazy && length >= 3 && length <= maxlazymatch && length < MAX_SUPPORTED_DEFLATE_LENGTH) + { + lazy = 1; + lazylength = length; + lazyoffset = offset; + continue; /*try the next byte*/ + } + if(lazy) + { + lazy = 0; + if(pos == 0) ERROR_BREAK(81); + if(length > lazylength + 1) + { + /*push the previous character as literal*/ + if(!uivector_push_back(out, in[pos - 1])) ERROR_BREAK(83 /*alloc fail*/); + } + else + { + length = lazylength; + offset = lazyoffset; + hash->head[hashval] = -1; /*the same hashchain update will be done, this ensures no wrong alteration*/ + hash->headz[numzeros] = -1; /*idem*/ + pos--; + } + } + } + if(length >= 3 && offset > windowsize) ERROR_BREAK(86 /*too big (or overflown negative) offset*/); + + /*encode it as length/distance pair or literal value*/ + if(length < 3) /*only lengths of 3 or higher are supported as length/distance pair*/ + { + if(!uivector_push_back(out, in[pos])) ERROR_BREAK(83 /*alloc fail*/); + } + else if(length < minmatch || (length == 3 && offset > 4096)) + { + /*compensate for the fact that longer offsets have more extra bits, a + length of only 3 may be not worth it then*/ + if(!uivector_push_back(out, in[pos])) ERROR_BREAK(83 /*alloc fail*/); + } + else + { + addLengthDistance(out, length, offset); + for(i = 1; i < length; i++) + { + pos++; + wpos = pos & (windowsize - 1); + hashval = getHash(in, insize, pos); + if(usezeros && hashval == 0) + { + if (numzeros == 0) numzeros = countZeros(in, insize, pos); + else if (pos + numzeros > insize || in[pos + numzeros - 1] != 0) numzeros--; + } + else + { + numzeros = 0; + } + updateHashChain(hash, wpos, hashval, numzeros); + } + } + } /*end of the loop through each character of input*/ + + return error; +} + +/* /////////////////////////////////////////////////////////////////////////// */ + +static unsigned deflateNoCompression(ucvector* out, const unsigned char* data, size_t datasize) +{ + /*non compressed deflate block data: 1 bit BFINAL,2 bits BTYPE,(5 bits): it jumps to start of next byte, + 2 bytes LEN, 2 bytes NLEN, LEN bytes literal DATA*/ + + size_t i, j, numdeflateblocks = (datasize + 65534) / 65535; + unsigned datapos = 0; + for(i = 0; i < numdeflateblocks; i++) + { + unsigned BFINAL, BTYPE, LEN, NLEN; + unsigned char firstbyte; + + BFINAL = (i == numdeflateblocks - 1); + BTYPE = 0; + + firstbyte = (unsigned char)(BFINAL + ((BTYPE & 1) << 1) + ((BTYPE & 2) << 1)); + ucvector_push_back(out, firstbyte); + + LEN = 65535; + if(datasize - datapos < 65535) LEN = (unsigned)datasize - datapos; + NLEN = 65535 - LEN; + + ucvector_push_back(out, (unsigned char)(LEN % 256)); + ucvector_push_back(out, (unsigned char)(LEN / 256)); + ucvector_push_back(out, (unsigned char)(NLEN % 256)); + ucvector_push_back(out, (unsigned char)(NLEN / 256)); + + /*Decompressed data*/ + for(j = 0; j < 65535 && datapos < datasize; j++) + { + ucvector_push_back(out, data[datapos++]); + } + } + + return 0; +} + +/* +write the lz77-encoded data, which has lit, len and dist codes, to compressed stream using huffman 
trees. +tree_ll: the tree for lit and len codes. +tree_d: the tree for distance codes. +*/ +static void writeLZ77data(size_t* bp, ucvector* out, const uivector* lz77_encoded, + const HuffmanTree* tree_ll, const HuffmanTree* tree_d) +{ + size_t i = 0; + for(i = 0; i < lz77_encoded->size; i++) + { + unsigned val = lz77_encoded->data[i]; + addHuffmanSymbol(bp, out, HuffmanTree_getCode(tree_ll, val), HuffmanTree_getLength(tree_ll, val)); + if(val > 256) /*for a length code, 3 more things have to be added*/ + { + unsigned length_index = val - FIRST_LENGTH_CODE_INDEX; + unsigned n_length_extra_bits = LENGTHEXTRA[length_index]; + unsigned length_extra_bits = lz77_encoded->data[++i]; + + unsigned distance_code = lz77_encoded->data[++i]; + + unsigned distance_index = distance_code; + unsigned n_distance_extra_bits = DISTANCEEXTRA[distance_index]; + unsigned distance_extra_bits = lz77_encoded->data[++i]; + + addBitsToStream(bp, out, length_extra_bits, n_length_extra_bits); + addHuffmanSymbol(bp, out, HuffmanTree_getCode(tree_d, distance_code), + HuffmanTree_getLength(tree_d, distance_code)); + addBitsToStream(bp, out, distance_extra_bits, n_distance_extra_bits); + } + } +} + +/*Deflate for a block of type "dynamic", that is, with freely, optimally, created huffman trees*/ +static unsigned deflateDynamic(ucvector* out, size_t* bp, Hash* hash, + const unsigned char* data, size_t datapos, size_t dataend, + const LodePNGCompressSettings* settings, unsigned final) +{ + unsigned error = 0; + + /* + A block is compressed as follows: The PNG data is lz77 encoded, resulting in + literal bytes and length/distance pairs. This is then huffman compressed with + two huffman trees. One huffman tree is used for the lit and len values ("ll"), + another huffman tree is used for the dist values ("d"). These two trees are + stored using their code lengths, and to compress even more these code lengths + are also run-length encoded and huffman compressed. This gives a huffman tree + of code lengths "cl". The code lenghts used to describe this third tree are + the code length code lengths ("clcl"). + */ + + /*The lz77 encoded data, represented with integers since there will also be length and distance codes in it*/ + uivector lz77_encoded; + HuffmanTree tree_ll; /*tree for lit,len values*/ + HuffmanTree tree_d; /*tree for distance codes*/ + HuffmanTree tree_cl; /*tree for encoding the code lengths representing tree_ll and tree_d*/ + uivector frequencies_ll; /*frequency of lit,len codes*/ + uivector frequencies_d; /*frequency of dist codes*/ + uivector frequencies_cl; /*frequency of code length codes*/ + uivector bitlen_lld; /*lit,len,dist code lenghts (int bits), literally (without repeat codes).*/ + uivector bitlen_lld_e; /*bitlen_lld encoded with repeat codes (this is a rudemtary run length compression)*/ + /*bitlen_cl is the code length code lengths ("clcl"). The bit lengths of codes to represent tree_cl + (these are written as is in the file, it would be crazy to compress these using yet another huffman + tree that needs to be represented by yet another set of code lengths)*/ + uivector bitlen_cl; + size_t datasize = dataend - datapos; + + /* + Due to the huffman compression of huffman tree representations ("two levels"), there are some anologies: + bitlen_lld is to tree_cl what data is to tree_ll and tree_d. + bitlen_lld_e is to bitlen_lld what lz77_encoded is to data. + bitlen_cl is to bitlen_lld_e what bitlen_lld is to lz77_encoded. 
+ */ + + unsigned BFINAL = final; + size_t numcodes_ll, numcodes_d, i; + unsigned HLIT, HDIST, HCLEN; + + uivector_init(&lz77_encoded); + HuffmanTree_init(&tree_ll); + HuffmanTree_init(&tree_d); + HuffmanTree_init(&tree_cl); + uivector_init(&frequencies_ll); + uivector_init(&frequencies_d); + uivector_init(&frequencies_cl); + uivector_init(&bitlen_lld); + uivector_init(&bitlen_lld_e); + uivector_init(&bitlen_cl); + + /*This while loop never loops due to a break at the end, it is here to + allow breaking out of it to the cleanup phase on error conditions.*/ + while(!error) + { + if(settings->use_lz77) + { + error = encodeLZ77(&lz77_encoded, hash, data, datapos, dataend, settings->windowsize, + settings->minmatch, settings->nicematch, settings->lazymatching); + if(error) break; + } + else + { + if(!uivector_resize(&lz77_encoded, datasize)) ERROR_BREAK(83 /*alloc fail*/); + for(i = datapos; i < dataend; i++) lz77_encoded.data[i] = data[i]; /*no LZ77, but still will be Huffman compressed*/ + } + + if(!uivector_resizev(&frequencies_ll, 286, 0)) ERROR_BREAK(83 /*alloc fail*/); + if(!uivector_resizev(&frequencies_d, 30, 0)) ERROR_BREAK(83 /*alloc fail*/); + + /*Count the frequencies of lit, len and dist codes*/ + for(i = 0; i < lz77_encoded.size; i++) + { + unsigned symbol = lz77_encoded.data[i]; + frequencies_ll.data[symbol]++; + if(symbol > 256) + { + unsigned dist = lz77_encoded.data[i + 2]; + frequencies_d.data[dist]++; + i += 3; + } + } + frequencies_ll.data[256] = 1; /*there will be exactly 1 end code, at the end of the block*/ + + /*Make both huffman trees, one for the lit and len codes, one for the dist codes*/ + error = HuffmanTree_makeFromFrequencies(&tree_ll, frequencies_ll.data, 257, frequencies_ll.size, 15); + if(error) break; + /*2, not 1, is chosen for mincodes: some buggy PNG decoders require at least 2 symbols in the dist tree*/ + error = HuffmanTree_makeFromFrequencies(&tree_d, frequencies_d.data, 2, frequencies_d.size, 15); + if(error) break; + + numcodes_ll = tree_ll.numcodes; if(numcodes_ll > 286) numcodes_ll = 286; + numcodes_d = tree_d.numcodes; if(numcodes_d > 30) numcodes_d = 30; + /*store the code lengths of both generated trees in bitlen_lld*/ + for(i = 0; i < numcodes_ll; i++) uivector_push_back(&bitlen_lld, HuffmanTree_getLength(&tree_ll, (unsigned)i)); + for(i = 0; i < numcodes_d; i++) uivector_push_back(&bitlen_lld, HuffmanTree_getLength(&tree_d, (unsigned)i)); + + /*run-length compress bitlen_ldd into bitlen_lld_e by using repeat codes 16 (copy length 3-6 times), + 17 (3-10 zeroes), 18 (11-138 zeroes)*/ + for(i = 0; i < (unsigned)bitlen_lld.size; i++) + { + unsigned j = 0; /*amount of repititions*/ + while(i + j + 1 < (unsigned)bitlen_lld.size && bitlen_lld.data[i + j + 1] == bitlen_lld.data[i]) j++; + + if(bitlen_lld.data[i] == 0 && j >= 2) /*repeat code for zeroes*/ + { + j++; /*include the first zero*/ + if(j <= 10) /*repeat code 17 supports max 10 zeroes*/ + { + uivector_push_back(&bitlen_lld_e, 17); + uivector_push_back(&bitlen_lld_e, j - 3); + } + else /*repeat code 18 supports max 138 zeroes*/ + { + if(j > 138) j = 138; + uivector_push_back(&bitlen_lld_e, 18); + uivector_push_back(&bitlen_lld_e, j - 11); + } + i += (j - 1); + } + else if(j >= 3) /*repeat code for value other than zero*/ + { + size_t k; + unsigned num = j / 6, rest = j % 6; + uivector_push_back(&bitlen_lld_e, bitlen_lld.data[i]); + for(k = 0; k < num; k++) + { + uivector_push_back(&bitlen_lld_e, 16); + uivector_push_back(&bitlen_lld_e, 6 - 3); + } + if(rest >= 3) + { + 
uivector_push_back(&bitlen_lld_e, 16); + uivector_push_back(&bitlen_lld_e, rest - 3); + } + else j -= rest; + i += j; + } + else /*too short to benefit from repeat code*/ + { + uivector_push_back(&bitlen_lld_e, bitlen_lld.data[i]); + } + } + + /*generate tree_cl, the huffmantree of huffmantrees*/ + + if(!uivector_resizev(&frequencies_cl, NUM_CODE_LENGTH_CODES, 0)) ERROR_BREAK(83 /*alloc fail*/); + for(i = 0; i < bitlen_lld_e.size; i++) + { + frequencies_cl.data[bitlen_lld_e.data[i]]++; + /*after a repeat code come the bits that specify the number of repetitions, + those don't need to be in the frequencies_cl calculation*/ + if(bitlen_lld_e.data[i] >= 16) i++; + } + + error = HuffmanTree_makeFromFrequencies(&tree_cl, frequencies_cl.data, + frequencies_cl.size, frequencies_cl.size, 7); + if(error) break; + + if(!uivector_resize(&bitlen_cl, tree_cl.numcodes)) ERROR_BREAK(83 /*alloc fail*/); + for(i = 0; i < tree_cl.numcodes; i++) + { + /*lenghts of code length tree is in the order as specified by deflate*/ + bitlen_cl.data[i] = HuffmanTree_getLength(&tree_cl, CLCL_ORDER[i]); + } + while(bitlen_cl.data[bitlen_cl.size - 1] == 0 && bitlen_cl.size > 4) + { + /*remove zeros at the end, but minimum size must be 4*/ + if(!uivector_resize(&bitlen_cl, bitlen_cl.size - 1)) ERROR_BREAK(83 /*alloc fail*/); + } + if(error) break; + + /* + Write everything into the output + + After the BFINAL and BTYPE, the dynamic block consists out of the following: + - 5 bits HLIT, 5 bits HDIST, 4 bits HCLEN + - (HCLEN+4)*3 bits code lengths of code length alphabet + - HLIT + 257 code lenghts of lit/length alphabet (encoded using the code length + alphabet, + possible repetition codes 16, 17, 18) + - HDIST + 1 code lengths of distance alphabet (encoded using the code length + alphabet, + possible repetition codes 16, 17, 18) + - compressed data + - 256 (end code) + */ + + /*Write block type*/ + addBitToStream(bp, out, BFINAL); + addBitToStream(bp, out, 0); /*first bit of BTYPE "dynamic"*/ + addBitToStream(bp, out, 1); /*second bit of BTYPE "dynamic"*/ + + /*write the HLIT, HDIST and HCLEN values*/ + HLIT = (unsigned)(numcodes_ll - 257); + HDIST = (unsigned)(numcodes_d - 1); + HCLEN = (unsigned)bitlen_cl.size - 4; + /*trim zeroes for HCLEN. 
HLIT and HDIST were already trimmed at tree creation*/ + while(!bitlen_cl.data[HCLEN + 4 - 1] && HCLEN > 0) HCLEN--; + addBitsToStream(bp, out, HLIT, 5); + addBitsToStream(bp, out, HDIST, 5); + addBitsToStream(bp, out, HCLEN, 4); + + /*write the code lenghts of the code length alphabet*/ + for(i = 0; i < HCLEN + 4; i++) addBitsToStream(bp, out, bitlen_cl.data[i], 3); + + /*write the lenghts of the lit/len AND the dist alphabet*/ + for(i = 0; i < bitlen_lld_e.size; i++) + { + addHuffmanSymbol(bp, out, HuffmanTree_getCode(&tree_cl, bitlen_lld_e.data[i]), + HuffmanTree_getLength(&tree_cl, bitlen_lld_e.data[i])); + /*extra bits of repeat codes*/ + if(bitlen_lld_e.data[i] == 16) addBitsToStream(bp, out, bitlen_lld_e.data[++i], 2); + else if(bitlen_lld_e.data[i] == 17) addBitsToStream(bp, out, bitlen_lld_e.data[++i], 3); + else if(bitlen_lld_e.data[i] == 18) addBitsToStream(bp, out, bitlen_lld_e.data[++i], 7); + } + + /*write the compressed data symbols*/ + writeLZ77data(bp, out, &lz77_encoded, &tree_ll, &tree_d); + /*error: the length of the end code 256 must be larger than 0*/ + if(HuffmanTree_getLength(&tree_ll, 256) == 0) ERROR_BREAK(64); + + /*write the end code*/ + addHuffmanSymbol(bp, out, HuffmanTree_getCode(&tree_ll, 256), HuffmanTree_getLength(&tree_ll, 256)); + + break; /*end of error-while*/ + } + + /*cleanup*/ + uivector_cleanup(&lz77_encoded); + HuffmanTree_cleanup(&tree_ll); + HuffmanTree_cleanup(&tree_d); + HuffmanTree_cleanup(&tree_cl); + uivector_cleanup(&frequencies_ll); + uivector_cleanup(&frequencies_d); + uivector_cleanup(&frequencies_cl); + uivector_cleanup(&bitlen_lld_e); + uivector_cleanup(&bitlen_lld); + uivector_cleanup(&bitlen_cl); + + return error; +} + +static unsigned deflateFixed(ucvector* out, size_t* bp, Hash* hash, + const unsigned char* data, + size_t datapos, size_t dataend, + const LodePNGCompressSettings* settings, unsigned final) +{ + HuffmanTree tree_ll; /*tree for literal values and length codes*/ + HuffmanTree tree_d; /*tree for distance codes*/ + + unsigned BFINAL = final; + unsigned error = 0; + size_t i; + + HuffmanTree_init(&tree_ll); + HuffmanTree_init(&tree_d); + + generateFixedLitLenTree(&tree_ll); + generateFixedDistanceTree(&tree_d); + + addBitToStream(bp, out, BFINAL); + addBitToStream(bp, out, 1); /*first bit of BTYPE*/ + addBitToStream(bp, out, 0); /*second bit of BTYPE*/ + + if(settings->use_lz77) /*LZ77 encoded*/ + { + uivector lz77_encoded; + uivector_init(&lz77_encoded); + error = encodeLZ77(&lz77_encoded, hash, data, datapos, dataend, settings->windowsize, + settings->minmatch, settings->nicematch, settings->lazymatching); + if(!error) writeLZ77data(bp, out, &lz77_encoded, &tree_ll, &tree_d); + uivector_cleanup(&lz77_encoded); + } + else /*no LZ77, but still will be Huffman compressed*/ + { + for(i = datapos; i < dataend; i++) + { + addHuffmanSymbol(bp, out, HuffmanTree_getCode(&tree_ll, data[i]), HuffmanTree_getLength(&tree_ll, data[i])); + } + } + /*add END code*/ + if(!error) addHuffmanSymbol(bp, out, HuffmanTree_getCode(&tree_ll, 256), HuffmanTree_getLength(&tree_ll, 256)); + + /*cleanup*/ + HuffmanTree_cleanup(&tree_ll); + HuffmanTree_cleanup(&tree_d); + + return error; +} + +static unsigned lodepng_deflatev(ucvector* out, const unsigned char* in, size_t insize, + const LodePNGCompressSettings* settings) +{ + unsigned error = 0; + size_t i, blocksize, numdeflateblocks; + size_t bp = 0; /*the bit pointer*/ + Hash hash; + + if(settings->btype > 2) return 61; + else if(settings->btype == 0) return deflateNoCompression(out, in, 
insize); + else if(settings->btype == 1) blocksize = insize; + else /*if(settings->btype == 2)*/ + { + blocksize = insize / 8 + 8; + if(blocksize < 65535) blocksize = 65535; + } + + numdeflateblocks = (insize + blocksize - 1) / blocksize; + if(numdeflateblocks == 0) numdeflateblocks = 1; + + error = hash_init(&hash, settings->windowsize); + if(error) return error; + + for(i = 0; i < numdeflateblocks && !error; i++) + { + unsigned final = (i == numdeflateblocks - 1); + size_t start = i * blocksize; + size_t end = start + blocksize; + if(end > insize) end = insize; + + if(settings->btype == 1) error = deflateFixed(out, &bp, &hash, in, start, end, settings, final); + else if(settings->btype == 2) error = deflateDynamic(out, &bp, &hash, in, start, end, settings, final); + } + + hash_cleanup(&hash); + + return error; +} + +unsigned lodepng_deflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGCompressSettings* settings) +{ + unsigned error; + ucvector v; + ucvector_init_buffer(&v, *out, *outsize); + error = lodepng_deflatev(&v, in, insize, settings); + *out = v.data; + *outsize = v.size; + return error; +} + +static unsigned deflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGCompressSettings* settings) +{ + if(settings->custom_deflate) + { + return settings->custom_deflate(out, outsize, in, insize, settings); + } + else + { + return lodepng_deflate(out, outsize, in, insize, settings); + } +} + +#endif /*LODEPNG_COMPILE_DECODER*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Adler32 */ +/* ////////////////////////////////////////////////////////////////////////// */ + +static unsigned update_adler32(unsigned adler, const unsigned char* data, unsigned len) +{ + unsigned s1 = adler & 0xffff; + unsigned s2 = (adler >> 16) & 0xffff; + + while(len > 0) + { + /*at least 5550 sums can be done before the sums overflow, saving a lot of module divisions*/ + unsigned amount = len > 5550 ? 
5550 : len; + len -= amount; + while(amount > 0) + { + s1 += (*data++); + s2 += s1; + amount--; + } + s1 %= 65521; + s2 %= 65521; + } + + return (s2 << 16) | s1; +} + +/*Return the adler32 of the bytes data[0..len-1]*/ +static unsigned adler32(const unsigned char* data, unsigned len) +{ + return update_adler32(1L, data, len); +} + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Zlib / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_DECODER + +unsigned lodepng_zlib_decompress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGDecompressSettings* settings) +{ + unsigned error = 0; + unsigned CM, CINFO, FDICT; + + if(insize < 2) return 53; /*error, size of zlib data too small*/ + /*read information from zlib header*/ + if((in[0] * 256 + in[1]) % 31 != 0) + { + /*error: 256 * in[0] + in[1] must be a multiple of 31, the FCHECK value is supposed to be made that way*/ + return 24; + } + + CM = in[0] & 15; + CINFO = (in[0] >> 4) & 15; + /*FCHECK = in[1] & 31;*/ /*FCHECK is already tested above*/ + FDICT = (in[1] >> 5) & 1; + /*FLEVEL = (in[1] >> 6) & 3;*/ /*FLEVEL is not used here*/ + + if(CM != 8 || CINFO > 7) + { + /*error: only compression method 8: inflate with sliding window of 32k is supported by the PNG spec*/ + return 25; + } + if(FDICT != 0) + { + /*error: the specification of PNG says about the zlib stream: + "The additional flags shall not specify a preset dictionary."*/ + return 26; + } + + error = inflate(out, outsize, in + 2, insize - 2, settings); + if(error) return error; + + if(!settings->ignore_adler32) + { + unsigned ADLER32 = lodepng_read32bitInt(&in[insize - 4]); + unsigned checksum = adler32(*out, (unsigned)(*outsize)); + if(checksum != ADLER32) return 58; /*error, adler checksum not correct, data must be corrupted*/ + } + + return 0; /*no error*/ +} + +static unsigned zlib_decompress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGDecompressSettings* settings) +{ + if(settings->custom_zlib) + { + return settings->custom_zlib(out, outsize, in, insize, settings); + } + else + { + return lodepng_zlib_decompress(out, outsize, in, insize, settings); + } +} + +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_ENCODER + +unsigned lodepng_zlib_compress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGCompressSettings* settings) +{ + /*initially, *out must be NULL and outsize 0, if you just give some random *out + that's pointing to a non allocated buffer, this'll crash*/ + ucvector outv; + size_t i; + unsigned error; + unsigned char* deflatedata = 0; + size_t deflatesize = 0; + + unsigned ADLER32; + /*zlib data: 1 byte CMF (CM+CINFO), 1 byte FLG, deflate data, 4 byte ADLER32 checksum of the Decompressed data*/ + unsigned CMF = 120; /*0b01111000: CM 8, CINFO 7. 
With CINFO 7, any window size up to 32768 can be used.*/ + unsigned FLEVEL = 0; + unsigned FDICT = 0; + unsigned CMFFLG = 256 * CMF + FDICT * 32 + FLEVEL * 64; + unsigned FCHECK = 31 - CMFFLG % 31; + CMFFLG += FCHECK; + + /*ucvector-controlled version of the output buffer, for dynamic array*/ + ucvector_init_buffer(&outv, *out, *outsize); + + ucvector_push_back(&outv, (unsigned char)(CMFFLG / 256)); + ucvector_push_back(&outv, (unsigned char)(CMFFLG % 256)); + + error = deflate(&deflatedata, &deflatesize, in, insize, settings); + + if(!error) + { + ADLER32 = adler32(in, (unsigned)insize); + for(i = 0; i < deflatesize; i++) ucvector_push_back(&outv, deflatedata[i]); + lodepng_free(deflatedata); + lodepng_add32bitInt(&outv, ADLER32); + } + + *out = outv.data; + *outsize = outv.size; + + return error; +} + +/* compress using the default or custom zlib function */ +static unsigned zlib_compress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGCompressSettings* settings) +{ + if(settings->custom_zlib) + { + return settings->custom_zlib(out, outsize, in, insize, settings); + } + else + { + return lodepng_zlib_compress(out, outsize, in, insize, settings); + } +} + +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#else /*no LODEPNG_COMPILE_ZLIB*/ + +#ifdef LODEPNG_COMPILE_DECODER +static unsigned zlib_decompress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGDecompressSettings* settings) +{ + if (!settings->custom_zlib) return 87; /*no custom zlib function provided */ + return settings->custom_zlib(out, outsize, in, insize, settings); +} +#endif /*LODEPNG_COMPILE_DECODER*/ +#ifdef LODEPNG_COMPILE_ENCODER +static unsigned zlib_compress(unsigned char** out, size_t* outsize, const unsigned char* in, + size_t insize, const LodePNGCompressSettings* settings) +{ + if (!settings->custom_zlib) return 87; /*no custom zlib function provided */ + return settings->custom_zlib(out, outsize, in, insize, settings); +} +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#endif /*LODEPNG_COMPILE_ZLIB*/ + +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_ENCODER + +/*this is a good tradeoff between speed and compression ratio*/ +#define DEFAULT_WINDOWSIZE 2048 + +void lodepng_compress_settings_init(LodePNGCompressSettings* settings) +{ + /*compress with dynamic huffman tree (not in the mathematical sense, just not the predefined one)*/ + settings->btype = 2; + settings->use_lz77 = 1; + settings->windowsize = DEFAULT_WINDOWSIZE; + settings->minmatch = 3; + settings->nicematch = 128; + settings->lazymatching = 1; + + settings->custom_zlib = 0; + settings->custom_deflate = 0; + settings->custom_context = 0; +} + +const LodePNGCompressSettings lodepng_default_compress_settings = {2, 1, DEFAULT_WINDOWSIZE, 3, 128, 1, 0, 0, 0}; + + +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#ifdef LODEPNG_COMPILE_DECODER + +void lodepng_decompress_settings_init(LodePNGDecompressSettings* settings) +{ + settings->ignore_adler32 = 0; + + settings->custom_zlib = 0; + settings->custom_inflate = 0; + settings->custom_context = 0; +} + +const LodePNGDecompressSettings lodepng_default_decompress_settings = {0, 0, 0, 0}; + +#endif /*LODEPNG_COMPILE_DECODER*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* // End of Zlib related code. Begin of PNG related code. 
// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_PNG + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / CRC32 / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/* CRC polynomial: 0xedb88320 */ +static unsigned lodepng_crc32_table[256] = { + 0u, 1996959894u, 3993919788u, 2567524794u, 124634137u, 1886057615u, 3915621685u, 2657392035u, + 249268274u, 2044508324u, 3772115230u, 2547177864u, 162941995u, 2125561021u, 3887607047u, 2428444049u, + 498536548u, 1789927666u, 4089016648u, 2227061214u, 450548861u, 1843258603u, 4107580753u, 2211677639u, + 325883990u, 1684777152u, 4251122042u, 2321926636u, 335633487u, 1661365465u, 4195302755u, 2366115317u, + 997073096u, 1281953886u, 3579855332u, 2724688242u, 1006888145u, 1258607687u, 3524101629u, 2768942443u, + 901097722u, 1119000684u, 3686517206u, 2898065728u, 853044451u, 1172266101u, 3705015759u, 2882616665u, + 651767980u, 1373503546u, 3369554304u, 3218104598u, 565507253u, 1454621731u, 3485111705u, 3099436303u, + 671266974u, 1594198024u, 3322730930u, 2970347812u, 795835527u, 1483230225u, 3244367275u, 3060149565u, + 1994146192u, 31158534u, 2563907772u, 4023717930u, 1907459465u, 112637215u, 2680153253u, 3904427059u, + 2013776290u, 251722036u, 2517215374u, 3775830040u, 2137656763u, 141376813u, 2439277719u, 3865271297u, + 1802195444u, 476864866u, 2238001368u, 4066508878u, 1812370925u, 453092731u, 2181625025u, 4111451223u, + 1706088902u, 314042704u, 2344532202u, 4240017532u, 1658658271u, 366619977u, 2362670323u, 4224994405u, + 1303535960u, 984961486u, 2747007092u, 3569037538u, 1256170817u, 1037604311u, 2765210733u, 3554079995u, + 1131014506u, 879679996u, 2909243462u, 3663771856u, 1141124467u, 855842277u, 2852801631u, 3708648649u, + 1342533948u, 654459306u, 3188396048u, 3373015174u, 1466479909u, 544179635u, 3110523913u, 3462522015u, + 1591671054u, 702138776u, 2966460450u, 3352799412u, 1504918807u, 783551873u, 3082640443u, 3233442989u, + 3988292384u, 2596254646u, 62317068u, 1957810842u, 3939845945u, 2647816111u, 81470997u, 1943803523u, + 3814918930u, 2489596804u, 225274430u, 2053790376u, 3826175755u, 2466906013u, 167816743u, 2097651377u, + 4027552580u, 2265490386u, 503444072u, 1762050814u, 4150417245u, 2154129355u, 426522225u, 1852507879u, + 4275313526u, 2312317920u, 282753626u, 1742555852u, 4189708143u, 2394877945u, 397917763u, 1622183637u, + 3604390888u, 2714866558u, 953729732u, 1340076626u, 3518719985u, 2797360999u, 1068828381u, 1219638859u, + 3624741850u, 2936675148u, 906185462u, 1090812512u, 3747672003u, 2825379669u, 829329135u, 1181335161u, + 3412177804u, 3160834842u, 628085408u, 1382605366u, 3423369109u, 3138078467u, 570562233u, 1426400815u, + 3317316542u, 2998733608u, 733239954u, 1555261956u, 3268935591u, 3050360625u, 752459403u, 1541320221u, + 2607071920u, 3965973030u, 1969922972u, 40735498u, 2617837225u, 3943577151u, 1913087877u, 83908371u, + 2512341634u, 3803740692u, 2075208622u, 213261112u, 2463272603u, 3855990285u, 2094854071u, 198958881u, + 2262029012u, 4057260610u, 1759359992u, 534414190u, 2176718541u, 4139329115u, 1873836001u, 414664567u, + 2282248934u, 4279200368u, 1711684554u, 285281116u, 2405801727u, 4167216745u, 1634467795u, 376229701u, + 2685067896u, 3608007406u, 1308918612u, 956543938u, 2808555105u, 3495958263u, 1231636301u, 1047427035u, + 2932959818u, 3654703836u, 1088359270u, 936918000u, 2847714899u, 3736837829u, 
1202900863u, 817233897u, + 3183342108u, 3401237130u, 1404277552u, 615818150u, 3134207493u, 3453421203u, 1423857449u, 601450431u, + 3009837614u, 3294710456u, 1567103746u, 711928724u, 3020668471u, 3272380065u, 1510334235u, 755167117u +}; + +/*Return the CRC of the bytes buf[0..len-1].*/ +unsigned lodepng_crc32(const unsigned char* buf, size_t len) +{ + unsigned c = 0xffffffffL; + size_t n; + + for(n = 0; n < len; n++) + { + c = lodepng_crc32_table[(c ^ buf[n]) & 0xff] ^ (c >> 8); + } + return c ^ 0xffffffffL; +} + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Reading and writing single bits and bytes from/to stream for LodePNG / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +static unsigned char readBitFromReversedStream(size_t* bitpointer, const unsigned char* bitstream) +{ + unsigned char result = (unsigned char)((bitstream[(*bitpointer) >> 3] >> (7 - ((*bitpointer) & 0x7))) & 1); + (*bitpointer)++; + return result; +} + +static unsigned readBitsFromReversedStream(size_t* bitpointer, const unsigned char* bitstream, size_t nbits) +{ + unsigned result = 0; + size_t i; + for ( i = 1; i <= nbits; i++ ) + { + result += (unsigned)readBitFromReversedStream( bitpointer, bitstream ) << (nbits - i); + } + return result; +} + +#ifdef LODEPNG_COMPILE_DECODER +static void setBitOfReversedStream0(size_t* bitpointer, unsigned char* bitstream, unsigned char bit) +{ + /*the current bit in bitstream must be 0 for this to work*/ + if(bit) + { + /*earlier bit of huffman code is in a lesser significant bit of an earlier byte*/ + bitstream[(*bitpointer) >> 3] |= (bit << (7 - ((*bitpointer) & 0x7))); + } + (*bitpointer)++; +} +#endif /*LODEPNG_COMPILE_DECODER*/ + +static void setBitOfReversedStream(size_t* bitpointer, unsigned char* bitstream, unsigned char bit) +{ + /*the current bit in bitstream may be 0 or 1 for this to work*/ + if(bit == 0) bitstream[(*bitpointer) >> 3] &= (unsigned char)(~(1 << (7 - ((*bitpointer) & 0x7)))); + else bitstream[(*bitpointer) >> 3] |= (1 << (7 - ((*bitpointer) & 0x7))); + (*bitpointer)++; +} + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / PNG chunks / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +unsigned lodepng_chunk_length(const unsigned char* chunk) +{ + return lodepng_read32bitInt(&chunk[0]); +} + +void lodepng_chunk_type(char type[5], const unsigned char* chunk) +{ + unsigned i; + for(i = 0; i < 4; i++) type[i] = (char)chunk[4 + i]; + type[4] = 0; /*null termination char*/ +} + +unsigned char lodepng_chunk_type_equals(const unsigned char* chunk, const char* type) +{ + if(strlen(type) != 4) return 0; + return (chunk[4] == type[0] && chunk[5] == type[1] && chunk[6] == type[2] && chunk[7] == type[3]); +} + +unsigned char lodepng_chunk_ancillary(const unsigned char* chunk) +{ + return((chunk[4] & 32) != 0); +} + +unsigned char lodepng_chunk_private(const unsigned char* chunk) +{ + return((chunk[6] & 32) != 0); +} + +unsigned char lodepng_chunk_safetocopy(const unsigned char* chunk) +{ + return((chunk[7] & 32) != 0); +} + +unsigned char* lodepng_chunk_data(unsigned char* chunk) +{ + return &chunk[8]; +} + +const unsigned char* lodepng_chunk_data_const(const unsigned char* chunk) +{ + return &chunk[8]; +} + +unsigned lodepng_chunk_check_crc(const unsigned char* chunk) +{ + unsigned length = lodepng_chunk_length(chunk); + unsigned CRC = lodepng_read32bitInt(&chunk[length + 8]); + /*the CRC is taken of the data and 
the 4 chunk type letters, not the length*/ + unsigned checksum = lodepng_crc32(&chunk[4], length + 4); + if(CRC != checksum) return 1; + else return 0; +} + +void lodepng_chunk_generate_crc(unsigned char* chunk) +{ + unsigned length = lodepng_chunk_length(chunk); + unsigned CRC = lodepng_crc32(&chunk[4], length + 4); + lodepng_set32bitInt(chunk + 8 + length, CRC); +} + +unsigned char* lodepng_chunk_next(unsigned char* chunk) +{ + unsigned total_chunk_length = lodepng_chunk_length(chunk) + 12; + return &chunk[total_chunk_length]; +} + +const unsigned char* lodepng_chunk_next_const(const unsigned char* chunk) +{ + unsigned total_chunk_length = lodepng_chunk_length(chunk) + 12; + return &chunk[total_chunk_length]; +} + +unsigned lodepng_chunk_append(unsigned char** out, size_t* outlength, const unsigned char* chunk) +{ + unsigned i; + unsigned total_chunk_length = lodepng_chunk_length(chunk) + 12; + unsigned char *chunk_start, *new_buffer; + size_t new_length = (*outlength) + total_chunk_length; + if(new_length < total_chunk_length || new_length < (*outlength)) return 77; /*integer overflow happened*/ + + new_buffer = (unsigned char*)lodepng_realloc(*out, new_length); + if(!new_buffer) return 83; /*alloc fail*/ + (*out) = new_buffer; + (*outlength) = new_length; + chunk_start = &(*out)[new_length - total_chunk_length]; + + for(i = 0; i < total_chunk_length; i++) chunk_start[i] = chunk[i]; + + return 0; +} + +unsigned lodepng_chunk_create(unsigned char** out, size_t* outlength, unsigned length, + const char* type, const unsigned char* data) +{ + unsigned i; + unsigned char *chunk, *new_buffer; + size_t new_length = (*outlength) + length + 12; + if(new_length < length + 12 || new_length < (*outlength)) return 77; /*integer overflow happened*/ + new_buffer = (unsigned char*)lodepng_realloc(*out, new_length); + if(!new_buffer) return 83; /*alloc fail*/ + (*out) = new_buffer; + (*outlength) = new_length; + chunk = &(*out)[(*outlength) - length - 12]; + + /*1: length*/ + lodepng_set32bitInt(chunk, (unsigned)length); + + /*2: chunk name (4 letters)*/ + chunk[4] = (unsigned char)type[0]; + chunk[5] = (unsigned char)type[1]; + chunk[6] = (unsigned char)type[2]; + chunk[7] = (unsigned char)type[3]; + + /*3: the data*/ + for(i = 0; i < length; i++) chunk[8 + i] = data[i]; + + /*4: CRC (of the chunkname characters and the data)*/ + lodepng_chunk_generate_crc(chunk); + + return 0; +} + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / Color types and such / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/*return type is a LodePNG error code*/ +static unsigned checkColorValidity(LodePNGColorType colortype, unsigned bd) /*bd = bitdepth*/ +{ + switch(colortype) + { + case 0: if(!(bd == 1 || bd == 2 || bd == 4 || bd == 8 || bd == 16)) return 37; break; /*grey*/ + case 2: if(!( bd == 8 || bd == 16)) return 37; break; /*RGB*/ + case 3: if(!(bd == 1 || bd == 2 || bd == 4 || bd == 8 )) return 37; break; /*palette*/ + case 4: if(!( bd == 8 || bd == 16)) return 37; break; /*grey + alpha*/ + case 6: if(!( bd == 8 || bd == 16)) return 37; break; /*RGBA*/ + default: return 31; + } + return 0; /*allowed color type / bits combination*/ +} + +static unsigned getNumColorChannels(LodePNGColorType colortype) +{ + switch(colortype) + { + case 0: return 1; /*grey*/ + case 2: return 3; /*RGB*/ + case 3: return 1; /*palette*/ + case 4: return 2; /*grey + alpha*/ + case 6: return 4; /*RGBA*/ + } + return 0; /*unexisting color type*/ +} + +static 
unsigned lodepng_get_bpp_lct(LodePNGColorType colortype, unsigned bitdepth) +{ + /*bits per pixel is amount of channels * bits per channel*/ + return getNumColorChannels(colortype) * bitdepth; +} + +/* ////////////////////////////////////////////////////////////////////////// */ + +void lodepng_color_mode_init(LodePNGColorMode* info) +{ + info->key_defined = 0; + info->key_r = info->key_g = info->key_b = 0; + info->colortype = LCT_RGBA; + info->bitdepth = 8; + info->palette = 0; + info->palettesize = 0; +} + +void lodepng_color_mode_cleanup(LodePNGColorMode* info) +{ + lodepng_palette_clear(info); +} + +unsigned lodepng_color_mode_copy(LodePNGColorMode* dest, const LodePNGColorMode* source) +{ + size_t i; + lodepng_color_mode_cleanup(dest); + *dest = *source; + if(source->palette) + { + dest->palette = (unsigned char*)lodepng_malloc(1024); + if(!dest->palette && source->palettesize) return 83; /*alloc fail*/ + for(i = 0; i < source->palettesize * 4; i++) dest->palette[i] = source->palette[i]; + } + return 0; +} + +static int lodepng_color_mode_equal(const LodePNGColorMode* a, const LodePNGColorMode* b) +{ + size_t i; + if(a->colortype != b->colortype) return 0; + if(a->bitdepth != b->bitdepth) return 0; + if(a->key_defined != b->key_defined) return 0; + if(a->key_defined) + { + if(a->key_r != b->key_r) return 0; + if(a->key_g != b->key_g) return 0; + if(a->key_b != b->key_b) return 0; + } + if(a->palettesize != b->palettesize) return 0; + for(i = 0; i < a->palettesize * 4; i++) + { + if(a->palette[i] != b->palette[i]) return 0; + } + return 1; +} + +void lodepng_palette_clear(LodePNGColorMode* info) +{ + if(info->palette) lodepng_free(info->palette); + info->palette = 0; + info->palettesize = 0; +} + +unsigned lodepng_palette_add(LodePNGColorMode* info, + unsigned char r, unsigned char g, unsigned char b, unsigned char a) +{ + unsigned char* data; + /*the same resize technique as C++ std::vectors is used, and here it's made so that for a palette with + the max of 256 colors, it'll have the exact alloc size*/ + if(!info->palette) /*allocate palette if empty*/ + { + /*room for 256 colors with 4 bytes each*/ + data = (unsigned char*)lodepng_realloc(info->palette, 1024); + if(!data) return 83; /*alloc fail*/ + else info->palette = data; + } + info->palette[4 * info->palettesize + 0] = r; + info->palette[4 * info->palettesize + 1] = g; + info->palette[4 * info->palettesize + 2] = b; + info->palette[4 * info->palettesize + 3] = a; + info->palettesize++; + return 0; +} + +unsigned lodepng_get_bpp(const LodePNGColorMode* info) +{ + /*calculate bits per pixel out of colortype and bitdepth*/ + return lodepng_get_bpp_lct(info->colortype, info->bitdepth); +} + +unsigned lodepng_get_channels(const LodePNGColorMode* info) +{ + return getNumColorChannels(info->colortype); +} + +unsigned lodepng_is_greyscale_type(const LodePNGColorMode* info) +{ + return info->colortype == LCT_GREY || info->colortype == LCT_GREY_ALPHA; +} + +unsigned lodepng_is_alpha_type(const LodePNGColorMode* info) +{ + return (info->colortype & 4) != 0; /*4 or 6*/ +} + +unsigned lodepng_is_palette_type(const LodePNGColorMode* info) +{ + return info->colortype == LCT_PALETTE; +} + +unsigned lodepng_has_palette_alpha(const LodePNGColorMode* info) +{ + size_t i; + for(i = 0; i < info->palettesize; i++) + { + if(info->palette[i * 4 + 3] < 255) return 1; + } + return 0; +} + +unsigned lodepng_can_have_alpha(const LodePNGColorMode* info) +{ + return info->key_defined + || lodepng_is_alpha_type(info) + || lodepng_has_palette_alpha(info); +} 
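+
+/* Illustrative sketch, not part of upstream lodepng: a minimal example of how the
+   color-mode helpers above combine. For the default mode set by
+   lodepng_color_mode_init() (LCT_RGBA, bitdepth 8), lodepng_get_bpp() returns
+   4 channels * 8 bits = 32 bits per pixel, so a 3x2 image occupies
+   (3 * 2 * 32 + 7) / 8 = 24 bytes -- the same round-bits-up-to-whole-bytes
+   formula used by lodepng_get_raw_size() defined below. Kept inside #if 0 so the
+   vendored file's behaviour is unchanged. */
+#if 0
+static size_t example_raw_size_rgba8(unsigned w, unsigned h)
+{
+  LodePNGColorMode mode;
+  lodepng_color_mode_init(&mode); /* defaults: colortype LCT_RGBA, bitdepth 8 */
+  /* same bits-to-bytes rounding as lodepng_get_raw_size() */
+  return ((size_t)w * h * lodepng_get_bpp(&mode) + 7) / 8;
+}
+#endif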
+ +size_t lodepng_get_raw_size(unsigned w, unsigned h, const LodePNGColorMode* color) +{ + return (w * h * lodepng_get_bpp(color) + 7) / 8; +} + +size_t lodepng_get_raw_size_lct(unsigned w, unsigned h, LodePNGColorType colortype, unsigned bitdepth) +{ + return (w * h * lodepng_get_bpp_lct(colortype, bitdepth) + 7) / 8; +} + + +#ifdef LODEPNG_COMPILE_PNG +#ifdef LODEPNG_COMPILE_DECODER +/*in an idat chunk, each scanline is a multiple of 8 bits, unlike the lodepng output buffer*/ +static size_t lodepng_get_raw_size_idat(unsigned w, unsigned h, const LodePNGColorMode* color) +{ + return h * ((w * lodepng_get_bpp(color) + 7) / 8); +} +#endif /*LODEPNG_COMPILE_DECODER*/ +#endif /*LODEPNG_COMPILE_PNG*/ + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + +static void LodePNGUnknownChunks_init(LodePNGInfo* info) +{ + unsigned i; + for(i = 0; i < 3; i++) info->unknown_chunks_data[i] = 0; + for(i = 0; i < 3; i++) info->unknown_chunks_size[i] = 0; +} + +static void LodePNGUnknownChunks_cleanup(LodePNGInfo* info) +{ + unsigned i; + for(i = 0; i < 3; i++) lodepng_free(info->unknown_chunks_data[i]); +} + +static unsigned LodePNGUnknownChunks_copy(LodePNGInfo* dest, const LodePNGInfo* src) +{ + unsigned i; + + LodePNGUnknownChunks_cleanup(dest); + + for(i = 0; i < 3; i++) + { + size_t j; + dest->unknown_chunks_size[i] = src->unknown_chunks_size[i]; + dest->unknown_chunks_data[i] = (unsigned char*)lodepng_malloc(src->unknown_chunks_size[i]); + if(!dest->unknown_chunks_data[i] && dest->unknown_chunks_size[i]) return 83; /*alloc fail*/ + for(j = 0; j < src->unknown_chunks_size[i]; j++) + { + dest->unknown_chunks_data[i][j] = src->unknown_chunks_data[i][j]; + } + } + + return 0; +} + +/******************************************************************************/ + +static void LodePNGText_init(LodePNGInfo* info) +{ + info->text_num = 0; + info->text_keys = NULL; + info->text_strings = NULL; +} + +static void LodePNGText_cleanup(LodePNGInfo* info) +{ + size_t i; + for(i = 0; i < info->text_num; i++) + { + string_cleanup(&info->text_keys[i]); + string_cleanup(&info->text_strings[i]); + } + lodepng_free(info->text_keys); + lodepng_free(info->text_strings); +} + +static unsigned LodePNGText_copy(LodePNGInfo* dest, const LodePNGInfo* source) +{ + size_t i = 0; + dest->text_keys = 0; + dest->text_strings = 0; + dest->text_num = 0; + for(i = 0; i < source->text_num; i++) + { + CERROR_TRY_RETURN(lodepng_add_text(dest, source->text_keys[i], source->text_strings[i])); + } + return 0; +} + +void lodepng_clear_text(LodePNGInfo* info) +{ + LodePNGText_cleanup(info); +} + +unsigned lodepng_add_text(LodePNGInfo* info, const char* key, const char* str) +{ + char** new_keys = (char**)(lodepng_realloc(info->text_keys, sizeof(char*) * (info->text_num + 1))); + char** new_strings = (char**)(lodepng_realloc(info->text_strings, sizeof(char*) * (info->text_num + 1))); + if(!new_keys || !new_strings) + { + lodepng_free(new_keys); + lodepng_free(new_strings); + return 83; /*alloc fail*/ + } + + info->text_num++; + info->text_keys = new_keys; + info->text_strings = new_strings; + + string_init(&info->text_keys[info->text_num - 1]); + string_set(&info->text_keys[info->text_num - 1], key); + + string_init(&info->text_strings[info->text_num - 1]); + string_set(&info->text_strings[info->text_num - 1], str); + + return 0; +} + +/******************************************************************************/ + +static void LodePNGIText_init(LodePNGInfo* info) +{ + info->itext_num = 0; + info->itext_keys = NULL; + info->itext_langtags = NULL; + 
info->itext_transkeys = NULL; + info->itext_strings = NULL; +} + +static void LodePNGIText_cleanup(LodePNGInfo* info) +{ + size_t i; + for(i = 0; i < info->itext_num; i++) + { + string_cleanup(&info->itext_keys[i]); + string_cleanup(&info->itext_langtags[i]); + string_cleanup(&info->itext_transkeys[i]); + string_cleanup(&info->itext_strings[i]); + } + lodepng_free(info->itext_keys); + lodepng_free(info->itext_langtags); + lodepng_free(info->itext_transkeys); + lodepng_free(info->itext_strings); +} + +static unsigned LodePNGIText_copy(LodePNGInfo* dest, const LodePNGInfo* source) +{ + size_t i = 0; + dest->itext_keys = 0; + dest->itext_langtags = 0; + dest->itext_transkeys = 0; + dest->itext_strings = 0; + dest->itext_num = 0; + for(i = 0; i < source->itext_num; i++) + { + CERROR_TRY_RETURN(lodepng_add_itext(dest, source->itext_keys[i], source->itext_langtags[i], + source->itext_transkeys[i], source->itext_strings[i])); + } + return 0; +} + +void lodepng_clear_itext(LodePNGInfo* info) +{ + LodePNGIText_cleanup(info); +} + +unsigned lodepng_add_itext(LodePNGInfo* info, const char* key, const char* langtag, + const char* transkey, const char* str) +{ + char** new_keys = (char**)(lodepng_realloc(info->itext_keys, sizeof(char*) * (info->itext_num + 1))); + char** new_langtags = (char**)(lodepng_realloc(info->itext_langtags, sizeof(char*) * (info->itext_num + 1))); + char** new_transkeys = (char**)(lodepng_realloc(info->itext_transkeys, sizeof(char*) * (info->itext_num + 1))); + char** new_strings = (char**)(lodepng_realloc(info->itext_strings, sizeof(char*) * (info->itext_num + 1))); + if(!new_keys || !new_langtags || !new_transkeys || !new_strings) + { + lodepng_free(new_keys); + lodepng_free(new_langtags); + lodepng_free(new_transkeys); + lodepng_free(new_strings); + return 83; /*alloc fail*/ + } + + info->itext_num++; + info->itext_keys = new_keys; + info->itext_langtags = new_langtags; + info->itext_transkeys = new_transkeys; + info->itext_strings = new_strings; + + string_init(&info->itext_keys[info->itext_num - 1]); + string_set(&info->itext_keys[info->itext_num - 1], key); + + string_init(&info->itext_langtags[info->itext_num - 1]); + string_set(&info->itext_langtags[info->itext_num - 1], langtag); + + string_init(&info->itext_transkeys[info->itext_num - 1]); + string_set(&info->itext_transkeys[info->itext_num - 1], transkey); + + string_init(&info->itext_strings[info->itext_num - 1]); + string_set(&info->itext_strings[info->itext_num - 1], str); + + return 0; +} +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +void lodepng_info_init(LodePNGInfo* info) +{ + lodepng_color_mode_init(&info->color); + info->interlace_method = 0; + info->compression_method = 0; + info->filter_method = 0; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + info->background_defined = 0; + info->background_r = info->background_g = info->background_b = 0; + + LodePNGText_init(info); + LodePNGIText_init(info); + + info->time_defined = 0; + info->phys_defined = 0; + + LodePNGUnknownChunks_init(info); +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} + +void lodepng_info_cleanup(LodePNGInfo* info) +{ + lodepng_color_mode_cleanup(&info->color); +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + LodePNGText_cleanup(info); + LodePNGIText_cleanup(info); + + LodePNGUnknownChunks_cleanup(info); +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} + +unsigned lodepng_info_copy(LodePNGInfo* dest, const LodePNGInfo* source) +{ + lodepng_info_cleanup(dest); + *dest = *source; + lodepng_color_mode_init(&dest->color); + 
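+ /* deep-copy the palette: dest->color was re-initialized above so the cleanup
+    inside lodepng_color_mode_copy does not free source's palette, whose pointer
+    was duplicated by the struct assignment (*dest = *source) */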
CERROR_TRY_RETURN(lodepng_color_mode_copy(&dest->color, &source->color)); + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + CERROR_TRY_RETURN(LodePNGText_copy(dest, source)); + CERROR_TRY_RETURN(LodePNGIText_copy(dest, source)); + + LodePNGUnknownChunks_init(dest); + CERROR_TRY_RETURN(LodePNGUnknownChunks_copy(dest, source)); +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + return 0; +} + +void lodepng_info_swap(LodePNGInfo* a, LodePNGInfo* b) +{ + LodePNGInfo temp = *a; + *a = *b; + *b = temp; +} + +/* ////////////////////////////////////////////////////////////////////////// */ + +/*index: bitgroup index, bits: bitgroup size(1, 2 or 4), in: bitgroup value, out: octet array to add bits to*/ +static void addColorBits(unsigned char* out, size_t index, unsigned bits, unsigned in) +{ + unsigned m = bits == 1 ? 7 : bits == 2 ? 3 : 1; /*8 / bits - 1*/ + /*p = the partial index in the byte, e.g. with 4 palettebits it is 0 for first half or 1 for second half*/ + unsigned p = index & m; + in &= (1u << bits) - 1u; /*filter out any other bits of the input value*/ + in = in << (bits * (m - p)); + if(p == 0) out[index * bits / 8] = in; + else out[index * bits / 8] |= in; +} + +typedef struct ColorTree ColorTree; + +/* +One node of a color tree +This is the data structure used to count the number of unique colors and to get a palette +index for a color. It's like an octree, but because the alpha channel is used too, each +node has 16 instead of 8 children. +*/ +struct ColorTree +{ + ColorTree* children[16]; /*up to 16 pointers to ColorTree of next level*/ + int index; /*the payload. Only has a meaningful value if this is in the last level*/ +}; + +static void color_tree_init(ColorTree* tree) +{ + int i; + for(i = 0; i < 16; i++) tree->children[i] = 0; + tree->index = -1; +} + +static void color_tree_cleanup(ColorTree* tree) +{ + int i; + for(i = 0; i < 16; i++) + { + if(tree->children[i]) + { + color_tree_cleanup(tree->children[i]); + lodepng_free(tree->children[i]); + } + } +} + +/*returns -1 if color not present, its index otherwise*/ +static int color_tree_get(ColorTree* tree, unsigned char r, unsigned char g, unsigned char b, unsigned char a) +{ + int bit = 0; + for(bit = 0; bit < 8; bit++) + { + int i = 8 * ((r >> bit) & 1) + 4 * ((g >> bit) & 1) + 2 * ((b >> bit) & 1) + 1 * ((a >> bit) & 1); + if(!tree->children[i]) return -1; + else tree = tree->children[i]; + } + return tree ? tree->index : -1; +} + +#ifdef LODEPNG_COMPILE_ENCODER +static int color_tree_has(ColorTree* tree, unsigned char r, unsigned char g, unsigned char b, unsigned char a) +{ + return color_tree_get(tree, r, g, b, a) >= 0; +} +#endif /*LODEPNG_COMPILE_ENCODER*/ + +/*color is not allowed to already exist. 
+Index should be >= 0 (it's signed to be compatible with using -1 for "doesn't exist")*/ +static void color_tree_add(ColorTree* tree, + unsigned char r, unsigned char g, unsigned char b, unsigned char a, unsigned index) +{ + int bit; + for(bit = 0; bit < 8; bit++) + { + int i = 8 * ((r >> bit) & 1) + 4 * ((g >> bit) & 1) + 2 * ((b >> bit) & 1) + 1 * ((a >> bit) & 1); + if(!tree->children[i]) + { + tree->children[i] = (ColorTree*)lodepng_malloc(sizeof(ColorTree)); + color_tree_init(tree->children[i]); + } + tree = tree->children[i]; + } + tree->index = (int)index; +} + +/*put a pixel, given its RGBA color, into image of any color type*/ +static unsigned rgba8ToPixel(unsigned char* out, size_t i, + const LodePNGColorMode* mode, ColorTree* tree /*for palette*/, + unsigned char r, unsigned char g, unsigned char b, unsigned char a) +{ + if(mode->colortype == LCT_GREY) + { + unsigned char grey = r; /*((unsigned short)r + g + b) / 3*/; + if(mode->bitdepth == 8) out[i] = grey; + else if(mode->bitdepth == 16) out[i * 2 + 0] = out[i * 2 + 1] = grey; + else + { + /*take the most significant bits of grey*/ + grey = (grey >> (8 - mode->bitdepth)) & ((1 << mode->bitdepth) - 1); + addColorBits(out, i, mode->bitdepth, grey); + } + } + else if(mode->colortype == LCT_RGB) + { + if(mode->bitdepth == 8) + { + out[i * 3 + 0] = r; + out[i * 3 + 1] = g; + out[i * 3 + 2] = b; + } + else + { + out[i * 6 + 0] = out[i * 6 + 1] = r; + out[i * 6 + 2] = out[i * 6 + 3] = g; + out[i * 6 + 4] = out[i * 6 + 5] = b; + } + } + else if(mode->colortype == LCT_PALETTE) + { + int index = color_tree_get(tree, r, g, b, a); + if(index < 0) return 82; /*color not in palette*/ + if(mode->bitdepth == 8) out[i] = index; + else addColorBits(out, i, mode->bitdepth, (unsigned)index); + } + else if(mode->colortype == LCT_GREY_ALPHA) + { + unsigned char grey = r; /*((unsigned short)r + g + b) / 3*/; + if(mode->bitdepth == 8) + { + out[i * 2 + 0] = grey; + out[i * 2 + 1] = a; + } + else if(mode->bitdepth == 16) + { + out[i * 4 + 0] = out[i * 4 + 1] = grey; + out[i * 4 + 2] = out[i * 4 + 3] = a; + } + } + else if(mode->colortype == LCT_RGBA) + { + if(mode->bitdepth == 8) + { + out[i * 4 + 0] = r; + out[i * 4 + 1] = g; + out[i * 4 + 2] = b; + out[i * 4 + 3] = a; + } + else + { + out[i * 8 + 0] = out[i * 8 + 1] = r; + out[i * 8 + 2] = out[i * 8 + 3] = g; + out[i * 8 + 4] = out[i * 8 + 5] = b; + out[i * 8 + 6] = out[i * 8 + 7] = a; + } + } + + return 0; /*no error*/ +} + +/*put a pixel, given its RGBA16 color, into image of any color 16-bitdepth type*/ +static void rgba16ToPixel(unsigned char* out, size_t i, + const LodePNGColorMode* mode, + unsigned short r, unsigned short g, unsigned short b, unsigned short a) +{ + if(mode->colortype == LCT_GREY) + { + unsigned short grey = r; /*((unsigned)r + g + b) / 3*/; + out[i * 2 + 0] = (grey >> 8) & 255; + out[i * 2 + 1] = grey & 255; + } + else if(mode->colortype == LCT_RGB) + { + out[i * 6 + 0] = (r >> 8) & 255; + out[i * 6 + 1] = r & 255; + out[i * 6 + 2] = (g >> 8) & 255; + out[i * 6 + 3] = g & 255; + out[i * 6 + 4] = (b >> 8) & 255; + out[i * 6 + 5] = b & 255; + } + else if(mode->colortype == LCT_GREY_ALPHA) + { + unsigned short grey = r; /*((unsigned)r + g + b) / 3*/; + out[i * 4 + 0] = (grey >> 8) & 255; + out[i * 4 + 1] = grey & 255; + out[i * 4 + 2] = (a >> 8) & 255; + out[i * 4 + 3] = a & 255; + } + else if(mode->colortype == LCT_RGBA) + { + out[i * 8 + 0] = (r >> 8) & 255; + out[i * 8 + 1] = r & 255; + out[i * 8 + 2] = (g >> 8) & 255; + out[i * 8 + 3] = g & 255; + out[i * 8 + 4] = (b >> 8) & 
255; + out[i * 8 + 5] = b & 255; + out[i * 8 + 6] = (a >> 8) & 255; + out[i * 8 + 7] = a & 255; + } +} + +/*Get RGBA8 color of pixel with index i (y * width + x) from the raw image with given color type.*/ +static void getPixelColorRGBA8(unsigned char* r, unsigned char* g, + unsigned char* b, unsigned char* a, + const unsigned char* in, size_t i, + const LodePNGColorMode* mode) +{ + if(mode->colortype == LCT_GREY) + { + if(mode->bitdepth == 8) + { + *r = *g = *b = in[i]; + if(mode->key_defined && *r == mode->key_r) *a = 0; + else *a = 255; + } + else if(mode->bitdepth == 16) + { + *r = *g = *b = in[i * 2 + 0]; + if(mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r) *a = 0; + else *a = 255; + } + else + { + unsigned highest = ((1U << mode->bitdepth) - 1U); /*highest possible value for this bit depth*/ + size_t j = i * mode->bitdepth; + unsigned value = readBitsFromReversedStream(&j, in, mode->bitdepth); + *r = *g = *b = (value * 255) / highest; + if(mode->key_defined && value == mode->key_r) *a = 0; + else *a = 255; + } + } + else if(mode->colortype == LCT_RGB) + { + if(mode->bitdepth == 8) + { + *r = in[i * 3 + 0]; *g = in[i * 3 + 1]; *b = in[i * 3 + 2]; + if(mode->key_defined && *r == mode->key_r && *g == mode->key_g && *b == mode->key_b) *a = 0; + else *a = 255; + } + else + { + *r = in[i * 6 + 0]; + *g = in[i * 6 + 2]; + *b = in[i * 6 + 4]; + if(mode->key_defined && 256U * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r + && 256U * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g + && 256U * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b) *a = 0; + else *a = 255; + } + } + else if(mode->colortype == LCT_PALETTE) + { + unsigned index; + if(mode->bitdepth == 8) index = in[i]; + else + { + size_t j = i * mode->bitdepth; + index = readBitsFromReversedStream(&j, in, mode->bitdepth); + } + + if(index >= mode->palettesize) + { + /*This is an error according to the PNG spec, but common PNG decoders make it black instead. + Done here too, slightly faster due to no error handling needed.*/ + *r = *g = *b = 0; + *a = 255; + } + else + { + *r = mode->palette[index * 4 + 0]; + *g = mode->palette[index * 4 + 1]; + *b = mode->palette[index * 4 + 2]; + *a = mode->palette[index * 4 + 3]; + } + } + else if(mode->colortype == LCT_GREY_ALPHA) + { + if(mode->bitdepth == 8) + { + *r = *g = *b = in[i * 2 + 0]; + *a = in[i * 2 + 1]; + } + else + { + *r = *g = *b = in[i * 4 + 0]; + *a = in[i * 4 + 2]; + } + } + else if(mode->colortype == LCT_RGBA) + { + if(mode->bitdepth == 8) + { + *r = in[i * 4 + 0]; + *g = in[i * 4 + 1]; + *b = in[i * 4 + 2]; + *a = in[i * 4 + 3]; + } + else + { + *r = in[i * 8 + 0]; + *g = in[i * 8 + 2]; + *b = in[i * 8 + 4]; + *a = in[i * 8 + 6]; + } + } +} + +/*Similar to getPixelColorRGBA8, but with all the for loops inside of the color +mode test cases, optimized to convert the colors much faster, when converting +to RGBA or RGB with 8 bit per cannel. buffer must be RGBA or RGB output with +enough memory, if has_alpha is true the output is RGBA. mode has the color mode +of the input buffer.*/ +static void getPixelColorsRGBA8(unsigned char* buffer, size_t numpixels, + unsigned has_alpha, const unsigned char* in, + const LodePNGColorMode* mode) +{ + unsigned num_channels = has_alpha ? 4 : 3; + size_t i; + if(mode->colortype == LCT_GREY) + { + if(mode->bitdepth == 8) + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = buffer[1] = buffer[2] = in[i]; + if(has_alpha) buffer[3] = mode->key_defined && in[i] == mode->key_r ? 
0 : 255; + } + } + else if(mode->bitdepth == 16) + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = buffer[1] = buffer[2] = in[i * 2]; + if(has_alpha) buffer[3] = mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r ? 0 : 255; + } + } + else + { + unsigned highest = ((1U << mode->bitdepth) - 1U); /*highest possible value for this bit depth*/ + size_t j = 0; + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + unsigned value = readBitsFromReversedStream(&j, in, mode->bitdepth); + buffer[0] = buffer[1] = buffer[2] = (value * 255) / highest; + if(has_alpha) buffer[3] = mode->key_defined && value == mode->key_r ? 0 : 255; + } + } + } + else if(mode->colortype == LCT_RGB) + { + if(mode->bitdepth == 8) + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = in[i * 3 + 0]; + buffer[1] = in[i * 3 + 1]; + buffer[2] = in[i * 3 + 2]; + if(has_alpha) buffer[3] = mode->key_defined && buffer[0] == mode->key_r + && buffer[1]== mode->key_g && buffer[2] == mode->key_b ? 0 : 255; + } + } + else + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = in[i * 6 + 0]; + buffer[1] = in[i * 6 + 2]; + buffer[2] = in[i * 6 + 4]; + if(has_alpha) buffer[3] = mode->key_defined + && 256U * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r + && 256U * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g + && 256U * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b ? 0 : 255; + } + } + } + else if(mode->colortype == LCT_PALETTE) + { + unsigned index; + size_t j = 0; + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + if(mode->bitdepth == 8) index = in[i]; + else index = readBitsFromReversedStream(&j, in, mode->bitdepth); + + if(index >= mode->palettesize) + { + /*This is an error according to the PNG spec, but most PNG decoders make it black instead. 
+ Done here too, slightly faster due to no error handling needed.*/ + buffer[0] = buffer[1] = buffer[2] = 0; + if(has_alpha) buffer[3] = 255; + } + else + { + buffer[0] = mode->palette[index * 4 + 0]; + buffer[1] = mode->palette[index * 4 + 1]; + buffer[2] = mode->palette[index * 4 + 2]; + if(has_alpha) buffer[3] = mode->palette[index * 4 + 3]; + } + } + } + else if(mode->colortype == LCT_GREY_ALPHA) + { + if(mode->bitdepth == 8) + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = buffer[1] = buffer[2] = in[i * 2 + 0]; + if(has_alpha) buffer[3] = in[i * 2 + 1]; + } + } + else + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = buffer[1] = buffer[2] = in[i * 4 + 0]; + if(has_alpha) buffer[3] = in[i * 4 + 2]; + } + } + } + else if(mode->colortype == LCT_RGBA) + { + if(mode->bitdepth == 8) + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = in[i * 4 + 0]; + buffer[1] = in[i * 4 + 1]; + buffer[2] = in[i * 4 + 2]; + if(has_alpha) buffer[3] = in[i * 4 + 3]; + } + } + else + { + for(i = 0; i < numpixels; i++, buffer += num_channels) + { + buffer[0] = in[i * 8 + 0]; + buffer[1] = in[i * 8 + 2]; + buffer[2] = in[i * 8 + 4]; + if(has_alpha) buffer[3] = in[i * 8 + 6]; + } + } + } +} + +/*Get RGBA16 color of pixel with index i (y * width + x) from the raw image with +given color type, but the given color type must be 16-bit itself.*/ +static void getPixelColorRGBA16(unsigned short* r, unsigned short* g, unsigned short* b, unsigned short* a, + const unsigned char* in, size_t i, const LodePNGColorMode* mode) +{ + if(mode->colortype == LCT_GREY) + { + *r = *g = *b = 256 * in[i * 2 + 0] + in[i * 2 + 1]; + if(mode->key_defined && 256U * in[i * 2 + 0] + in[i * 2 + 1] == mode->key_r) *a = 0; + else *a = 65535; + } + else if(mode->colortype == LCT_RGB) + { + *r = 256 * in[i * 6 + 0] + in[i * 6 + 1]; + *g = 256 * in[i * 6 + 2] + in[i * 6 + 3]; + *b = 256 * in[i * 6 + 4] + in[i * 6 + 5]; + if(mode->key_defined && 256U * in[i * 6 + 0] + in[i * 6 + 1] == mode->key_r + && 256U * in[i * 6 + 2] + in[i * 6 + 3] == mode->key_g + && 256U * in[i * 6 + 4] + in[i * 6 + 5] == mode->key_b) *a = 0; + else *a = 65535; + } + else if(mode->colortype == LCT_GREY_ALPHA) + { + *r = *g = *b = 256 * in[i * 4 + 0] + in[i * 4 + 1]; + *a = 256 * in[i * 4 + 2] + in[i * 4 + 3]; + } + else if(mode->colortype == LCT_RGBA) + { + *r = 256 * in[i * 8 + 0] + in[i * 8 + 1]; + *g = 256 * in[i * 8 + 2] + in[i * 8 + 3]; + *b = 256 * in[i * 8 + 4] + in[i * 8 + 5]; + *a = 256 * in[i * 8 + 6] + in[i * 8 + 7]; + } +} + +unsigned lodepng_convert(unsigned char* out, const unsigned char* in, + LodePNGColorMode* mode_out, const LodePNGColorMode* mode_in, + unsigned w, unsigned h) +{ + size_t i; + ColorTree tree; + size_t numpixels = w * h; + + if(lodepng_color_mode_equal(mode_out, mode_in)) + { + size_t numbytes = lodepng_get_raw_size(w, h, mode_in); + for(i = 0; i < numbytes; i++) out[i] = in[i]; + return 0; + } + + if(mode_out->colortype == LCT_PALETTE) + { + size_t palsize = 1u << mode_out->bitdepth; + if(mode_out->palettesize < palsize) palsize = mode_out->palettesize; + color_tree_init(&tree); + for(i = 0; i < palsize; i++) + { + unsigned char* p = &mode_out->palette[i * 4]; + color_tree_add(&tree, p[0], p[1], p[2], p[3], i); + } + } + + if(mode_in->bitdepth == 16 && mode_out->bitdepth == 16) + { + for(i = 0; i < numpixels; i++) + { + unsigned short r = 0, g = 0, b = 0, a = 0; + getPixelColorRGBA16(&r, &g, &b, &a, in, i, mode_in); + rgba16ToPixel(out, i, mode_out, r, g, 
b, a); + } + } + else if(mode_out->bitdepth == 8 && mode_out->colortype == LCT_RGBA) + { + getPixelColorsRGBA8(out, numpixels, 1, in, mode_in); + } + else if(mode_out->bitdepth == 8 && mode_out->colortype == LCT_RGB) + { + getPixelColorsRGBA8(out, numpixels, 0, in, mode_in); + } + else + { + unsigned char r = 0, g = 0, b = 0, a = 0; + for(i = 0; i < numpixels; i++) + { + getPixelColorRGBA8(&r, &g, &b, &a, in, i, mode_in); + rgba8ToPixel(out, i, mode_out, &tree, r, g, b, a); + } + } + + if(mode_out->colortype == LCT_PALETTE) + { + color_tree_cleanup(&tree); + } + + return 0; /*no error (this function currently never has one, but maybe OOM detection added later.)*/ +} + +#ifdef LODEPNG_COMPILE_ENCODER + +void lodepng_color_profile_init(LodePNGColorProfile* profile) +{ + profile->colored = 0; + profile->key = 0; + profile->alpha = 0; + profile->key_r = profile->key_g = profile->key_b = 0; + profile->numcolors = 0; + profile->bits = 1; +} + +/*function used for debug purposes with C++*/ +/*void printColorProfile(LodePNGColorProfile* p) +{ + std::cout << "colored: " << (int)p->colored << ", "; + std::cout << "key: " << (int)p->key << ", "; + std::cout << "key_r: " << (int)p->key_r << ", "; + std::cout << "key_g: " << (int)p->key_g << ", "; + std::cout << "key_b: " << (int)p->key_b << ", "; + std::cout << "alpha: " << (int)p->alpha << ", "; + std::cout << "numcolors: " << (int)p->numcolors << ", "; + std::cout << "bits: " << (int)p->bits << std::endl; +}*/ + +/*Returns how many bits needed to represent given value (max 8 bit)*/ +unsigned getValueRequiredBits(unsigned char value) +{ + if(value == 0 || value == 255) return 1; + /*The scaling of 2-bit and 4-bit values uses multiples of 85 and 17*/ + if(value % 17 == 0) return value % 85 == 0 ? 2 : 4; + return 8; +} + +/*profile must already have been inited with mode. +It's ok to set some parameters of profile to done already.*/ +unsigned get_color_profile(LodePNGColorProfile* profile, + const unsigned char* in, unsigned w, unsigned h, + const LodePNGColorMode* mode) +{ + unsigned error = 0; + size_t i; + ColorTree tree; + size_t numpixels = w * h; + + unsigned colored_done = lodepng_is_greyscale_type(mode) ? 1 : 0; + unsigned alpha_done = lodepng_can_have_alpha(mode) ? 0 : 1; + unsigned numcolors_done = 0; + unsigned bpp = lodepng_get_bpp(mode); + unsigned bits_done = bpp == 1 ? 1 : 0; + unsigned maxnumcolors = 257; + unsigned sixteen = 0; + if(bpp <= 8) maxnumcolors = bpp == 1 ? 2 : (bpp == 2 ? 4 : (bpp == 4 ? 
16 : 256)); + + color_tree_init(&tree); + + /*Check if the 16-bit input is truly 16-bit*/ + if(mode->bitdepth == 16) + { + unsigned short r, g, b, a; + for(i = 0; i < numpixels; i++) + { + getPixelColorRGBA16(&r, &g, &b, &a, in, i, mode); + if(r % 257u != 0 || g % 257u != 0 || b % 257u != 0 || a % 257u != 0) /*first and second byte differ*/ + { + sixteen = 1; + break; + } + } + } + + if(sixteen) + { + unsigned short r = 0, g = 0, b = 0, a = 0; + profile->bits = 16; + bits_done = numcolors_done = 1; /*counting colors no longer useful, palette doesn't support 16-bit*/ + + for(i = 0; i < numpixels; i++) + { + getPixelColorRGBA16(&r, &g, &b, &a, in, i, mode); + + if(!colored_done && (r != g || r != b)) + { + profile->colored = 1; + colored_done = 1; + } + + if(!alpha_done) + { + unsigned matchkey = (r == profile->key_r && g == profile->key_g && b == profile->key_b); + if(a != 65535 && (a != 0 || (profile->key && !matchkey))) + { + profile->alpha = 1; + alpha_done = 1; + if(profile->bits < 8) profile->bits = 8; /*PNG has no alphachannel modes with less than 8-bit per channel*/ + } + else if(a == 0 && !profile->alpha && !profile->key) + { + profile->key = 1; + profile->key_r = r; + profile->key_g = g; + profile->key_b = b; + } + else if(a == 65535 && profile->key && matchkey) + { + /* Color key cannot be used if an opaque pixel also has that RGB color. */ + profile->alpha = 1; + alpha_done = 1; + } + } + + if(alpha_done && numcolors_done && colored_done && bits_done) break; + } + } + else /* < 16-bit */ + { + for(i = 0; i < numpixels; i++) + { + unsigned char r = 0, g = 0, b = 0, a = 0; + getPixelColorRGBA8(&r, &g, &b, &a, in, i, mode); + + if(!bits_done && profile->bits < 8) + { + /*only r is checked, < 8 bits is only relevant for greyscale*/ + unsigned bits = getValueRequiredBits(r); + if(bits > profile->bits) profile->bits = bits; + } + bits_done = (profile->bits >= bpp); + + if(!colored_done && (r != g || r != b)) + { + profile->colored = 1; + colored_done = 1; + if(profile->bits < 8) profile->bits = 8; /*PNG has no colored modes with less than 8-bit per channel*/ + } + + if(!alpha_done) + { + unsigned matchkey = (r == profile->key_r && g == profile->key_g && b == profile->key_b); + if(a != 255 && (a != 0 || (profile->key && !matchkey))) + { + profile->alpha = 1; + alpha_done = 1; + if(profile->bits < 8) profile->bits = 8; /*PNG has no alphachannel modes with less than 8-bit per channel*/ + } + else if(a == 0 && !profile->alpha && !profile->key) + { + profile->key = 1; + profile->key_r = r; + profile->key_g = g; + profile->key_b = b; + } + else if(a == 255 && profile->key && matchkey) + { + /* Color key cannot be used if an opaque pixel also has that RGB color. 
*/ + profile->alpha = 1; + alpha_done = 1; + if(profile->bits < 8) profile->bits = 8; /*PNG has no alphachannel modes with less than 8-bit per channel*/ + } + } + + if(!numcolors_done) + { + if(!color_tree_has(&tree, r, g, b, a)) + { + color_tree_add(&tree, r, g, b, a, profile->numcolors); + if(profile->numcolors < 256) + { + unsigned char* p = profile->palette; + unsigned n = profile->numcolors; + p[n * 4 + 0] = r; + p[n * 4 + 1] = g; + p[n * 4 + 2] = b; + p[n * 4 + 3] = a; + } + profile->numcolors++; + numcolors_done = profile->numcolors >= maxnumcolors; + } + } + + if(alpha_done && numcolors_done && colored_done && bits_done) break; + } + + /*make the profile's key always 16-bit for consistency - repeat each byte twice*/ + profile->key_r *= 257; + profile->key_g *= 257; + profile->key_b *= 257; + } + + color_tree_cleanup(&tree); + return error; +} + +/*Automatically chooses color type that gives smallest amount of bits in the +output image, e.g. grey if there are only greyscale pixels, palette if there +are less than 256 colors, ... +Updates values of mode with a potentially smaller color model. mode_out should +contain the user chosen color model, but will be overwritten with the new chosen one.*/ +unsigned lodepng_auto_choose_color(LodePNGColorMode* mode_out, + const unsigned char* image, unsigned w, unsigned h, + const LodePNGColorMode* mode_in) +{ + LodePNGColorProfile prof; + unsigned error = 0; + unsigned i, n, palettebits, grey_ok, palette_ok; + + lodepng_color_profile_init(&prof); + error = get_color_profile(&prof, image, w, h, mode_in); + if(error) return error; + mode_out->key_defined = 0; + + if(prof.key && w * h <= 16) prof.alpha = 1; /*too few pixels to justify tRNS chunk overhead*/ + grey_ok = !prof.colored && !prof.alpha; /*grey without alpha, with potentially low bits*/ + n = prof.numcolors; + palettebits = n <= 2 ? 1 : (n <= 4 ? 2 : (n <= 16 ? 4 : 8)); + palette_ok = n <= 256 && (n * 2 < w * h) && prof.bits <= 8; + if(w * h < n * 2) palette_ok = 0; /*don't add palette overhead if image has only a few pixels*/ + if(grey_ok && prof.bits <= palettebits) palette_ok = 0; /*grey is less overhead*/ + + if(palette_ok) + { + unsigned char* p = prof.palette; + lodepng_palette_clear(mode_out); /*remove potential earlier palette*/ + for(i = 0; i < prof.numcolors; i++) + { + error = lodepng_palette_add(mode_out, p[i * 4 + 0], p[i * 4 + 1], p[i * 4 + 2], p[i * 4 + 3]); + if(error) break; + } + + mode_out->colortype = LCT_PALETTE; + mode_out->bitdepth = palettebits; + + if(mode_in->colortype == LCT_PALETTE && mode_in->palettesize >= mode_out->palettesize + && mode_in->bitdepth == mode_out->bitdepth) + { + /*If input should have same palette colors, keep original to preserve its order and prevent conversion*/ + lodepng_color_mode_cleanup(mode_out); + lodepng_color_mode_copy(mode_out, mode_in); + } + } + else /*8-bit or 16-bit per channel*/ + { + mode_out->bitdepth = prof.bits; + mode_out->colortype = prof.alpha ? (prof.colored ? LCT_RGBA : LCT_GREY_ALPHA) + : (prof.colored ? 
LCT_RGB : LCT_GREY); + + if(prof.key && !prof.alpha) + { + unsigned mask = (1u << mode_out->bitdepth) - 1u; /*profile always uses 16-bit, mask converts it*/ + mode_out->key_r = prof.key_r & mask; + mode_out->key_g = prof.key_g & mask; + mode_out->key_b = prof.key_b & mask; + mode_out->key_defined = 1; + } + } + + return error; +} + +#endif /* #ifdef LODEPNG_COMPILE_ENCODER */ + +/* +Paeth predicter, used by PNG filter type 4 +The parameters are of type short, but should come from unsigned chars, the shorts +are only needed to make the paeth calculation correct. +*/ +static unsigned char paethPredictor(short a, short b, short c) +{ + short pa = abs(b - c); + short pb = abs(a - c); + short pc = abs(a + b - c - c); + + if(pc < pa && pc < pb) return (unsigned char)c; + else if(pb < pa) return (unsigned char)b; + else return (unsigned char)a; +} + +/*shared values used by multiple Adam7 related functions*/ + +static const unsigned ADAM7_IX[7] = { 0, 4, 0, 2, 0, 1, 0 }; /*x start values*/ +static const unsigned ADAM7_IY[7] = { 0, 0, 4, 0, 2, 0, 1 }; /*y start values*/ +static const unsigned ADAM7_DX[7] = { 8, 8, 4, 4, 2, 2, 1 }; /*x delta values*/ +static const unsigned ADAM7_DY[7] = { 8, 8, 8, 4, 4, 2, 2 }; /*y delta values*/ + +/* +Outputs various dimensions and positions in the image related to the Adam7 reduced images. +passw: output containing the width of the 7 passes +passh: output containing the height of the 7 passes +filter_passstart: output containing the index of the start and end of each + reduced image with filter bytes +padded_passstart output containing the index of the start and end of each + reduced image when without filter bytes but with padded scanlines +passstart: output containing the index of the start and end of each reduced + image without padding between scanlines, but still padding between the images +w, h: width and height of non-interlaced image +bpp: bits per pixel +"padded" is only relevant if bpp is less than 8 and a scanline or image does not + end at a full byte +*/ +static void Adam7_getpassvalues(unsigned passw[7], unsigned passh[7], size_t filter_passstart[8], + size_t padded_passstart[8], size_t passstart[8], unsigned w, unsigned h, unsigned bpp) +{ + /*the passstart values have 8 values: the 8th one indicates the byte after the end of the 7th (= last) pass*/ + unsigned i; + + /*calculate width and height in pixels of each pass*/ + for(i = 0; i < 7; i++) + { + passw[i] = (w + ADAM7_DX[i] - ADAM7_IX[i] - 1) / ADAM7_DX[i]; + passh[i] = (h + ADAM7_DY[i] - ADAM7_IY[i] - 1) / ADAM7_DY[i]; + if(passw[i] == 0) passh[i] = 0; + if(passh[i] == 0) passw[i] = 0; + } + + filter_passstart[0] = padded_passstart[0] = passstart[0] = 0; + for(i = 0; i < 7; i++) + { + /*if passw[i] is 0, it's 0 bytes, not 1 (no filtertype-byte)*/ + filter_passstart[i + 1] = filter_passstart[i] + + ((passw[i] && passh[i]) ? passh[i] * (1 + (passw[i] * bpp + 7) / 8) : 0); + /*bits padded if needed to fill full byte at end of each scanline*/ + padded_passstart[i + 1] = padded_passstart[i] + passh[i] * ((passw[i] * bpp + 7) / 8); + /*only padded at end of reduced image*/ + passstart[i + 1] = passstart[i] + (passh[i] * passw[i] * bpp + 7) / 8; + } +} + +#ifdef LODEPNG_COMPILE_DECODER + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / PNG Decoder / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/*read the information from the header and store it in the LodePNGInfo. 
return value is error*/ +unsigned lodepng_inspect(unsigned* w, unsigned* h, LodePNGState* state, + const unsigned char* in, size_t insize) +{ + LodePNGInfo* info = &state->info_png; + if(insize == 0 || in == 0) + { + CERROR_RETURN_ERROR(state->error, 48); /*error: the given data is empty*/ + } + if(insize < 29) + { + CERROR_RETURN_ERROR(state->error, 27); /*error: the data length is smaller than the length of a PNG header*/ + } + + /*when decoding a new PNG image, make sure all parameters created after previous decoding are reset*/ + lodepng_info_cleanup(info); + lodepng_info_init(info); + + if(in[0] != 137 || in[1] != 80 || in[2] != 78 || in[3] != 71 + || in[4] != 13 || in[5] != 10 || in[6] != 26 || in[7] != 10) + { + CERROR_RETURN_ERROR(state->error, 28); /*error: the first 8 bytes are not the correct PNG signature*/ + } + if(in[12] != 'I' || in[13] != 'H' || in[14] != 'D' || in[15] != 'R') + { + CERROR_RETURN_ERROR(state->error, 29); /*error: it doesn't start with a IHDR chunk!*/ + } + + /*read the values given in the header*/ + *w = lodepng_read32bitInt(&in[16]); + *h = lodepng_read32bitInt(&in[20]); + info->color.bitdepth = in[24]; + info->color.colortype = (LodePNGColorType)in[25]; + info->compression_method = in[26]; + info->filter_method = in[27]; + info->interlace_method = in[28]; + + if(!state->decoder.ignore_crc) + { + unsigned CRC = lodepng_read32bitInt(&in[29]); + unsigned checksum = lodepng_crc32(&in[12], 17); + if(CRC != checksum) + { + CERROR_RETURN_ERROR(state->error, 57); /*invalid CRC*/ + } + } + + /*error: only compression method 0 is allowed in the specification*/ + if(info->compression_method != 0) CERROR_RETURN_ERROR(state->error, 32); + /*error: only filter method 0 is allowed in the specification*/ + if(info->filter_method != 0) CERROR_RETURN_ERROR(state->error, 33); + /*error: only interlace methods 0 and 1 exist in the specification*/ + if(info->interlace_method > 1) CERROR_RETURN_ERROR(state->error, 34); + + state->error = checkColorValidity(info->color.colortype, info->color.bitdepth); + return state->error; +} + +static unsigned unfilterScanline(unsigned char* recon, const unsigned char* scanline, const unsigned char* precon, + size_t bytewidth, unsigned char filterType, size_t length) +{ + /* + For PNG filter method 0 + unfilter a PNG image scanline by scanline. when the pixels are smaller than 1 byte, + the filter works byte per byte (bytewidth = 1) + precon is the previous unfiltered scanline, recon the result, scanline the current one + the incoming scanlines do NOT include the filtertype byte, that one is given in the parameter filterType instead + recon and scanline MAY be the same memory address! precon must be disjoint. 
+ */ + + size_t i; + switch(filterType) + { + case 0: + for(i = 0; i < length; i++) recon[i] = scanline[i]; + break; + case 1: + for(i = 0; i < bytewidth; i++) recon[i] = scanline[i]; + for(i = bytewidth; i < length; i++) recon[i] = scanline[i] + recon[i - bytewidth]; + break; + case 2: + if(precon) + { + for(i = 0; i < length; i++) recon[i] = scanline[i] + precon[i]; + } + else + { + for(i = 0; i < length; i++) recon[i] = scanline[i]; + } + break; + case 3: + if(precon) + { + for(i = 0; i < bytewidth; i++) recon[i] = scanline[i] + precon[i] / 2; + for(i = bytewidth; i < length; i++) recon[i] = scanline[i] + ((recon[i - bytewidth] + precon[i]) / 2); + } + else + { + for(i = 0; i < bytewidth; i++) recon[i] = scanline[i]; + for(i = bytewidth; i < length; i++) recon[i] = scanline[i] + recon[i - bytewidth] / 2; + } + break; + case 4: + if(precon) + { + for(i = 0; i < bytewidth; i++) + { + recon[i] = (scanline[i] + precon[i]); /*paethPredictor(0, precon[i], 0) is always precon[i]*/ + } + for(i = bytewidth; i < length; i++) + { + recon[i] = (scanline[i] + paethPredictor(recon[i - bytewidth], precon[i], precon[i - bytewidth])); + } + } + else + { + for(i = 0; i < bytewidth; i++) + { + recon[i] = scanline[i]; + } + for(i = bytewidth; i < length; i++) + { + /*paethPredictor(recon[i - bytewidth], 0, 0) is always recon[i - bytewidth]*/ + recon[i] = (scanline[i] + recon[i - bytewidth]); + } + } + break; + default: return 36; /*error: unexisting filter type given*/ + } + return 0; +} + +static unsigned unfilter(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, unsigned bpp) +{ + /* + For PNG filter method 0 + this function unfilters a single image (e.g. without interlacing this is called once, with Adam7 seven times) + out must have enough bytes allocated already, in must have the scanlines + 1 filtertype byte per scanline + w and h are image dimensions or dimensions of reduced image, bpp is bits per pixel + in and out are allowed to be the same memory address (but aren't the same size since in has the extra filter bytes) + */ + + unsigned y; + unsigned char* prevline = 0; + + /*bytewidth is used for filtering, is 1 when bpp < 8, number of bytes per pixel otherwise*/ + size_t bytewidth = (bpp + 7) / 8; + size_t linebytes = (w * bpp + 7) / 8; + + for(y = 0; y < h; y++) + { + size_t outindex = linebytes * y; + size_t inindex = (1 + linebytes) * y; /*the extra filterbyte added to each row*/ + unsigned char filterType = in[inindex]; + + CERROR_TRY_RETURN(unfilterScanline(&out[outindex], &in[inindex + 1], prevline, bytewidth, filterType, linebytes)); + + prevline = &out[outindex]; + } + + return 0; +} + +/* +in: Adam7 interlaced image, with no padding bits between scanlines, but between + reduced images so that each reduced image starts at a byte. +out: the same pixels, but re-ordered so that they're now a non-interlaced image with size w*h +bpp: bits per pixel +out has the following size in bits: w * h * bpp. +in is possibly bigger due to padding bits between reduced images. 
+out must be big enough AND must be 0 everywhere if bpp < 8 in the current implementation +(because that's likely a little bit faster) +NOTE: comments about padding bits are only relevant if bpp < 8 +*/ +static void Adam7_deinterlace(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, unsigned bpp) +{ + unsigned passw[7], passh[7]; + size_t filter_passstart[8], padded_passstart[8], passstart[8]; + unsigned i; + + Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp); + + if(bpp >= 8) + { + for(i = 0; i < 7; i++) + { + unsigned x, y, b; + size_t bytewidth = bpp / 8; + for(y = 0; y < passh[i]; y++) + for(x = 0; x < passw[i]; x++) + { + size_t pixelinstart = passstart[i] + (y * passw[i] + x) * bytewidth; + size_t pixeloutstart = ((ADAM7_IY[i] + y * ADAM7_DY[i]) * w + ADAM7_IX[i] + x * ADAM7_DX[i]) * bytewidth; + for(b = 0; b < bytewidth; b++) + { + out[pixeloutstart + b] = in[pixelinstart + b]; + } + } + } + } + else /*bpp < 8: Adam7 with pixels < 8 bit is a bit trickier: with bit pointers*/ + { + for(i = 0; i < 7; i++) + { + unsigned x, y, b; + unsigned ilinebits = bpp * passw[i]; + unsigned olinebits = bpp * w; + size_t obp, ibp; /*bit pointers (for out and in buffer)*/ + for(y = 0; y < passh[i]; y++) + for(x = 0; x < passw[i]; x++) + { + ibp = (8 * passstart[i]) + (y * ilinebits + x * bpp); + obp = (ADAM7_IY[i] + y * ADAM7_DY[i]) * olinebits + (ADAM7_IX[i] + x * ADAM7_DX[i]) * bpp; + for(b = 0; b < bpp; b++) + { + unsigned char bit = readBitFromReversedStream(&ibp, in); + /*note that this function assumes the out buffer is completely 0, use setBitOfReversedStream otherwise*/ + setBitOfReversedStream0(&obp, out, bit); + } + } + } + } +} + +static void removePaddingBits(unsigned char* out, const unsigned char* in, + size_t olinebits, size_t ilinebits, unsigned h) +{ + /* + After filtering there are still padding bits if scanlines have non multiple of 8 bit amounts. They need + to be removed (except at last scanline of (Adam7-reduced) image) before working with pure image buffers + for the Adam7 code, the color convert code and the output to the user. + in and out are allowed to be the same buffer, in may also be higher but still overlapping; in must + have >= ilinebits*h bits, out must have >= olinebits*h bits, olinebits must be <= ilinebits + also used to move bits after earlier such operations happened, e.g. in a sequence of reduced images from Adam7 + only useful if (ilinebits - olinebits) is a value in the range 1..7 + */ + unsigned y; + size_t diff = ilinebits - olinebits; + size_t ibp = 0, obp = 0; /*input and output bit pointers*/ + for(y = 0; y < h; y++) + { + size_t x; + for(x = 0; x < olinebits; x++) + { + unsigned char bit = readBitFromReversedStream(&ibp, in); + setBitOfReversedStream(&obp, out, bit); + } + ibp += diff; + } +} + +/*out must be buffer big enough to contain full image, and in must contain the full decompressed data from +the IDAT chunks (with filter index bytes and possible padding bits) +return value is error*/ +static unsigned postProcessScanlines(unsigned char* out, unsigned char* in, + unsigned w, unsigned h, const LodePNGInfo* info_png) +{ + /* + This function converts the filtered-padded-interlaced data into pure 2D image buffer with the PNG's colortype. + Steps: + *) if no Adam7: 1) unfilter 2) remove padding bits (= posible extra bits per scanline if bpp < 8) + *) if adam7: 1) 7x unfilter 2) 7x remove padding bits 3) Adam7_deinterlace + NOTE: the in buffer will be overwritten with intermediate data! 
+ */ + unsigned bpp = lodepng_get_bpp(&info_png->color); + if(bpp == 0) return 31; /*error: invalid colortype*/ + + if(info_png->interlace_method == 0) + { + if(bpp < 8 && w * bpp != ((w * bpp + 7) / 8) * 8) + { + CERROR_TRY_RETURN(unfilter(in, in, w, h, bpp)); + removePaddingBits(out, in, w * bpp, ((w * bpp + 7) / 8) * 8, h); + } + /*we can immediatly filter into the out buffer, no other steps needed*/ + else CERROR_TRY_RETURN(unfilter(out, in, w, h, bpp)); + } + else /*interlace_method is 1 (Adam7)*/ + { + unsigned passw[7], passh[7]; size_t filter_passstart[8], padded_passstart[8], passstart[8]; + unsigned i; + + Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp); + + for(i = 0; i < 7; i++) + { + CERROR_TRY_RETURN(unfilter(&in[padded_passstart[i]], &in[filter_passstart[i]], passw[i], passh[i], bpp)); + /*TODO: possible efficiency improvement: if in this reduced image the bits fit nicely in 1 scanline, + move bytes instead of bits or move not at all*/ + if(bpp < 8) + { + /*remove padding bits in scanlines; after this there still may be padding + bits between the different reduced images: each reduced image still starts nicely at a byte*/ + removePaddingBits(&in[passstart[i]], &in[padded_passstart[i]], passw[i] * bpp, + ((passw[i] * bpp + 7) / 8) * 8, passh[i]); + } + } + + Adam7_deinterlace(out, in, w, h, bpp); + } + + return 0; +} + +static unsigned readChunk_PLTE(LodePNGColorMode* color, const unsigned char* data, size_t chunkLength) +{ + unsigned pos = 0, i; + if(color->palette) lodepng_free(color->palette); + color->palettesize = chunkLength / 3; + color->palette = (unsigned char*)lodepng_malloc(4 * color->palettesize); + if(!color->palette && color->palettesize) + { + color->palettesize = 0; + return 83; /*alloc fail*/ + } + if(color->palettesize > 256) return 38; /*error: palette too big*/ + + for(i = 0; i < color->palettesize; i++) + { + color->palette[4 * i + 0] = data[pos++]; /*R*/ + color->palette[4 * i + 1] = data[pos++]; /*G*/ + color->palette[4 * i + 2] = data[pos++]; /*B*/ + color->palette[4 * i + 3] = 255; /*alpha*/ + } + + return 0; /* OK */ +} + +static unsigned readChunk_tRNS(LodePNGColorMode* color, const unsigned char* data, size_t chunkLength) +{ + unsigned i; + if(color->colortype == LCT_PALETTE) + { + /*error: more alpha values given than there are palette entries*/ + if(chunkLength > color->palettesize) return 38; + + for(i = 0; i < chunkLength; i++) color->palette[4 * i + 3] = data[i]; + } + else if(color->colortype == LCT_GREY) + { + /*error: this chunk must be 2 bytes for greyscale image*/ + if(chunkLength != 2) return 30; + + color->key_defined = 1; + color->key_r = color->key_g = color->key_b = 256u * data[0] + data[1]; + } + else if(color->colortype == LCT_RGB) + { + /*error: this chunk must be 6 bytes for RGB image*/ + if(chunkLength != 6) return 41; + + color->key_defined = 1; + color->key_r = 256u * data[0] + data[1]; + color->key_g = 256u * data[2] + data[3]; + color->key_b = 256u * data[4] + data[5]; + } + else return 42; /*error: tRNS chunk not allowed for other color models*/ + + return 0; /* OK */ +} + + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS +/*background color chunk (bKGD)*/ +static unsigned readChunk_bKGD(LodePNGInfo* info, const unsigned char* data, size_t chunkLength) +{ + if(info->color.colortype == LCT_PALETTE) + { + /*error: this chunk must be 1 byte for indexed color image*/ + if(chunkLength != 1) return 43; + + info->background_defined = 1; + info->background_r = info->background_g = 
info->background_b = data[0]; + } + else if(info->color.colortype == LCT_GREY || info->color.colortype == LCT_GREY_ALPHA) + { + /*error: this chunk must be 2 bytes for greyscale image*/ + if(chunkLength != 2) return 44; + + info->background_defined = 1; + info->background_r = info->background_g = info->background_b = 256u * data[0] + data[1]; + } + else if(info->color.colortype == LCT_RGB || info->color.colortype == LCT_RGBA) + { + /*error: this chunk must be 6 bytes for greyscale image*/ + if(chunkLength != 6) return 45; + + info->background_defined = 1; + info->background_r = 256u * data[0] + data[1]; + info->background_g = 256u * data[2] + data[3]; + info->background_b = 256u * data[4] + data[5]; + } + + return 0; /* OK */ +} + +/*text chunk (tEXt)*/ +static unsigned readChunk_tEXt(LodePNGInfo* info, const unsigned char* data, size_t chunkLength) +{ + unsigned error = 0; + char *key = 0, *str = 0; + unsigned i; + + while(!error) /*not really a while loop, only used to break on error*/ + { + unsigned length, string2_begin; + + length = 0; + while(length < chunkLength && data[length] != 0) length++; + /*even though it's not allowed by the standard, no error is thrown if + there's no null termination char, if the text is empty*/ + if(length < 1 || length > 79) CERROR_BREAK(error, 89); /*keyword too short or long*/ + + key = (char*)lodepng_malloc(length + 1); + if(!key) CERROR_BREAK(error, 83); /*alloc fail*/ + + key[length] = 0; + for(i = 0; i < length; i++) key[i] = (char)data[i]; + + string2_begin = length + 1; /*skip keyword null terminator*/ + + length = chunkLength < string2_begin ? 0 : chunkLength - string2_begin; + str = (char*)lodepng_malloc(length + 1); + if(!str) CERROR_BREAK(error, 83); /*alloc fail*/ + + str[length] = 0; + for(i = 0; i < length; i++) str[i] = (char)data[string2_begin + i]; + + error = lodepng_add_text(info, key, str); + + break; + } + + lodepng_free(key); + lodepng_free(str); + + return error; +} + +/*compressed text chunk (zTXt)*/ +static unsigned readChunk_zTXt(LodePNGInfo* info, const LodePNGDecompressSettings* zlibsettings, + const unsigned char* data, size_t chunkLength) +{ + unsigned error = 0; + unsigned i; + + unsigned length, string2_begin; + char *key = 0; + ucvector decoded; + + ucvector_init(&decoded); + + while(!error) /*not really a while loop, only used to break on error*/ + { + for(length = 0; length < chunkLength && data[length] != 0; length++) ; + if(length + 2 >= chunkLength) CERROR_BREAK(error, 75); /*no null termination, corrupt?*/ + if(length < 1 || length > 79) CERROR_BREAK(error, 89); /*keyword too short or long*/ + + key = (char*)lodepng_malloc(length + 1); + if(!key) CERROR_BREAK(error, 83); /*alloc fail*/ + + key[length] = 0; + for(i = 0; i < length; i++) key[i] = (char)data[i]; + + if(data[length + 1] != 0) CERROR_BREAK(error, 72); /*the 0 byte indicating compression must be 0*/ + + string2_begin = length + 2; + if(string2_begin > chunkLength) CERROR_BREAK(error, 75); /*no null termination, corrupt?*/ + + length = chunkLength - string2_begin; + /*will fail if zlib error, e.g. 
if length is too small*/ + error = zlib_decompress(&decoded.data, &decoded.size, + (unsigned char*)(&data[string2_begin]), + length, zlibsettings); + if(error) break; + ucvector_push_back(&decoded, 0); + + error = lodepng_add_text(info, key, (char*)decoded.data); + + break; + } + + lodepng_free(key); + ucvector_cleanup(&decoded); + + return error; +} + +/*international text chunk (iTXt)*/ +static unsigned readChunk_iTXt(LodePNGInfo* info, const LodePNGDecompressSettings* zlibsettings, + const unsigned char* data, size_t chunkLength) +{ + unsigned error = 0; + unsigned i; + + unsigned length, begin, compressed; + char *key = 0, *langtag = 0, *transkey = 0; + ucvector decoded; + ucvector_init(&decoded); + + while(!error) /*not really a while loop, only used to break on error*/ + { + /*Quick check if the chunk length isn't too small. Even without check + it'd still fail with other error checks below if it's too short. This just gives a different error code.*/ + if(chunkLength < 5) CERROR_BREAK(error, 30); /*iTXt chunk too short*/ + + /*read the key*/ + for(length = 0; length < chunkLength && data[length] != 0; length++) ; + if(length + 3 >= chunkLength) CERROR_BREAK(error, 75); /*no null termination char, corrupt?*/ + if(length < 1 || length > 79) CERROR_BREAK(error, 89); /*keyword too short or long*/ + + key = (char*)lodepng_malloc(length + 1); + if(!key) CERROR_BREAK(error, 83); /*alloc fail*/ + + key[length] = 0; + for(i = 0; i < length; i++) key[i] = (char)data[i]; + + /*read the compression method*/ + compressed = data[length + 1]; + if(data[length + 2] != 0) CERROR_BREAK(error, 72); /*the 0 byte indicating compression must be 0*/ + + /*even though it's not allowed by the standard, no error is thrown if + there's no null termination char, if the text is empty for the next 3 texts*/ + + /*read the langtag*/ + begin = length + 3; + length = 0; + for(i = begin; i < chunkLength && data[i] != 0; i++) length++; + + langtag = (char*)lodepng_malloc(length + 1); + if(!langtag) CERROR_BREAK(error, 83); /*alloc fail*/ + + langtag[length] = 0; + for(i = 0; i < length; i++) langtag[i] = (char)data[begin + i]; + + /*read the transkey*/ + begin += length + 1; + length = 0; + for(i = begin; i < chunkLength && data[i] != 0; i++) length++; + + transkey = (char*)lodepng_malloc(length + 1); + if(!transkey) CERROR_BREAK(error, 83); /*alloc fail*/ + + transkey[length] = 0; + for(i = 0; i < length; i++) transkey[i] = (char)data[begin + i]; + + /*read the actual text*/ + begin += length + 1; + + length = chunkLength < begin ? 0 : chunkLength - begin; + + if(compressed) + { + /*will fail if zlib error, e.g. 
if length is too small*/ + error = zlib_decompress(&decoded.data, &decoded.size, + (unsigned char*)(&data[begin]), + length, zlibsettings); + if(error) break; + if(decoded.allocsize < decoded.size) decoded.allocsize = decoded.size; + ucvector_push_back(&decoded, 0); + } + else + { + if(!ucvector_resize(&decoded, length + 1)) CERROR_BREAK(error, 83 /*alloc fail*/); + + decoded.data[length] = 0; + for(i = 0; i < length; i++) decoded.data[i] = data[begin + i]; + } + + error = lodepng_add_itext(info, key, langtag, transkey, (char*)decoded.data); + + break; + } + + lodepng_free(key); + lodepng_free(langtag); + lodepng_free(transkey); + ucvector_cleanup(&decoded); + + return error; +} + +static unsigned readChunk_tIME(LodePNGInfo* info, const unsigned char* data, size_t chunkLength) +{ + if(chunkLength != 7) return 73; /*invalid tIME chunk size*/ + + info->time_defined = 1; + info->time.year = 256u * data[0] + data[1]; + info->time.month = data[2]; + info->time.day = data[3]; + info->time.hour = data[4]; + info->time.minute = data[5]; + info->time.second = data[6]; + + return 0; /* OK */ +} + +static unsigned readChunk_pHYs(LodePNGInfo* info, const unsigned char* data, size_t chunkLength) +{ + if(chunkLength != 9) return 74; /*invalid pHYs chunk size*/ + + info->phys_defined = 1; + info->phys_x = 16777216u * data[0] + 65536u * data[1] + 256u * data[2] + data[3]; + info->phys_y = 16777216u * data[4] + 65536u * data[5] + 256u * data[6] + data[7]; + info->phys_unit = data[8]; + + return 0; /* OK */ +} +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +/*read a PNG, the result will be in the same color type as the PNG (hence "generic")*/ +static void decodeGeneric(unsigned char** out, unsigned* w, unsigned* h, + LodePNGState* state, + const unsigned char* in, size_t insize) +{ + unsigned char IEND = 0; + const unsigned char* chunk; + size_t i; + ucvector idat; /*the data from idat chunks*/ + ucvector scanlines; + size_t predict; + + /*for unknown chunk order*/ + unsigned unknown = 0; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + unsigned critical_pos = 1; /*1 = after IHDR, 2 = after PLTE, 3 = after IDAT*/ +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + + /*provide some proper output values if error will happen*/ + *out = 0; + + state->error = lodepng_inspect(w, h, state, in, insize); /*reads header and resets other parameters in state->info_png*/ + if(state->error) return; + + ucvector_init(&idat); + chunk = &in[33]; /*first byte of the first chunk after the header*/ + + /*loop through the chunks, ignoring unknown chunks and stopping at IEND chunk. 
+ IDAT data is put at the start of the in buffer*/ + while(!IEND && !state->error) + { + unsigned chunkLength; + const unsigned char* data; /*the data in the chunk*/ + + /*error: size of the in buffer too small to contain next chunk*/ + if((size_t)((chunk - in) + 12) > insize || chunk < in) CERROR_BREAK(state->error, 30); + + /*length of the data of the chunk, excluding the length bytes, chunk type and CRC bytes*/ + chunkLength = lodepng_chunk_length(chunk); + /*error: chunk length larger than the max PNG chunk size*/ + if(chunkLength > 2147483647) CERROR_BREAK(state->error, 63); + + if((size_t)((chunk - in) + chunkLength + 12) > insize || (chunk + chunkLength + 12) < in) + { + CERROR_BREAK(state->error, 64); /*error: size of the in buffer too small to contain next chunk*/ + } + + data = lodepng_chunk_data_const(chunk); + + /*IDAT chunk, containing compressed image data*/ + if(lodepng_chunk_type_equals(chunk, "IDAT")) + { + size_t oldsize = idat.size; + if(!ucvector_resize(&idat, oldsize + chunkLength)) CERROR_BREAK(state->error, 83 /*alloc fail*/); + for(i = 0; i < chunkLength; i++) idat.data[oldsize + i] = data[i]; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + critical_pos = 3; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + } + /*IEND chunk*/ + else if(lodepng_chunk_type_equals(chunk, "IEND")) + { + IEND = 1; + } + /*palette chunk (PLTE)*/ + else if(lodepng_chunk_type_equals(chunk, "PLTE")) + { + state->error = readChunk_PLTE(&state->info_png.color, data, chunkLength); + if(state->error) break; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + critical_pos = 2; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + } + /*palette transparency chunk (tRNS)*/ + else if(lodepng_chunk_type_equals(chunk, "tRNS")) + { + state->error = readChunk_tRNS(&state->info_png.color, data, chunkLength); + if(state->error) break; + } +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /*background color chunk (bKGD)*/ + else if(lodepng_chunk_type_equals(chunk, "bKGD")) + { + state->error = readChunk_bKGD(&state->info_png, data, chunkLength); + if(state->error) break; + } + /*text chunk (tEXt)*/ + else if(lodepng_chunk_type_equals(chunk, "tEXt")) + { + if(state->decoder.read_text_chunks) + { + state->error = readChunk_tEXt(&state->info_png, data, chunkLength); + if(state->error) break; + } + } + /*compressed text chunk (zTXt)*/ + else if(lodepng_chunk_type_equals(chunk, "zTXt")) + { + if(state->decoder.read_text_chunks) + { + state->error = readChunk_zTXt(&state->info_png, &state->decoder.zlibsettings, data, chunkLength); + if(state->error) break; + } + } + /*international text chunk (iTXt)*/ + else if(lodepng_chunk_type_equals(chunk, "iTXt")) + { + if(state->decoder.read_text_chunks) + { + state->error = readChunk_iTXt(&state->info_png, &state->decoder.zlibsettings, data, chunkLength); + if(state->error) break; + } + } + else if(lodepng_chunk_type_equals(chunk, "tIME")) + { + state->error = readChunk_tIME(&state->info_png, data, chunkLength); + if(state->error) break; + } + else if(lodepng_chunk_type_equals(chunk, "pHYs")) + { + state->error = readChunk_pHYs(&state->info_png, data, chunkLength); + if(state->error) break; + } +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + else /*it's not an implemented chunk type, so ignore it: skip over the data*/ + { + /*error: unknown critical chunk (5th bit of first byte of chunk type is 0)*/ + if(!lodepng_chunk_ancillary(chunk)) CERROR_BREAK(state->error, 69); + + unknown = 1; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + if(state->decoder.remember_unknown_chunks) + { + state->error = 
lodepng_chunk_append(&state->info_png.unknown_chunks_data[critical_pos - 1], + &state->info_png.unknown_chunks_size[critical_pos - 1], chunk); + if(state->error) break; + } +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + } + + if(!state->decoder.ignore_crc && !unknown) /*check CRC if wanted, only on known chunk types*/ + { + if(lodepng_chunk_check_crc(chunk)) CERROR_BREAK(state->error, 57); /*invalid CRC*/ + } + + if(!IEND) chunk = lodepng_chunk_next_const(chunk); + } + + ucvector_init(&scanlines); + /*predict output size, to allocate exact size for output buffer to avoid more dynamic allocation. + The prediction is currently not correct for interlaced PNG images.*/ + predict = lodepng_get_raw_size_idat(*w, *h, &state->info_png.color) + *h; + if(!state->error && !ucvector_reserve(&scanlines, predict)) state->error = 83; /*alloc fail*/ + if(!state->error) + { + state->error = zlib_decompress(&scanlines.data, &scanlines.size, idat.data, + idat.size, &state->decoder.zlibsettings); + } + ucvector_cleanup(&idat); + + if(!state->error) + { + ucvector outv; + ucvector_init(&outv); + if(!ucvector_resizev(&outv, + lodepng_get_raw_size(*w, *h, &state->info_png.color), 0)) state->error = 83; /*alloc fail*/ + if(!state->error) state->error = postProcessScanlines(outv.data, scanlines.data, *w, *h, &state->info_png); + *out = outv.data; + } + ucvector_cleanup(&scanlines); +} + +unsigned lodepng_decode(unsigned char** out, unsigned* w, unsigned* h, + LodePNGState* state, + const unsigned char* in, size_t insize) +{ + *out = 0; + decodeGeneric(out, w, h, state, in, insize); + if(state->error) return state->error; + if(!state->decoder.color_convert || lodepng_color_mode_equal(&state->info_raw, &state->info_png.color)) + { + /*same color type, no copying or converting of data needed*/ + /*store the info_png color settings on the info_raw so that the info_raw still reflects what colortype + the raw image has to the end user*/ + if(!state->decoder.color_convert) + { + state->error = lodepng_color_mode_copy(&state->info_raw, &state->info_png.color); + if(state->error) return state->error; + } + } + else + { + /*color conversion needed; sort of copy of the data*/ + unsigned char* data = *out; + size_t outsize; + + /*TODO: check if this works according to the statement in the documentation: "The converter can convert + from greyscale input color type, to 8-bit greyscale or greyscale with alpha"*/ + if(!(state->info_raw.colortype == LCT_RGB || state->info_raw.colortype == LCT_RGBA) + && !(state->info_raw.bitdepth == 8)) + { + return 56; /*unsupported color mode conversion*/ + } + + outsize = lodepng_get_raw_size(*w, *h, &state->info_raw); + *out = (unsigned char*)lodepng_malloc(outsize); + if(!(*out)) + { + state->error = 83; /*alloc fail*/ + } + else state->error = lodepng_convert(*out, data, &state->info_raw, + &state->info_png.color, *w, *h); + lodepng_free(data); + } + return state->error; +} + +unsigned lodepng_decode_memory(unsigned char** out, unsigned* w, unsigned* h, const unsigned char* in, + size_t insize, LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned error; + LodePNGState state; + lodepng_state_init(&state); + state.info_raw.colortype = colortype; + state.info_raw.bitdepth = bitdepth; + error = lodepng_decode(out, w, h, &state, in, insize); + lodepng_state_cleanup(&state); + return error; +} + +unsigned lodepng_decode32(unsigned char** out, unsigned* w, unsigned* h, const unsigned char* in, size_t insize) +{ + return lodepng_decode_memory(out, w, h, in, insize, LCT_RGBA, 8); +} + 
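+/*
+Illustrative usage sketch (not part of the upstream lodepng sources): the
+convenience wrappers above and below decode a whole PNG straight to 8-bit RGBA
+or RGB. A minimal standalone caller, assuming a file named "example.png"
+exists next to the executable, could look like this:
+
+  #include <stdio.h>
+  #include <stdlib.h>
+  #include "lodepng.h"
+
+  int main(void)
+  {
+    unsigned char* image = 0;
+    unsigned w = 0, h = 0;
+    unsigned error = lodepng_decode32_file(&image, &w, &h, "example.png");
+    if(error) printf("decode failed with error code %u\n", error);
+    else printf("decoded %ux%u RGBA image (%u bytes)\n", w, h, w * h * 4);
+    free(image); // the decoder allocates the pixel buffer; the caller frees it
+    return 0;
+  }
+
+lodepng_decode32_file requires LODEPNG_COMPILE_DISK (enabled by default); the
+in-memory variants above have no file system dependency.
+*/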
+unsigned lodepng_decode24(unsigned char** out, unsigned* w, unsigned* h, const unsigned char* in, size_t insize) +{ + return lodepng_decode_memory(out, w, h, in, insize, LCT_RGB, 8); +} + +#ifdef LODEPNG_COMPILE_DISK +unsigned lodepng_decode_file(unsigned char** out, unsigned* w, unsigned* h, const char* filename, + LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned char* buffer; + size_t buffersize; + unsigned error; + error = lodepng_load_file(&buffer, &buffersize, filename); + if(!error) error = lodepng_decode_memory(out, w, h, buffer, buffersize, colortype, bitdepth); + lodepng_free(buffer); + return error; +} + +unsigned lodepng_decode32_file(unsigned char** out, unsigned* w, unsigned* h, const char* filename) +{ + return lodepng_decode_file(out, w, h, filename, LCT_RGBA, 8); +} + +unsigned lodepng_decode24_file(unsigned char** out, unsigned* w, unsigned* h, const char* filename) +{ + return lodepng_decode_file(out, w, h, filename, LCT_RGB, 8); +} +#endif /*LODEPNG_COMPILE_DISK*/ + +void lodepng_decoder_settings_init(LodePNGDecoderSettings* settings) +{ + settings->color_convert = 1; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + settings->read_text_chunks = 1; + settings->remember_unknown_chunks = 0; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + settings->ignore_crc = 0; + lodepng_decompress_settings_init(&settings->zlibsettings); +} + +#endif /*LODEPNG_COMPILE_DECODER*/ + +#if defined(LODEPNG_COMPILE_DECODER) || defined(LODEPNG_COMPILE_ENCODER) + +void lodepng_state_init(LodePNGState* state) +{ +#ifdef LODEPNG_COMPILE_DECODER + lodepng_decoder_settings_init(&state->decoder); +#endif /*LODEPNG_COMPILE_DECODER*/ +#ifdef LODEPNG_COMPILE_ENCODER + lodepng_encoder_settings_init(&state->encoder); +#endif /*LODEPNG_COMPILE_ENCODER*/ + lodepng_color_mode_init(&state->info_raw); + lodepng_info_init(&state->info_png); + state->error = 1; +} + +void lodepng_state_cleanup(LodePNGState* state) +{ + lodepng_color_mode_cleanup(&state->info_raw); + lodepng_info_cleanup(&state->info_png); +} + +void lodepng_state_copy(LodePNGState* dest, const LodePNGState* source) +{ + lodepng_state_cleanup(dest); + *dest = *source; + lodepng_color_mode_init(&dest->info_raw); + lodepng_info_init(&dest->info_png); + dest->error = lodepng_color_mode_copy(&dest->info_raw, &source->info_raw); if(dest->error) return; + dest->error = lodepng_info_copy(&dest->info_png, &source->info_png); if(dest->error) return; +} + +#endif /* defined(LODEPNG_COMPILE_DECODER) || defined(LODEPNG_COMPILE_ENCODER) */ + +#ifdef LODEPNG_COMPILE_ENCODER + +/* ////////////////////////////////////////////////////////////////////////// */ +/* / PNG Encoder / */ +/* ////////////////////////////////////////////////////////////////////////// */ + +/*chunkName must be string of 4 characters*/ +static unsigned addChunk(ucvector* out, const char* chunkName, const unsigned char* data, size_t length) +{ + CERROR_TRY_RETURN(lodepng_chunk_create(&out->data, &out->size, (unsigned)length, chunkName, data)); + out->allocsize = out->size; /*fix the allocsize again*/ + return 0; +} + +static void writeSignature(ucvector* out) +{ + /*8 bytes PNG signature, aka the magic bytes*/ + ucvector_push_back(out, 137); + ucvector_push_back(out, 80); + ucvector_push_back(out, 78); + ucvector_push_back(out, 71); + ucvector_push_back(out, 13); + ucvector_push_back(out, 10); + ucvector_push_back(out, 26); + ucvector_push_back(out, 10); +} + +static unsigned addChunk_IHDR(ucvector* out, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth, unsigned 
interlace_method) +{ + unsigned error = 0; + ucvector header; + ucvector_init(&header); + + lodepng_add32bitInt(&header, w); /*width*/ + lodepng_add32bitInt(&header, h); /*height*/ + ucvector_push_back(&header, (unsigned char)bitdepth); /*bit depth*/ + ucvector_push_back(&header, (unsigned char)colortype); /*color type*/ + ucvector_push_back(&header, 0); /*compression method*/ + ucvector_push_back(&header, 0); /*filter method*/ + ucvector_push_back(&header, interlace_method); /*interlace method*/ + + error = addChunk(out, "IHDR", header.data, header.size); + ucvector_cleanup(&header); + + return error; +} + +static unsigned addChunk_PLTE(ucvector* out, const LodePNGColorMode* info) +{ + unsigned error = 0; + size_t i; + ucvector PLTE; + ucvector_init(&PLTE); + for(i = 0; i < info->palettesize * 4; i++) + { + /*add all channels except alpha channel*/ + if(i % 4 != 3) ucvector_push_back(&PLTE, info->palette[i]); + } + error = addChunk(out, "PLTE", PLTE.data, PLTE.size); + ucvector_cleanup(&PLTE); + + return error; +} + +static unsigned addChunk_tRNS(ucvector* out, const LodePNGColorMode* info) +{ + unsigned error = 0; + size_t i; + ucvector tRNS; + ucvector_init(&tRNS); + if(info->colortype == LCT_PALETTE) + { + size_t amount = info->palettesize; + /*the tail of palette values that all have 255 as alpha, does not have to be encoded*/ + for(i = info->palettesize; i > 0; i--) + { + if(info->palette[4 * (i - 1) + 3] == 255) amount--; + else break; + } + /*add only alpha channel*/ + for(i = 0; i < amount; i++) ucvector_push_back(&tRNS, info->palette[4 * i + 3]); + } + else if(info->colortype == LCT_GREY) + { + if(info->key_defined) + { + ucvector_push_back(&tRNS, (unsigned char)(info->key_r / 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_r % 256)); + } + } + else if(info->colortype == LCT_RGB) + { + if(info->key_defined) + { + ucvector_push_back(&tRNS, (unsigned char)(info->key_r / 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_r % 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_g / 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_g % 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_b / 256)); + ucvector_push_back(&tRNS, (unsigned char)(info->key_b % 256)); + } + } + + error = addChunk(out, "tRNS", tRNS.data, tRNS.size); + ucvector_cleanup(&tRNS); + + return error; +} + +static unsigned addChunk_IDAT(ucvector* out, const unsigned char* data, size_t datasize, + LodePNGCompressSettings* zlibsettings) +{ + ucvector zlibdata; + unsigned error = 0; + + /*compress with the Zlib compressor*/ + ucvector_init(&zlibdata); + error = zlib_compress(&zlibdata.data, &zlibdata.size, data, datasize, zlibsettings); + if(!error) error = addChunk(out, "IDAT", zlibdata.data, zlibdata.size); + ucvector_cleanup(&zlibdata); + + return error; +} + +static unsigned addChunk_IEND(ucvector* out) +{ + unsigned error = 0; + error = addChunk(out, "IEND", 0, 0); + return error; +} + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + +static unsigned addChunk_tEXt(ucvector* out, const char* keyword, const char* textstring) +{ + unsigned error = 0; + size_t i; + ucvector text; + ucvector_init(&text); + for(i = 0; keyword[i] != 0; i++) ucvector_push_back(&text, (unsigned char)keyword[i]); + if(i < 1 || i > 79) return 89; /*error: invalid keyword size*/ + ucvector_push_back(&text, 0); /*0 termination char*/ + for(i = 0; textstring[i] != 0; i++) ucvector_push_back(&text, (unsigned char)textstring[i]); + error = addChunk(out, "tEXt", text.data, text.size); + 
ucvector_cleanup(&text); + + return error; +} + +static unsigned addChunk_zTXt(ucvector* out, const char* keyword, const char* textstring, + LodePNGCompressSettings* zlibsettings) +{ + unsigned error = 0; + ucvector data, compressed; + size_t i, textsize = strlen(textstring); + + ucvector_init(&data); + ucvector_init(&compressed); + for(i = 0; keyword[i] != 0; i++) ucvector_push_back(&data, (unsigned char)keyword[i]); + if(i < 1 || i > 79) return 89; /*error: invalid keyword size*/ + ucvector_push_back(&data, 0); /*0 termination char*/ + ucvector_push_back(&data, 0); /*compression method: 0*/ + + error = zlib_compress(&compressed.data, &compressed.size, + (unsigned char*)textstring, textsize, zlibsettings); + if(!error) + { + for(i = 0; i < compressed.size; i++) ucvector_push_back(&data, compressed.data[i]); + error = addChunk(out, "zTXt", data.data, data.size); + } + + ucvector_cleanup(&compressed); + ucvector_cleanup(&data); + return error; +} + +static unsigned addChunk_iTXt(ucvector* out, unsigned compressed, const char* keyword, const char* langtag, + const char* transkey, const char* textstring, LodePNGCompressSettings* zlibsettings) +{ + unsigned error = 0; + ucvector data; + size_t i, textsize = strlen(textstring); + + ucvector_init(&data); + + for(i = 0; keyword[i] != 0; i++) ucvector_push_back(&data, (unsigned char)keyword[i]); + if(i < 1 || i > 79) return 89; /*error: invalid keyword size*/ + ucvector_push_back(&data, 0); /*null termination char*/ + ucvector_push_back(&data, compressed ? 1 : 0); /*compression flag*/ + ucvector_push_back(&data, 0); /*compression method*/ + for(i = 0; langtag[i] != 0; i++) ucvector_push_back(&data, (unsigned char)langtag[i]); + ucvector_push_back(&data, 0); /*null termination char*/ + for(i = 0; transkey[i] != 0; i++) ucvector_push_back(&data, (unsigned char)transkey[i]); + ucvector_push_back(&data, 0); /*null termination char*/ + + if(compressed) + { + ucvector compressed_data; + ucvector_init(&compressed_data); + error = zlib_compress(&compressed_data.data, &compressed_data.size, + (unsigned char*)textstring, textsize, zlibsettings); + if(!error) + { + for(i = 0; i < compressed_data.size; i++) ucvector_push_back(&data, compressed_data.data[i]); + } + ucvector_cleanup(&compressed_data); + } + else /*not compressed*/ + { + for(i = 0; textstring[i] != 0; i++) ucvector_push_back(&data, (unsigned char)textstring[i]); + } + + if(!error) error = addChunk(out, "iTXt", data.data, data.size); + ucvector_cleanup(&data); + return error; +} + +static unsigned addChunk_bKGD(ucvector* out, const LodePNGInfo* info) +{ + unsigned error = 0; + ucvector bKGD; + ucvector_init(&bKGD); + if(info->color.colortype == LCT_GREY || info->color.colortype == LCT_GREY_ALPHA) + { + ucvector_push_back(&bKGD, (unsigned char)(info->background_r / 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_r % 256)); + } + else if(info->color.colortype == LCT_RGB || info->color.colortype == LCT_RGBA) + { + ucvector_push_back(&bKGD, (unsigned char)(info->background_r / 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_r % 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_g / 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_g % 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_b / 256)); + ucvector_push_back(&bKGD, (unsigned char)(info->background_b % 256)); + } + else if(info->color.colortype == LCT_PALETTE) + { + ucvector_push_back(&bKGD, (unsigned char)(info->background_r % 256)); /*palette index*/ 
+ } + + error = addChunk(out, "bKGD", bKGD.data, bKGD.size); + ucvector_cleanup(&bKGD); + + return error; +} + +static unsigned addChunk_tIME(ucvector* out, const LodePNGTime* time) +{ + unsigned error = 0; + unsigned char* data = (unsigned char*)lodepng_malloc(7); + if(!data) return 83; /*alloc fail*/ + data[0] = (unsigned char)(time->year / 256); + data[1] = (unsigned char)(time->year % 256); + data[2] = (unsigned char)time->month; + data[3] = (unsigned char)time->day; + data[4] = (unsigned char)time->hour; + data[5] = (unsigned char)time->minute; + data[6] = (unsigned char)time->second; + error = addChunk(out, "tIME", data, 7); + lodepng_free(data); + return error; +} + +static unsigned addChunk_pHYs(ucvector* out, const LodePNGInfo* info) +{ + unsigned error = 0; + ucvector data; + ucvector_init(&data); + + lodepng_add32bitInt(&data, info->phys_x); + lodepng_add32bitInt(&data, info->phys_y); + ucvector_push_back(&data, info->phys_unit); + + error = addChunk(out, "pHYs", data.data, data.size); + ucvector_cleanup(&data); + + return error; +} + +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +static void filterScanline(unsigned char* out, const unsigned char* scanline, const unsigned char* prevline, + size_t length, size_t bytewidth, unsigned char filterType) +{ + size_t i; + switch(filterType) + { + case 0: /*None*/ + for(i = 0; i < length; i++) out[i] = scanline[i]; + break; + case 1: /*Sub*/ + if(prevline) + { + for(i = 0; i < bytewidth; i++) out[i] = scanline[i]; + for(i = bytewidth; i < length; i++) out[i] = scanline[i] - scanline[i - bytewidth]; + } + else + { + for(i = 0; i < bytewidth; i++) out[i] = scanline[i]; + for(i = bytewidth; i < length; i++) out[i] = scanline[i] - scanline[i - bytewidth]; + } + break; + case 2: /*Up*/ + if(prevline) + { + for(i = 0; i < length; i++) out[i] = scanline[i] - prevline[i]; + } + else + { + for(i = 0; i < length; i++) out[i] = scanline[i]; + } + break; + case 3: /*Average*/ + if(prevline) + { + for(i = 0; i < bytewidth; i++) out[i] = scanline[i] - prevline[i] / 2; + for(i = bytewidth; i < length; i++) out[i] = scanline[i] - ((scanline[i - bytewidth] + prevline[i]) / 2); + } + else + { + for(i = 0; i < bytewidth; i++) out[i] = scanline[i]; + for(i = bytewidth; i < length; i++) out[i] = scanline[i] - scanline[i - bytewidth] / 2; + } + break; + case 4: /*Paeth*/ + if(prevline) + { + /*paethPredictor(0, prevline[i], 0) is always prevline[i]*/ + for(i = 0; i < bytewidth; i++) out[i] = (scanline[i] - prevline[i]); + for(i = bytewidth; i < length; i++) + { + out[i] = (scanline[i] - paethPredictor(scanline[i - bytewidth], prevline[i], prevline[i - bytewidth])); + } + } + else + { + for(i = 0; i < bytewidth; i++) out[i] = scanline[i]; + /*paethPredictor(scanline[i - bytewidth], 0, 0) is always scanline[i - bytewidth]*/ + for(i = bytewidth; i < length; i++) out[i] = (scanline[i] - scanline[i - bytewidth]); + } + break; + default: return; /*unexisting filter type given*/ + } +} + +/* log2 approximation. A slight bit faster than std::log. 
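+   It is used below by the LFS_ENTROPY filter heuristic to estimate per-scanline
+   entropy, so low precision suffices.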
*/ +static float flog2(float f) +{ + float result = 0; + while(f > 32) { result += 4; f /= 16; } + while(f > 2) { result++; f /= 2; } + return result + 1.442695f * (f * f * f / 3 - 3 * f * f / 2 + 3 * f - 1.83333f); +} + +static unsigned filter(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, + const LodePNGColorMode* info, const LodePNGEncoderSettings* settings) +{ + /* + For PNG filter method 0 + out must be a buffer with as size: h + (w * h * bpp + 7) / 8, because there are + the scanlines with 1 extra byte per scanline + */ + + unsigned bpp = lodepng_get_bpp(info); + /*the width of a scanline in bytes, not including the filter type*/ + size_t linebytes = (w * bpp + 7) / 8; + /*bytewidth is used for filtering, is 1 when bpp < 8, number of bytes per pixel otherwise*/ + size_t bytewidth = (bpp + 7) / 8; + const unsigned char* prevline = 0; + unsigned x, y; + unsigned error = 0; + LodePNGFilterStrategy strategy = settings->filter_strategy; + + /* + There is a heuristic called the minimum sum of absolute differences heuristic, suggested by the PNG standard: + * If the image type is Palette, or the bit depth is smaller than 8, then do not filter the image (i.e. + use fixed filtering, with the filter None). + * (The other case) If the image type is Grayscale or RGB (with or without Alpha), and the bit depth is + not smaller than 8, then use adaptive filtering heuristic as follows: independently for each row, apply + all five filters and select the filter that produces the smallest sum of absolute values per row. + This heuristic is used if filter strategy is LFS_MINSUM and filter_palette_zero is true. + + If filter_palette_zero is true and filter_strategy is not LFS_MINSUM, the above heuristic is followed, + but for "the other case", whatever strategy filter_strategy is set to instead of the minimum sum + heuristic is used. + */ + if(settings->filter_palette_zero && + (info->colortype == LCT_PALETTE || info->bitdepth < 8)) strategy = LFS_ZERO; + + if(bpp == 0) return 31; /*error: invalid color type*/ + + if(strategy == LFS_ZERO) + { + for(y = 0; y < h; y++) + { + size_t outindex = (1 + linebytes) * y; /*the extra filterbyte added to each row*/ + size_t inindex = linebytes * y; + out[outindex] = 0; /*filter type byte*/ + filterScanline(&out[outindex + 1], &in[inindex], prevline, linebytes, bytewidth, 0); + prevline = &in[inindex]; + } + } + else if(strategy == LFS_MINSUM) + { + /*adaptive filtering*/ + size_t sum[5]; + ucvector attempt[5]; /*five filtering attempts, one for each filter type*/ + size_t smallest = 0; + unsigned char type, bestType = 0; + + for(type = 0; type < 5; type++) + { + ucvector_init(&attempt[type]); + if(!ucvector_resize(&attempt[type], linebytes)) return 83; /*alloc fail*/ + } + + if(!error) + { + for(y = 0; y < h; y++) + { + /*try the 5 filter types*/ + for(type = 0; type < 5; type++) + { + filterScanline(attempt[type].data, &in[y * linebytes], prevline, linebytes, bytewidth, type); + + /*calculate the sum of the result*/ + sum[type] = 0; + if(type == 0) + { + for(x = 0; x < linebytes; x++) sum[type] += (unsigned char)(attempt[type].data[x]); + } + else + { + for(x = 0; x < linebytes; x++) + { + /*For differences, each byte should be treated as signed, values above 127 are negative + (converted to signed char). Filtertype 0 isn't a difference though, so use unsigned there. + This means filtertype 0 is almost never chosen, but that is justified.*/ + unsigned char s = attempt[type].data[x]; + sum[type] += s < 128 ? 
s : (255U - s); + } + } + + /*check if this is smallest sum (or if type == 0 it's the first case so always store the values)*/ + if(type == 0 || sum[type] < smallest) + { + bestType = type; + smallest = sum[type]; + } + } + + prevline = &in[y * linebytes]; + + /*now fill the out values*/ + out[y * (linebytes + 1)] = bestType; /*the first byte of a scanline will be the filter type*/ + for(x = 0; x < linebytes; x++) out[y * (linebytes + 1) + 1 + x] = attempt[bestType].data[x]; + } + } + + for(type = 0; type < 5; type++) ucvector_cleanup(&attempt[type]); + } + else if(strategy == LFS_ENTROPY) + { + float sum[5]; + ucvector attempt[5]; /*five filtering attempts, one for each filter type*/ + float smallest = 0; + unsigned type, bestType = 0; + unsigned count[256]; + + for(type = 0; type < 5; type++) + { + ucvector_init(&attempt[type]); + if(!ucvector_resize(&attempt[type], linebytes)) return 83; /*alloc fail*/ + } + + for(y = 0; y < h; y++) + { + /*try the 5 filter types*/ + for(type = 0; type < 5; type++) + { + filterScanline(attempt[type].data, &in[y * linebytes], prevline, linebytes, bytewidth, type); + for(x = 0; x < 256; x++) count[x] = 0; + for(x = 0; x < linebytes; x++) count[attempt[type].data[x]]++; + count[type]++; /*the filter type itself is part of the scanline*/ + sum[type] = 0; + for(x = 0; x < 256; x++) + { + float p = count[x] / (float)(linebytes + 1); + sum[type] += count[x] == 0 ? 0 : flog2(1 / p) * p; + } + /*check if this is smallest sum (or if type == 0 it's the first case so always store the values)*/ + if(type == 0 || sum[type] < smallest) + { + bestType = type; + smallest = sum[type]; + } + } + + prevline = &in[y * linebytes]; + + /*now fill the out values*/ + out[y * (linebytes + 1)] = bestType; /*the first byte of a scanline will be the filter type*/ + for(x = 0; x < linebytes; x++) out[y * (linebytes + 1) + 1 + x] = attempt[bestType].data[x]; + } + + for(type = 0; type < 5; type++) ucvector_cleanup(&attempt[type]); + } + else if(strategy == LFS_PREDEFINED) + { + for(y = 0; y < h; y++) + { + size_t outindex = (1 + linebytes) * y; /*the extra filterbyte added to each row*/ + size_t inindex = linebytes * y; + unsigned char type = settings->predefined_filters[y]; + out[outindex] = type; /*filter type byte*/ + filterScanline(&out[outindex + 1], &in[inindex], prevline, linebytes, bytewidth, type); + prevline = &in[inindex]; + } + } + else if(strategy == LFS_BRUTE_FORCE) + { + /*brute force filter chooser. + deflate the scanline after every filter attempt to see which one deflates best. + This is very slow and gives only slightly smaller, sometimes even larger, result*/ + size_t size[5]; + ucvector attempt[5]; /*five filtering attempts, one for each filter type*/ + size_t smallest = 0; + unsigned type = 0, bestType = 0; + unsigned char* dummy; + LodePNGCompressSettings zlibsettings = settings->zlibsettings; + /*use fixed tree on the attempts so that the tree is not adapted to the filtertype on purpose, + to simulate the true case where the tree is the same for the whole image. Sometimes it gives + better result with dynamic tree anyway. Using the fixed tree sometimes gives worse, but in rare + cases better compression. 
It does make this a bit less slow, so it's worth doing this.*/ + zlibsettings.btype = 1; + /*a custom encoder likely doesn't read the btype setting and is optimized for complete PNG + images only, so disable it*/ + zlibsettings.custom_zlib = 0; + zlibsettings.custom_deflate = 0; + for(type = 0; type < 5; type++) + { + ucvector_init(&attempt[type]); + ucvector_resize(&attempt[type], linebytes); /*todo: give error if resize failed*/ + } + for(y = 0; y < h; y++) /*try the 5 filter types*/ + { + for(type = 0; type < 5; type++) + { + unsigned testsize = attempt[type].size; + /*if(testsize > 8) testsize /= 8;*/ /*it already works good enough by testing a part of the row*/ + + filterScanline(attempt[type].data, &in[y * linebytes], prevline, linebytes, bytewidth, type); + size[type] = 0; + dummy = 0; + zlib_compress(&dummy, &size[type], attempt[type].data, testsize, &zlibsettings); + lodepng_free(dummy); + /*check if this is smallest size (or if type == 0 it's the first case so always store the values)*/ + if(type == 0 || size[type] < smallest) + { + bestType = type; + smallest = size[type]; + } + } + prevline = &in[y * linebytes]; + out[y * (linebytes + 1)] = bestType; /*the first byte of a scanline will be the filter type*/ + for(x = 0; x < linebytes; x++) out[y * (linebytes + 1) + 1 + x] = attempt[bestType].data[x]; + } + for(type = 0; type < 5; type++) ucvector_cleanup(&attempt[type]); + } + else return 88; /* unknown filter strategy */ + + return error; +} + +static void addPaddingBits(unsigned char* out, const unsigned char* in, + size_t olinebits, size_t ilinebits, unsigned h) +{ + /*The opposite of the removePaddingBits function + olinebits must be >= ilinebits*/ + unsigned y; + size_t diff = olinebits - ilinebits; + size_t obp = 0, ibp = 0; /*bit pointers*/ + for(y = 0; y < h; y++) + { + size_t x; + for(x = 0; x < ilinebits; x++) + { + unsigned char bit = readBitFromReversedStream(&ibp, in); + setBitOfReversedStream(&obp, out, bit); + } + /*obp += diff; --> no, fill in some value in the padding bits too, to avoid + "Use of uninitialised value of size ###" warning from valgrind*/ + for(x = 0; x < diff; x++) setBitOfReversedStream(&obp, out, 0); + } +} + +/* +in: non-interlaced image with size w*h +out: the same pixels, but re-ordered according to PNG's Adam7 interlacing, with + no padding bits between scanlines, but between reduced images so that each + reduced image starts at a byte. +bpp: bits per pixel +there are no padding bits, not between scanlines, not between reduced images +in has the following size in bits: w * h * bpp. 
+out is possibly bigger due to padding bits between reduced images +NOTE: comments about padding bits are only relevant if bpp < 8 +*/ +static void Adam7_interlace(unsigned char* out, const unsigned char* in, unsigned w, unsigned h, unsigned bpp) +{ + unsigned passw[7], passh[7]; + size_t filter_passstart[8], padded_passstart[8], passstart[8]; + unsigned i; + + Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp); + + if(bpp >= 8) + { + for(i = 0; i < 7; i++) + { + unsigned x, y, b; + size_t bytewidth = bpp / 8; + for(y = 0; y < passh[i]; y++) + for(x = 0; x < passw[i]; x++) + { + size_t pixelinstart = ((ADAM7_IY[i] + y * ADAM7_DY[i]) * w + ADAM7_IX[i] + x * ADAM7_DX[i]) * bytewidth; + size_t pixeloutstart = passstart[i] + (y * passw[i] + x) * bytewidth; + for(b = 0; b < bytewidth; b++) + { + out[pixeloutstart + b] = in[pixelinstart + b]; + } + } + } + } + else /*bpp < 8: Adam7 with pixels < 8 bit is a bit trickier: with bit pointers*/ + { + for(i = 0; i < 7; i++) + { + unsigned x, y, b; + unsigned ilinebits = bpp * passw[i]; + unsigned olinebits = bpp * w; + size_t obp, ibp; /*bit pointers (for out and in buffer)*/ + for(y = 0; y < passh[i]; y++) + for(x = 0; x < passw[i]; x++) + { + ibp = (ADAM7_IY[i] + y * ADAM7_DY[i]) * olinebits + (ADAM7_IX[i] + x * ADAM7_DX[i]) * bpp; + obp = (8 * passstart[i]) + (y * ilinebits + x * bpp); + for(b = 0; b < bpp; b++) + { + unsigned char bit = readBitFromReversedStream(&ibp, in); + setBitOfReversedStream(&obp, out, bit); + } + } + } + } +} + +/*out must be buffer big enough to contain uncompressed IDAT chunk data, and in must contain the full image. +return value is error**/ +static unsigned preProcessScanlines(unsigned char** out, size_t* outsize, const unsigned char* in, + unsigned w, unsigned h, + const LodePNGInfo* info_png, const LodePNGEncoderSettings* settings) +{ + /* + This function converts the pure 2D image with the PNG's colortype, into filtered-padded-interlaced data. 
Steps: + *) if no Adam7: 1) add padding bits (= posible extra bits per scanline if bpp < 8) 2) filter + *) if adam7: 1) Adam7_interlace 2) 7x add padding bits 3) 7x filter + */ + unsigned bpp = lodepng_get_bpp(&info_png->color); + unsigned error = 0; + + if(info_png->interlace_method == 0) + { + *outsize = h + (h * ((w * bpp + 7) / 8)); /*image size plus an extra byte per scanline + possible padding bits*/ + *out = (unsigned char*)lodepng_malloc(*outsize); + if(!(*out) && (*outsize)) error = 83; /*alloc fail*/ + + if(!error) + { + /*non multiple of 8 bits per scanline, padding bits needed per scanline*/ + if(bpp < 8 && w * bpp != ((w * bpp + 7) / 8) * 8) + { + unsigned char* padded = (unsigned char*)lodepng_malloc(h * ((w * bpp + 7) / 8)); + if(!padded) error = 83; /*alloc fail*/ + if(!error) + { + addPaddingBits(padded, in, ((w * bpp + 7) / 8) * 8, w * bpp, h); + error = filter(*out, padded, w, h, &info_png->color, settings); + } + lodepng_free(padded); + } + else + { + /*we can immediatly filter into the out buffer, no other steps needed*/ + error = filter(*out, in, w, h, &info_png->color, settings); + } + } + } + else /*interlace_method is 1 (Adam7)*/ + { + unsigned passw[7], passh[7]; + size_t filter_passstart[8], padded_passstart[8], passstart[8]; + unsigned char* adam7; + + Adam7_getpassvalues(passw, passh, filter_passstart, padded_passstart, passstart, w, h, bpp); + + *outsize = filter_passstart[7]; /*image size plus an extra byte per scanline + possible padding bits*/ + *out = (unsigned char*)lodepng_malloc(*outsize); + if(!(*out)) error = 83; /*alloc fail*/ + + adam7 = (unsigned char*)lodepng_malloc(passstart[7]); + if(!adam7 && passstart[7]) error = 83; /*alloc fail*/ + + if(!error) + { + unsigned i; + + Adam7_interlace(adam7, in, w, h, bpp); + for(i = 0; i < 7; i++) + { + if(bpp < 8) + { + unsigned char* padded = (unsigned char*)lodepng_malloc(padded_passstart[i + 1] - padded_passstart[i]); + if(!padded) ERROR_BREAK(83); /*alloc fail*/ + addPaddingBits(padded, &adam7[passstart[i]], + ((passw[i] * bpp + 7) / 8) * 8, passw[i] * bpp, passh[i]); + error = filter(&(*out)[filter_passstart[i]], padded, + passw[i], passh[i], &info_png->color, settings); + lodepng_free(padded); + } + else + { + error = filter(&(*out)[filter_passstart[i]], &adam7[padded_passstart[i]], + passw[i], passh[i], &info_png->color, settings); + } + + if(error) break; + } + } + + lodepng_free(adam7); + } + + return error; +} + +/* +palette must have 4 * palettesize bytes allocated, and given in format RGBARGBARGBARGBA... +returns 0 if the palette is opaque, +returns 1 if the palette has a single color with alpha 0 ==> color key +returns 2 if the palette is semi-translucent. 
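+lodepng_encode below only checks whether this returns non-zero: a tRNS chunk is
+written for a palette image exactly when the palette is not fully opaque.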
+*/ +static unsigned getPaletteTranslucency(const unsigned char* palette, size_t palettesize) +{ + size_t i; + unsigned key = 0; + unsigned r = 0, g = 0, b = 0; /*the value of the color with alpha 0, so long as color keying is possible*/ + for(i = 0; i < palettesize; i++) + { + if(!key && palette[4 * i + 3] == 0) + { + r = palette[4 * i + 0]; g = palette[4 * i + 1]; b = palette[4 * i + 2]; + key = 1; + i = (size_t)(-1); /*restart from beginning, to detect earlier opaque colors with key's value*/ + } + else if(palette[4 * i + 3] != 255) return 2; + /*when key, no opaque RGB may have key's RGB*/ + else if(key && r == palette[i * 4 + 0] && g == palette[i * 4 + 1] && b == palette[i * 4 + 2]) return 2; + } + return key; +} + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS +static unsigned addUnknownChunks(ucvector* out, unsigned char* data, size_t datasize) +{ + unsigned char* inchunk = data; + while((size_t)(inchunk - data) < datasize) + { + CERROR_TRY_RETURN(lodepng_chunk_append(&out->data, &out->size, inchunk)); + out->allocsize = out->size; /*fix the allocsize again*/ + inchunk = lodepng_chunk_next(inchunk); + } + return 0; +} +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +unsigned lodepng_encode(unsigned char** out, size_t* outsize, + const unsigned char* image, unsigned w, unsigned h, + LodePNGState* state) +{ + LodePNGInfo info; + ucvector outv; + unsigned char* data = 0; /*uncompressed version of the IDAT chunk data*/ + size_t datasize = 0; + + /*provide some proper output values if error will happen*/ + *out = 0; + *outsize = 0; + state->error = 0; + + lodepng_info_init(&info); + lodepng_info_copy(&info, &state->info_png); + + if((info.color.colortype == LCT_PALETTE || state->encoder.force_palette) + && (info.color.palettesize == 0 || info.color.palettesize > 256)) + { + state->error = 68; /*invalid palette size, it is only allowed to be 1-256*/ + return state->error; + } + + if(state->encoder.auto_convert) + { + state->error = lodepng_auto_choose_color(&info.color, image, w, h, &state->info_raw); + } + if(state->error) return state->error; + + if(state->encoder.zlibsettings.btype > 2) + { + CERROR_RETURN_ERROR(state->error, 61); /*error: unexisting btype*/ + } + if(state->info_png.interlace_method > 1) + { + CERROR_RETURN_ERROR(state->error, 71); /*error: unexisting interlace mode*/ + } + + state->error = checkColorValidity(info.color.colortype, info.color.bitdepth); + if(state->error) return state->error; /*error: unexisting color type given*/ + state->error = checkColorValidity(state->info_raw.colortype, state->info_raw.bitdepth); + if(state->error) return state->error; /*error: unexisting color type given*/ + + if(!lodepng_color_mode_equal(&state->info_raw, &info.color)) + { + unsigned char* converted; + size_t size = (w * h * lodepng_get_bpp(&info.color) + 7) / 8; + + converted = (unsigned char*)lodepng_malloc(size); + if(!converted && size) state->error = 83; /*alloc fail*/ + if(!state->error) + { + state->error = lodepng_convert(converted, image, &info.color, &state->info_raw, w, h); + } + if(!state->error) preProcessScanlines(&data, &datasize, converted, w, h, &info, &state->encoder); + lodepng_free(converted); + } + else preProcessScanlines(&data, &datasize, image, w, h, &info, &state->encoder); + + ucvector_init(&outv); + while(!state->error) /*while only executed once, to break on error*/ + { +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + size_t i; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + /*write signature and chunks*/ + writeSignature(&outv); + /*IHDR*/ + addChunk_IHDR(&outv, 
w, h, info.color.colortype, info.color.bitdepth, info.interlace_method); +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /*unknown chunks between IHDR and PLTE*/ + if(info.unknown_chunks_data[0]) + { + state->error = addUnknownChunks(&outv, info.unknown_chunks_data[0], info.unknown_chunks_size[0]); + if(state->error) break; + } +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + /*PLTE*/ + if(info.color.colortype == LCT_PALETTE) + { + addChunk_PLTE(&outv, &info.color); + } + if(state->encoder.force_palette && (info.color.colortype == LCT_RGB || info.color.colortype == LCT_RGBA)) + { + addChunk_PLTE(&outv, &info.color); + } + /*tRNS*/ + if(info.color.colortype == LCT_PALETTE && getPaletteTranslucency(info.color.palette, info.color.palettesize) != 0) + { + addChunk_tRNS(&outv, &info.color); + } + if((info.color.colortype == LCT_GREY || info.color.colortype == LCT_RGB) && info.color.key_defined) + { + addChunk_tRNS(&outv, &info.color); + } +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /*bKGD (must come between PLTE and the IDAt chunks*/ + if(info.background_defined) addChunk_bKGD(&outv, &info); + /*pHYs (must come before the IDAT chunks)*/ + if(info.phys_defined) addChunk_pHYs(&outv, &info); + + /*unknown chunks between PLTE and IDAT*/ + if(info.unknown_chunks_data[1]) + { + state->error = addUnknownChunks(&outv, info.unknown_chunks_data[1], info.unknown_chunks_size[1]); + if(state->error) break; + } +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + /*IDAT (multiple IDAT chunks must be consecutive)*/ + state->error = addChunk_IDAT(&outv, data, datasize, &state->encoder.zlibsettings); + if(state->error) break; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /*tIME*/ + if(info.time_defined) addChunk_tIME(&outv, &info.time); + /*tEXt and/or zTXt*/ + for(i = 0; i < info.text_num; i++) + { + if(strlen(info.text_keys[i]) > 79) + { + state->error = 66; /*text chunk too large*/ + break; + } + if(strlen(info.text_keys[i]) < 1) + { + state->error = 67; /*text chunk too small*/ + break; + } + if(state->encoder.text_compression) + { + addChunk_zTXt(&outv, info.text_keys[i], info.text_strings[i], &state->encoder.zlibsettings); + } + else + { + addChunk_tEXt(&outv, info.text_keys[i], info.text_strings[i]); + } + } + /*LodePNG version id in text chunk*/ + if(state->encoder.add_id) + { + unsigned alread_added_id_text = 0; + for(i = 0; i < info.text_num; i++) + { + if(!strcmp(info.text_keys[i], "LodePNG")) + { + alread_added_id_text = 1; + break; + } + } + if(alread_added_id_text == 0) + { + addChunk_tEXt(&outv, "LodePNG", VERSION_STRING); /*it's shorter as tEXt than as zTXt chunk*/ + } + } + /*iTXt*/ + for(i = 0; i < info.itext_num; i++) + { + if(strlen(info.itext_keys[i]) > 79) + { + state->error = 66; /*text chunk too large*/ + break; + } + if(strlen(info.itext_keys[i]) < 1) + { + state->error = 67; /*text chunk too small*/ + break; + } + addChunk_iTXt(&outv, state->encoder.text_compression, + info.itext_keys[i], info.itext_langtags[i], info.itext_transkeys[i], info.itext_strings[i], + &state->encoder.zlibsettings); + } + + /*unknown chunks between IDAT and IEND*/ + if(info.unknown_chunks_data[2]) + { + state->error = addUnknownChunks(&outv, info.unknown_chunks_data[2], info.unknown_chunks_size[2]); + if(state->error) break; + } +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + addChunk_IEND(&outv); + + break; /*this isn't really a while loop; no error happened so break out now!*/ + } + + lodepng_info_cleanup(&info); + lodepng_free(data); + /*instead of cleaning the vector up, give it to the output*/ + *out = outv.data; + 
*outsize = outv.size; + + return state->error; +} + +unsigned lodepng_encode_memory(unsigned char** out, size_t* outsize, const unsigned char* image, + unsigned w, unsigned h, LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned error; + LodePNGState state; + lodepng_state_init(&state); + state.info_raw.colortype = colortype; + state.info_raw.bitdepth = bitdepth; + state.info_png.color.colortype = colortype; + state.info_png.color.bitdepth = bitdepth; + lodepng_encode(out, outsize, image, w, h, &state); + error = state.error; + lodepng_state_cleanup(&state); + return error; +} + +unsigned lodepng_encode32(unsigned char** out, size_t* outsize, const unsigned char* image, unsigned w, unsigned h) +{ + return lodepng_encode_memory(out, outsize, image, w, h, LCT_RGBA, 8); +} + +unsigned lodepng_encode24(unsigned char** out, size_t* outsize, const unsigned char* image, unsigned w, unsigned h) +{ + return lodepng_encode_memory(out, outsize, image, w, h, LCT_RGB, 8); +} + +#ifdef LODEPNG_COMPILE_DISK +unsigned lodepng_encode_file(const char* filename, const unsigned char* image, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned char* buffer; + size_t buffersize; + unsigned error = lodepng_encode_memory(&buffer, &buffersize, image, w, h, colortype, bitdepth); + if(!error) error = lodepng_save_file(buffer, buffersize, filename); + lodepng_free(buffer); + return error; +} + +unsigned lodepng_encode32_file(const char* filename, const unsigned char* image, unsigned w, unsigned h) +{ + return lodepng_encode_file(filename, image, w, h, LCT_RGBA, 8); +} + +unsigned lodepng_encode24_file(const char* filename, const unsigned char* image, unsigned w, unsigned h) +{ + return lodepng_encode_file(filename, image, w, h, LCT_RGB, 8); +} +#endif /*LODEPNG_COMPILE_DISK*/ + +void lodepng_encoder_settings_init(LodePNGEncoderSettings* settings) +{ + lodepng_compress_settings_init(&settings->zlibsettings); + settings->filter_palette_zero = 1; + settings->filter_strategy = LFS_MINSUM; + settings->auto_convert = 1; + settings->force_palette = 0; + settings->predefined_filters = 0; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + settings->add_id = 0; + settings->text_compression = 1; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} + +#endif /*LODEPNG_COMPILE_ENCODER*/ +#endif /*LODEPNG_COMPILE_PNG*/ + +#ifdef LODEPNG_COMPILE_ERROR_TEXT +/* +This returns the description of a numerical error code in English. This is also +the documentation of all the error codes. 
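+A caller normally just maps a failing call's return value to this text, e.g.
+(illustrative): if(error) printf("lodepng error %u: %s\n", error, lodepng_error_text(error));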
+*/ +const char* lodepng_error_text(unsigned code) +{ + switch(code) + { + case 0: return "no error, everything went ok"; + case 1: return "nothing done yet"; /*the Encoder/Decoder has done nothing yet, error checking makes no sense yet*/ + case 10: return "end of input memory reached without huffman end code"; /*while huffman decoding*/ + case 11: return "error in code tree made it jump outside of huffman tree"; /*while huffman decoding*/ + case 13: return "problem while processing dynamic deflate block"; + case 14: return "problem while processing dynamic deflate block"; + case 15: return "problem while processing dynamic deflate block"; + case 16: return "unexisting code while processing dynamic deflate block"; + case 17: return "end of out buffer memory reached while inflating"; + case 18: return "invalid distance code while inflating"; + case 19: return "end of out buffer memory reached while inflating"; + case 20: return "invalid deflate block BTYPE encountered while decoding"; + case 21: return "NLEN is not ones complement of LEN in a deflate block"; + /*end of out buffer memory reached while inflating: + This can happen if the inflated deflate data is longer than the amount of bytes required to fill up + all the pixels of the image, given the color depth and image dimensions. Something that doesn't + happen in a normal, well encoded, PNG image.*/ + case 22: return "end of out buffer memory reached while inflating"; + case 23: return "end of in buffer memory reached while inflating"; + case 24: return "invalid FCHECK in zlib header"; + case 25: return "invalid compression method in zlib header"; + case 26: return "FDICT encountered in zlib header while it's not used for PNG"; + case 27: return "PNG file is smaller than a PNG header"; + /*Checks the magic file header, the first 8 bytes of the PNG file*/ + case 28: return "incorrect PNG signature, it's no PNG or corrupted"; + case 29: return "first chunk is not the header chunk"; + case 30: return "chunk length too large, chunk broken off at end of file"; + case 31: return "illegal PNG color type or bpp"; + case 32: return "illegal PNG compression method"; + case 33: return "illegal PNG filter method"; + case 34: return "illegal PNG interlace method"; + case 35: return "chunk length of a chunk is too large or the chunk too small"; + case 36: return "illegal PNG filter type encountered"; + case 37: return "illegal bit depth for this color type given"; + case 38: return "the palette is too big"; /*more than 256 colors*/ + case 39: return "more palette alpha values given in tRNS chunk than there are colors in the palette"; + case 40: return "tRNS chunk has wrong size for greyscale image"; + case 41: return "tRNS chunk has wrong size for RGB image"; + case 42: return "tRNS chunk appeared while it was not allowed for this color type"; + case 43: return "bKGD chunk has wrong size for palette image"; + case 44: return "bKGD chunk has wrong size for greyscale image"; + case 45: return "bKGD chunk has wrong size for RGB image"; + /*the input data is empty, maybe a PNG file doesn't exist or is in the wrong path*/ + case 48: return "empty input or file doesn't exist"; + case 49: return "jumped past memory while generating dynamic huffman tree"; + case 50: return "jumped past memory while generating dynamic huffman tree"; + case 51: return "jumped past memory while inflating huffman block"; + case 52: return "jumped past memory while inflating"; + case 53: return "size of zlib data too small"; + case 54: return "repeat symbol in tree while there 
was no value symbol yet"; + /*jumped past tree while generating huffman tree, this could be when the + tree will have more leaves than symbols after generating it out of the + given lenghts. They call this an oversubscribed dynamic bit lengths tree in zlib.*/ + case 55: return "jumped past tree while generating huffman tree"; + case 56: return "given output image colortype or bitdepth not supported for color conversion"; + case 57: return "invalid CRC encountered (checking CRC can be disabled)"; + case 58: return "invalid ADLER32 encountered (checking ADLER32 can be disabled)"; + case 59: return "requested color conversion not supported"; + case 60: return "invalid window size given in the settings of the encoder (must be 0-32768)"; + case 61: return "invalid BTYPE given in the settings of the encoder (only 0, 1 and 2 are allowed)"; + /*LodePNG leaves the choice of RGB to greyscale conversion formula to the user.*/ + case 62: return "conversion from color to greyscale not supported"; + case 63: return "length of a chunk too long, max allowed for PNG is 2147483647 bytes per chunk"; /*(2^31-1)*/ + /*this would result in the inability of a deflated block to ever contain an end code. It must be at least 1.*/ + case 64: return "the length of the END symbol 256 in the Huffman tree is 0"; + case 66: return "the length of a text chunk keyword given to the encoder is longer than the maximum of 79 bytes"; + case 67: return "the length of a text chunk keyword given to the encoder is smaller than the minimum of 1 byte"; + case 68: return "tried to encode a PLTE chunk with a palette that has less than 1 or more than 256 colors"; + case 69: return "unknown chunk type with 'critical' flag encountered by the decoder"; + case 71: return "unexisting interlace mode given to encoder (must be 0 or 1)"; + case 72: return "while decoding, unexisting compression method encountering in zTXt or iTXt chunk (it must be 0)"; + case 73: return "invalid tIME chunk size"; + case 74: return "invalid pHYs chunk size"; + /*length could be wrong, or data chopped off*/ + case 75: return "no null termination char found while decoding text chunk"; + case 76: return "iTXt chunk too short to contain required bytes"; + case 77: return "integer overflow in buffer size"; + case 78: return "failed to open file for reading"; /*file doesn't exist or couldn't be opened for reading*/ + case 79: return "failed to open file for writing"; + case 80: return "tried creating a tree of 0 symbols"; + case 81: return "lazy matching at pos 0 is impossible"; + case 82: return "color conversion to palette requested while a color isn't in palette"; + case 83: return "memory allocation failed"; + case 84: return "given image too small to contain all pixels to be encoded"; + case 86: return "impossible offset in lz77 encoding (internal bug)"; + case 87: return "must provide custom zlib function pointer if LODEPNG_COMPILE_ZLIB is not defined"; + case 88: return "invalid filter strategy given for LodePNGEncoderSettings.filter_strategy"; + case 89: return "text chunk keyword too short or long: must have size 1-79"; + /*the windowsize in the LodePNGCompressSettings. 
Requiring POT(==> & instead of %) makes encoding 12% faster.*/ + case 90: return "windowsize must be a power of two"; + } + return "unknown error code"; +} +#endif /*LODEPNG_COMPILE_ERROR_TEXT*/ + +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* // C++ Wrapper // */ +/* ////////////////////////////////////////////////////////////////////////// */ +/* ////////////////////////////////////////////////////////////////////////// */ + +#ifdef LODEPNG_COMPILE_CPP +namespace lodepng +{ + +#ifdef LODEPNG_COMPILE_DISK +void load_file(std::vector& buffer, const std::string& filename) +{ + std::ifstream file(filename.c_str(), std::ios::in|std::ios::binary|std::ios::ate); + + /*get filesize*/ + std::streamsize size = 0; + if(file.seekg(0, std::ios::end).good()) size = file.tellg(); + if(file.seekg(0, std::ios::beg).good()) size -= file.tellg(); + + /*read contents of the file into the vector*/ + buffer.resize(size_t(size)); + if(size > 0) file.read((char*)(&buffer[0]), size); +} + +/*write given buffer to the file, overwriting the file, it doesn't append to it.*/ +void save_file(const std::vector& buffer, const std::string& filename) +{ + std::ofstream file(filename.c_str(), std::ios::out|std::ios::binary); + file.write(buffer.empty() ? 0 : (char*)&buffer[0], std::streamsize(buffer.size())); +} +#endif //LODEPNG_COMPILE_DISK + +#ifdef LODEPNG_COMPILE_ZLIB +#ifdef LODEPNG_COMPILE_DECODER +unsigned decompress(std::vector& out, const unsigned char* in, size_t insize, + const LodePNGDecompressSettings& settings) +{ + unsigned char* buffer = 0; + size_t buffersize = 0; + unsigned error = zlib_decompress(&buffer, &buffersize, in, insize, &settings); + if(buffer) + { + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + lodepng_free(buffer); + } + return error; +} + +unsigned decompress(std::vector& out, const std::vector& in, + const LodePNGDecompressSettings& settings) +{ + return decompress(out, in.empty() ? 0 : &in[0], in.size(), settings); +} +#endif //LODEPNG_COMPILE_DECODER + +#ifdef LODEPNG_COMPILE_ENCODER +unsigned compress(std::vector& out, const unsigned char* in, size_t insize, + const LodePNGCompressSettings& settings) +{ + unsigned char* buffer = 0; + size_t buffersize = 0; + unsigned error = zlib_compress(&buffer, &buffersize, in, insize, &settings); + if(buffer) + { + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + lodepng_free(buffer); + } + return error; +} + +unsigned compress(std::vector& out, const std::vector& in, + const LodePNGCompressSettings& settings) +{ + return compress(out, in.empty() ? 
0 : &in[0], in.size(), settings); +} +#endif //LODEPNG_COMPILE_ENCODER +#endif //LODEPNG_COMPILE_ZLIB + + +#ifdef LODEPNG_COMPILE_PNG + +State::State() +{ + lodepng_state_init(this); +} + +State::State(const State& other) +{ + lodepng_state_init(this); + lodepng_state_copy(this, &other); +} + +State::~State() +{ + lodepng_state_cleanup(this); +} + +State& State::operator=(const State& other) +{ + lodepng_state_copy(this, &other); + return *this; +} + +#ifdef LODEPNG_COMPILE_DECODER + +unsigned decode(std::vector& out, unsigned& w, unsigned& h, const unsigned char* in, + size_t insize, LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned char* buffer; + unsigned error = lodepng_decode_memory(&buffer, &w, &h, in, insize, colortype, bitdepth); + if(buffer && !error) + { + State state; + state.info_raw.colortype = colortype; + state.info_raw.bitdepth = bitdepth; + size_t buffersize = lodepng_get_raw_size(w, h, &state.info_raw); + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + lodepng_free(buffer); + } + return error; +} + +unsigned decode(std::vector& out, unsigned& w, unsigned& h, + const std::vector& in, LodePNGColorType colortype, unsigned bitdepth) +{ + return decode(out, w, h, in.empty() ? 0 : &in[0], (unsigned)in.size(), colortype, bitdepth); +} + +unsigned decode(std::vector& out, unsigned& w, unsigned& h, + State& state, + const unsigned char* in, size_t insize) +{ + unsigned char* buffer = NULL; + unsigned error = lodepng_decode(&buffer, &w, &h, &state, in, insize); + if(buffer && !error) + { + size_t buffersize = lodepng_get_raw_size(w, h, &state.info_raw); + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + } + lodepng_free(buffer); + return error; +} + +unsigned decode(std::vector& out, unsigned& w, unsigned& h, + State& state, + const std::vector& in) +{ + return decode(out, w, h, state, in.empty() ? 0 : &in[0], in.size()); +} + +#ifdef LODEPNG_COMPILE_DISK +unsigned decode(std::vector& out, unsigned& w, unsigned& h, const std::string& filename, + LodePNGColorType colortype, unsigned bitdepth) +{ + std::vector buffer; + load_file(buffer, filename); + return decode(out, w, h, buffer, colortype, bitdepth); +} +#endif //LODEPNG_COMPILE_DECODER +#endif //LODEPNG_COMPILE_DISK + +#ifdef LODEPNG_COMPILE_ENCODER +unsigned encode(std::vector& out, const unsigned char* in, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth) +{ + unsigned char* buffer; + size_t buffersize; + unsigned error = lodepng_encode_memory(&buffer, &buffersize, in, w, h, colortype, bitdepth); + if(buffer) + { + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + lodepng_free(buffer); + } + return error; +} + +unsigned encode(std::vector& out, + const std::vector& in, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth) +{ + if(lodepng_get_raw_size_lct(w, h, colortype, bitdepth) > in.size()) return 84; + return encode(out, in.empty() ? 0 : &in[0], w, h, colortype, bitdepth); +} + +unsigned encode(std::vector& out, + const unsigned char* in, unsigned w, unsigned h, + State& state) +{ + unsigned char* buffer; + size_t buffersize; + unsigned error = lodepng_encode(&buffer, &buffersize, in, w, h, &state); + if(buffer) + { + out.insert(out.end(), &buffer[0], &buffer[buffersize]); + lodepng_free(buffer); + } + return error; +} + +unsigned encode(std::vector& out, + const std::vector& in, unsigned w, unsigned h, + State& state) +{ + if(lodepng_get_raw_size(w, h, &state.info_raw) > in.size()) return 84; + return encode(out, in.empty() ? 
0 : &in[0], w, h, state);
+}
+
+#ifdef LODEPNG_COMPILE_DISK
+unsigned encode(const std::string& filename,
+                const unsigned char* in, unsigned w, unsigned h,
+                LodePNGColorType colortype, unsigned bitdepth)
+{
+  std::vector<unsigned char> buffer;
+  unsigned error = encode(buffer, in, w, h, colortype, bitdepth);
+  if(!error) save_file(buffer, filename);
+  return error;
+}
+
+unsigned encode(const std::string& filename,
+                const std::vector<unsigned char>& in, unsigned w, unsigned h,
+                LodePNGColorType colortype, unsigned bitdepth)
+{
+  if(lodepng_get_raw_size_lct(w, h, colortype, bitdepth) > in.size()) return 84;
+  return encode(filename, in.empty() ? 0 : &in[0], w, h, colortype, bitdepth);
+}
+#endif //LODEPNG_COMPILE_DISK
+#endif //LODEPNG_COMPILE_ENCODER
+#endif //LODEPNG_COMPILE_PNG
+} //namespace lodepng
+#endif /*LODEPNG_COMPILE_CPP*/
diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.h b/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.h
new file mode 100644
index 000000000..ef2c82067
--- /dev/null
+++ b/examples/ThirdPartyLibs/openvr/samples/shared/lodepng.h
@@ -0,0 +1,1702 @@
+/*
+LodePNG version 20140823
+
+Copyright (c) 2005-2014 Lode Vandevenne
+
+This software is provided 'as-is', without any express or implied
+warranty. In no event will the authors be held liable for any damages
+arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose,
+including commercial applications, and to alter it and redistribute it
+freely, subject to the following restrictions:
+
+    1. The origin of this software must not be misrepresented; you must not
+    claim that you wrote the original software. If you use this software
+    in a product, an acknowledgment in the product documentation would be
+    appreciated but is not required.
+
+    2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+
+    3. This notice may not be removed or altered from any source
+    distribution.
+*/
+
+#ifndef LODEPNG_H
+#define LODEPNG_H
+
+#include <string.h> /*for size_t*/
+
+#ifdef __cplusplus
+#include <vector>
+#include <string>
+#endif /*__cplusplus*/
+
+/*
+The following #defines are used to create code sections. They can be disabled
+to disable code sections, which can give faster compile time and smaller binary.
+The "NO_COMPILE" defines are designed to be used to pass as defines to the
+compiler command to disable them without modifying this header, e.g.
+-DLODEPNG_NO_COMPILE_ZLIB for gcc.
+*/
+/*deflate & zlib.
If disabled, you must specify alternative zlib functions in +the custom_zlib field of the compress and decompress settings*/ +#ifndef LODEPNG_NO_COMPILE_ZLIB +#define LODEPNG_COMPILE_ZLIB +#endif +/*png encoder and png decoder*/ +#ifndef LODEPNG_NO_COMPILE_PNG +#define LODEPNG_COMPILE_PNG +#endif +/*deflate&zlib decoder and png decoder*/ +#ifndef LODEPNG_NO_COMPILE_DECODER +#define LODEPNG_COMPILE_DECODER +#endif +/*deflate&zlib encoder and png encoder*/ +#ifndef LODEPNG_NO_COMPILE_ENCODER +#define LODEPNG_COMPILE_ENCODER +#endif +/*the optional built in harddisk file loading and saving functions*/ +#ifndef LODEPNG_NO_COMPILE_DISK +#define LODEPNG_COMPILE_DISK +#endif +/*support for chunks other than IHDR, IDAT, PLTE, tRNS, IEND: ancillary and unknown chunks*/ +#ifndef LODEPNG_NO_COMPILE_ANCILLARY_CHUNKS +#define LODEPNG_COMPILE_ANCILLARY_CHUNKS +#endif +/*ability to convert error numerical codes to English text string*/ +#ifndef LODEPNG_NO_COMPILE_ERROR_TEXT +#define LODEPNG_COMPILE_ERROR_TEXT +#endif +/*Compile the default allocators (C's free, malloc and realloc). If you disable this, +you can define the functions lodepng_free, lodepng_malloc and lodepng_realloc in your +source files with custom allocators.*/ +#ifndef LODEPNG_NO_COMPILE_ALLOCATORS +#define LODEPNG_COMPILE_ALLOCATORS +#endif +/*compile the C++ version (you can disable the C++ wrapper here even when compiling for C++)*/ +#ifdef __cplusplus +#ifndef LODEPNG_NO_COMPILE_CPP +#define LODEPNG_COMPILE_CPP +#endif +#endif + +#ifdef LODEPNG_COMPILE_PNG +/*The PNG color types (also used for raw).*/ +typedef enum LodePNGColorType +{ + LCT_GREY = 0, /*greyscale: 1,2,4,8,16 bit*/ + LCT_RGB = 2, /*RGB: 8,16 bit*/ + LCT_PALETTE = 3, /*palette: 1,2,4,8 bit*/ + LCT_GREY_ALPHA = 4, /*greyscale with alpha: 8,16 bit*/ + LCT_RGBA = 6 /*RGB with alpha: 8,16 bit*/ +} LodePNGColorType; + +#ifdef LODEPNG_COMPILE_DECODER +/* +Converts PNG data in memory to raw pixel data. +out: Output parameter. Pointer to buffer that will contain the raw pixel data. + After decoding, its size is w * h * (bytes per pixel) bytes larger than + initially. Bytes per pixel depends on colortype and bitdepth. + Must be freed after usage with free(*out). + Note: for 16-bit per channel colors, uses big endian format like PNG does. +w: Output parameter. Pointer to width of pixel data. +h: Output parameter. Pointer to height of pixel data. +in: Memory buffer with the PNG file. +insize: size of the in buffer. +colortype: the desired color type for the raw output image. See explanation on PNG color types. +bitdepth: the desired bit depth for the raw output image. See explanation on PNG color types. +Return value: LodePNG error code (0 means no error). +*/ +unsigned lodepng_decode_memory(unsigned char** out, unsigned* w, unsigned* h, + const unsigned char* in, size_t insize, + LodePNGColorType colortype, unsigned bitdepth); + +/*Same as lodepng_decode_memory, but always decodes to 32-bit RGBA raw image*/ +unsigned lodepng_decode32(unsigned char** out, unsigned* w, unsigned* h, + const unsigned char* in, size_t insize); + +/*Same as lodepng_decode_memory, but always decodes to 24-bit RGB raw image*/ +unsigned lodepng_decode24(unsigned char** out, unsigned* w, unsigned* h, + const unsigned char* in, size_t insize); + +#ifdef LODEPNG_COMPILE_DISK +/* +Load PNG from disk, from file with given name. +Same as the other decode functions, but instead takes a filename as input. 
+*/ +unsigned lodepng_decode_file(unsigned char** out, unsigned* w, unsigned* h, + const char* filename, + LodePNGColorType colortype, unsigned bitdepth); + +/*Same as lodepng_decode_file, but always decodes to 32-bit RGBA raw image.*/ +unsigned lodepng_decode32_file(unsigned char** out, unsigned* w, unsigned* h, + const char* filename); + +/*Same as lodepng_decode_file, but always decodes to 24-bit RGB raw image.*/ +unsigned lodepng_decode24_file(unsigned char** out, unsigned* w, unsigned* h, + const char* filename); +#endif /*LODEPNG_COMPILE_DISK*/ +#endif /*LODEPNG_COMPILE_DECODER*/ + + +#ifdef LODEPNG_COMPILE_ENCODER +/* +Converts raw pixel data into a PNG image in memory. The colortype and bitdepth + of the output PNG image cannot be chosen, they are automatically determined + by the colortype, bitdepth and content of the input pixel data. + Note: for 16-bit per channel colors, needs big endian format like PNG does. +out: Output parameter. Pointer to buffer that will contain the PNG image data. + Must be freed after usage with free(*out). +outsize: Output parameter. Pointer to the size in bytes of the out buffer. +image: The raw pixel data to encode. The size of this buffer should be + w * h * (bytes per pixel), bytes per pixel depends on colortype and bitdepth. +w: width of the raw pixel data in pixels. +h: height of the raw pixel data in pixels. +colortype: the color type of the raw input image. See explanation on PNG color types. +bitdepth: the bit depth of the raw input image. See explanation on PNG color types. +Return value: LodePNG error code (0 means no error). +*/ +unsigned lodepng_encode_memory(unsigned char** out, size_t* outsize, + const unsigned char* image, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth); + +/*Same as lodepng_encode_memory, but always encodes from 32-bit RGBA raw image.*/ +unsigned lodepng_encode32(unsigned char** out, size_t* outsize, + const unsigned char* image, unsigned w, unsigned h); + +/*Same as lodepng_encode_memory, but always encodes from 24-bit RGB raw image.*/ +unsigned lodepng_encode24(unsigned char** out, size_t* outsize, + const unsigned char* image, unsigned w, unsigned h); + +#ifdef LODEPNG_COMPILE_DISK +/* +Converts raw pixel data into a PNG file on disk. +Same as the other encode functions, but instead takes a filename as output. +NOTE: This overwrites existing files without warning! +*/ +unsigned lodepng_encode_file(const char* filename, + const unsigned char* image, unsigned w, unsigned h, + LodePNGColorType colortype, unsigned bitdepth); + +/*Same as lodepng_encode_file, but always encodes from 32-bit RGBA raw image.*/ +unsigned lodepng_encode32_file(const char* filename, + const unsigned char* image, unsigned w, unsigned h); + +/*Same as lodepng_encode_file, but always encodes from 24-bit RGB raw image.*/ +unsigned lodepng_encode24_file(const char* filename, + const unsigned char* image, unsigned w, unsigned h); +#endif /*LODEPNG_COMPILE_DISK*/ +#endif /*LODEPNG_COMPILE_ENCODER*/ + + +#ifdef LODEPNG_COMPILE_CPP +namespace lodepng +{ +#ifdef LODEPNG_COMPILE_DECODER +/*Same as lodepng_decode_memory, but decodes to an std::vector. The colortype +is the format to output the pixels to. 
Default is RGBA 8-bit per channel.*/
+unsigned decode(std::vector<unsigned char>& out, unsigned& w, unsigned& h,
+                const unsigned char* in, size_t insize,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+unsigned decode(std::vector<unsigned char>& out, unsigned& w, unsigned& h,
+                const std::vector<unsigned char>& in,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+#ifdef LODEPNG_COMPILE_DISK
+/*
+Converts PNG file from disk to raw pixel data in memory.
+Same as the other decode functions, but instead takes a filename as input.
+*/
+unsigned decode(std::vector<unsigned char>& out, unsigned& w, unsigned& h,
+                const std::string& filename,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+#endif //LODEPNG_COMPILE_DISK
+#endif //LODEPNG_COMPILE_DECODER
+
+#ifdef LODEPNG_COMPILE_ENCODER
+/*Same as lodepng_encode_memory, but encodes to an std::vector. colortype
+is that of the raw input data. The output PNG color type will be auto chosen.*/
+unsigned encode(std::vector<unsigned char>& out,
+                const unsigned char* in, unsigned w, unsigned h,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+unsigned encode(std::vector<unsigned char>& out,
+                const std::vector<unsigned char>& in, unsigned w, unsigned h,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+#ifdef LODEPNG_COMPILE_DISK
+/*
+Converts 32-bit RGBA raw pixel data into a PNG file on disk.
+Same as the other encode functions, but instead takes a filename as output.
+NOTE: This overwrites existing files without warning!
+*/
+unsigned encode(const std::string& filename,
+                const unsigned char* in, unsigned w, unsigned h,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+unsigned encode(const std::string& filename,
+                const std::vector<unsigned char>& in, unsigned w, unsigned h,
+                LodePNGColorType colortype = LCT_RGBA, unsigned bitdepth = 8);
+#endif //LODEPNG_COMPILE_DISK
+#endif //LODEPNG_COMPILE_ENCODER
+} //namespace lodepng
+#endif /*LODEPNG_COMPILE_CPP*/
+#endif /*LODEPNG_COMPILE_PNG*/
+
+#ifdef LODEPNG_COMPILE_ERROR_TEXT
+/*Returns an English description of the numerical error code.*/
+const char* lodepng_error_text(unsigned code);
+#endif /*LODEPNG_COMPILE_ERROR_TEXT*/
+
+#ifdef LODEPNG_COMPILE_DECODER
+/*Settings for zlib decompression*/
+typedef struct LodePNGDecompressSettings LodePNGDecompressSettings;
+struct LodePNGDecompressSettings
+{
+  unsigned ignore_adler32; /*if 1, continue and don't give an error message if the Adler32 checksum is corrupted*/
+
+  /*use custom zlib decoder instead of built in one (default: null)*/
+  unsigned (*custom_zlib)(unsigned char**, size_t*,
+                          const unsigned char*, size_t,
+                          const LodePNGDecompressSettings*);
+  /*use custom deflate decoder instead of built in one (default: null)
+  if custom_zlib is used, custom_deflate is ignored since only the built in
+  zlib function will call custom_deflate*/
+  unsigned (*custom_inflate)(unsigned char**, size_t*,
+                             const unsigned char*, size_t,
+                             const LodePNGDecompressSettings*);
+
+  const void* custom_context; /*optional custom settings for custom functions*/
+};
+
+extern const LodePNGDecompressSettings lodepng_default_decompress_settings;
+void lodepng_decompress_settings_init(LodePNGDecompressSettings* settings);
+#endif /*LODEPNG_COMPILE_DECODER*/
+
+#ifdef LODEPNG_COMPILE_ENCODER
+/*
+Settings for zlib compression. Tweaking these settings tweaks the balance
+between speed and compression ratio.
+*/ +typedef struct LodePNGCompressSettings LodePNGCompressSettings; +struct LodePNGCompressSettings /*deflate = compress*/ +{ + /*LZ77 related settings*/ + unsigned btype; /*the block type for LZ (0, 1, 2 or 3, see zlib standard). Should be 2 for proper compression.*/ + unsigned use_lz77; /*whether or not to use LZ77. Should be 1 for proper compression.*/ + unsigned windowsize; /*must be a power of two <= 32768. higher compresses more but is slower. Default value: 2048.*/ + unsigned minmatch; /*mininum lz77 length. 3 is normally best, 6 can be better for some PNGs. Default: 0*/ + unsigned nicematch; /*stop searching if >= this length found. Set to 258 for best compression. Default: 128*/ + unsigned lazymatching; /*use lazy matching: better compression but a bit slower. Default: true*/ + + /*use custom zlib encoder instead of built in one (default: null)*/ + unsigned (*custom_zlib)(unsigned char**, size_t*, + const unsigned char*, size_t, + const LodePNGCompressSettings*); + /*use custom deflate encoder instead of built in one (default: null) + if custom_zlib is used, custom_deflate is ignored since only the built in + zlib function will call custom_deflate*/ + unsigned (*custom_deflate)(unsigned char**, size_t*, + const unsigned char*, size_t, + const LodePNGCompressSettings*); + + const void* custom_context; /*optional custom settings for custom functions*/ +}; + +extern const LodePNGCompressSettings lodepng_default_compress_settings; +void lodepng_compress_settings_init(LodePNGCompressSettings* settings); +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#ifdef LODEPNG_COMPILE_PNG +/* +Color mode of an image. Contains all information required to decode the pixel +bits to RGBA colors. This information is the same as used in the PNG file +format, and is used both for PNG and raw image data in LodePNG. +*/ +typedef struct LodePNGColorMode +{ + /*header (IHDR)*/ + LodePNGColorType colortype; /*color type, see PNG standard or documentation further in this header file*/ + unsigned bitdepth; /*bits per sample, see PNG standard or documentation further in this header file*/ + + /* + palette (PLTE and tRNS) + + Dynamically allocated with the colors of the palette, including alpha. + When encoding a PNG, to store your colors in the palette of the LodePNGColorMode, first use + lodepng_palette_clear, then for each color use lodepng_palette_add. + If you encode an image without alpha with palette, don't forget to put value 255 in each A byte of the palette. + + When decoding, by default you can ignore this palette, since LodePNG already + fills the palette colors in the pixels of the raw RGBA output. + + The palette is only supported for color type 3. + */ + unsigned char* palette; /*palette in RGBARGBA... order. When allocated, must be either 0, or have size 1024*/ + size_t palettesize; /*palette size in number of colors (amount of bytes is 4 * palettesize)*/ + + /* + transparent color key (tRNS) + + This color uses the same bit depth as the bitdepth value in this struct, which can be 1-bit to 16-bit. + For greyscale PNGs, r, g and b will all 3 be set to the same. + + When decoding, by default you can ignore this information, since LodePNG sets + pixels with this key to transparent already in the raw RGBA output. + + The color key is only supported for color types 0 and 2. + */ + unsigned key_defined; /*is a transparent color key given? 
0 = false, 1 = true*/ + unsigned key_r; /*red/greyscale component of color key*/ + unsigned key_g; /*green component of color key*/ + unsigned key_b; /*blue component of color key*/ +} LodePNGColorMode; + +/*init, cleanup and copy functions to use with this struct*/ +void lodepng_color_mode_init(LodePNGColorMode* info); +void lodepng_color_mode_cleanup(LodePNGColorMode* info); +/*return value is error code (0 means no error)*/ +unsigned lodepng_color_mode_copy(LodePNGColorMode* dest, const LodePNGColorMode* source); + +void lodepng_palette_clear(LodePNGColorMode* info); +/*add 1 color to the palette*/ +unsigned lodepng_palette_add(LodePNGColorMode* info, + unsigned char r, unsigned char g, unsigned char b, unsigned char a); + +/*get the total amount of bits per pixel, based on colortype and bitdepth in the struct*/ +unsigned lodepng_get_bpp(const LodePNGColorMode* info); +/*get the amount of color channels used, based on colortype in the struct. +If a palette is used, it counts as 1 channel.*/ +unsigned lodepng_get_channels(const LodePNGColorMode* info); +/*is it a greyscale type? (only colortype 0 or 4)*/ +unsigned lodepng_is_greyscale_type(const LodePNGColorMode* info); +/*has it got an alpha channel? (only colortype 2 or 6)*/ +unsigned lodepng_is_alpha_type(const LodePNGColorMode* info); +/*has it got a palette? (only colortype 3)*/ +unsigned lodepng_is_palette_type(const LodePNGColorMode* info); +/*only returns true if there is a palette and there is a value in the palette with alpha < 255. +Loops through the palette to check this.*/ +unsigned lodepng_has_palette_alpha(const LodePNGColorMode* info); +/* +Check if the given color info indicates the possibility of having non-opaque pixels in the PNG image. +Returns true if the image can have translucent or invisible pixels (it still be opaque if it doesn't use such pixels). +Returns false if the image can only have opaque pixels. +In detail, it returns true only if it's a color type with alpha, or has a palette with non-opaque values, +or if "key_defined" is true. +*/ +unsigned lodepng_can_have_alpha(const LodePNGColorMode* info); +/*Returns the byte size of a raw image buffer with given width, height and color mode*/ +size_t lodepng_get_raw_size(unsigned w, unsigned h, const LodePNGColorMode* color); + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS +/*The information of a Time chunk in PNG.*/ +typedef struct LodePNGTime +{ + unsigned year; /*2 bytes used (0-65535)*/ + unsigned month; /*1-12*/ + unsigned day; /*1-31*/ + unsigned hour; /*0-23*/ + unsigned minute; /*0-59*/ + unsigned second; /*0-60 (to allow for leap seconds)*/ +} LodePNGTime; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +/*Information about the PNG image, except pixels, width and height.*/ +typedef struct LodePNGInfo +{ + /*header (IHDR), palette (PLTE) and transparency (tRNS) chunks*/ + unsigned compression_method;/*compression method of the original file. Always 0.*/ + unsigned filter_method; /*filter method of the original file*/ + unsigned interlace_method; /*interlace method of the original file*/ + LodePNGColorMode color; /*color type and bits, palette and transparency of the PNG file*/ + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /* + suggested background color chunk (bKGD) + This color uses the same color mode as the PNG (except alpha channel), which can be 1-bit to 16-bit. + + For greyscale PNGs, r, g and b will all 3 be set to the same. When encoding + the encoder writes the red one. 
For palette PNGs: When decoding, the RGB value + will be stored, not a palette index. But when encoding, specify the index of + the palette in background_r, the other two are then ignored. + + The decoder does not use this background color to edit the color of pixels. + */ + unsigned background_defined; /*is a suggested background color given?*/ + unsigned background_r; /*red component of suggested background color*/ + unsigned background_g; /*green component of suggested background color*/ + unsigned background_b; /*blue component of suggested background color*/ + + /* + non-international text chunks (tEXt and zTXt) + + The char** arrays each contain num strings. The actual messages are in + text_strings, while text_keys are keywords that give a short description what + the actual text represents, e.g. Title, Author, Description, or anything else. + + A keyword is minimum 1 character and maximum 79 characters long. It's + discouraged to use a single line length longer than 79 characters for texts. + + Don't allocate these text buffers yourself. Use the init/cleanup functions + correctly and use lodepng_add_text and lodepng_clear_text. + */ + size_t text_num; /*the amount of texts in these char** buffers (there may be more texts in itext)*/ + char** text_keys; /*the keyword of a text chunk (e.g. "Comment")*/ + char** text_strings; /*the actual text*/ + + /* + international text chunks (iTXt) + Similar to the non-international text chunks, but with additional strings + "langtags" and "transkeys". + */ + size_t itext_num; /*the amount of international texts in this PNG*/ + char** itext_keys; /*the English keyword of the text chunk (e.g. "Comment")*/ + char** itext_langtags; /*language tag for this text's language, ISO/IEC 646 string, e.g. ISO 639 language tag*/ + char** itext_transkeys; /*keyword translated to the international language - UTF-8 string*/ + char** itext_strings; /*the actual international text - UTF-8 string*/ + + /*time chunk (tIME)*/ + unsigned time_defined; /*set to 1 to make the encoder generate a tIME chunk*/ + LodePNGTime time; + + /*phys chunk (pHYs)*/ + unsigned phys_defined; /*if 0, there is no pHYs chunk and the values below are undefined, if 1 else there is one*/ + unsigned phys_x; /*pixels per unit in x direction*/ + unsigned phys_y; /*pixels per unit in y direction*/ + unsigned phys_unit; /*may be 0 (unknown unit) or 1 (metre)*/ + + /* + unknown chunks + There are 3 buffers, one for each position in the PNG where unknown chunks can appear + each buffer contains all unknown chunks for that position consecutively + The 3 buffers are the unknown chunks between certain critical chunks: + 0: IHDR-PLTE, 1: PLTE-IDAT, 2: IDAT-IEND + Do not allocate or traverse this data yourself. Use the chunk traversing functions declared + later, such as lodepng_chunk_next and lodepng_chunk_append, to read/write this struct. 
+ */ + unsigned char* unknown_chunks_data[3]; + size_t unknown_chunks_size[3]; /*size in bytes of the unknown chunks, given for protection*/ +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} LodePNGInfo; + +/*init, cleanup and copy functions to use with this struct*/ +void lodepng_info_init(LodePNGInfo* info); +void lodepng_info_cleanup(LodePNGInfo* info); +/*return value is error code (0 means no error)*/ +unsigned lodepng_info_copy(LodePNGInfo* dest, const LodePNGInfo* source); + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS +void lodepng_clear_text(LodePNGInfo* info); /*use this to clear the texts again after you filled them in*/ +unsigned lodepng_add_text(LodePNGInfo* info, const char* key, const char* str); /*push back both texts at once*/ + +void lodepng_clear_itext(LodePNGInfo* info); /*use this to clear the itexts again after you filled them in*/ +unsigned lodepng_add_itext(LodePNGInfo* info, const char* key, const char* langtag, + const char* transkey, const char* str); /*push back the 4 texts of 1 chunk at once*/ +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ + +/* +Converts raw buffer from one color type to another color type, based on +LodePNGColorMode structs to describe the input and output color type. +See the reference manual at the end of this header file to see which color conversions are supported. +return value = LodePNG error code (0 if all went ok, an error if the conversion isn't supported) +The out buffer must have size (w * h * bpp + 7) / 8, where bpp is the bits per pixel +of the output color type (lodepng_get_bpp). +For < 8 bpp images, there should not be padding bits at the end of scanlines. +For 16-bit per channel colors, uses big endian format like PNG does. +Return value is LodePNG error code +*/ +unsigned lodepng_convert(unsigned char* out, const unsigned char* in, + LodePNGColorMode* mode_out, const LodePNGColorMode* mode_in, + unsigned w, unsigned h); + +#ifdef LODEPNG_COMPILE_DECODER +/* +Settings for the decoder. This contains settings for the PNG and the Zlib +decoder, but not the Info settings from the Info structs. +*/ +typedef struct LodePNGDecoderSettings +{ + LodePNGDecompressSettings zlibsettings; /*in here is the setting to ignore Adler32 checksums*/ + + unsigned ignore_crc; /*ignore CRC checksums*/ + + unsigned color_convert; /*whether to convert the PNG to the color type you want. Default: yes*/ + +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + unsigned read_text_chunks; /*if false but remember_unknown_chunks is true, they're stored in the unknown chunks*/ + /*store all bytes from unknown chunks in the LodePNGInfo (off by default, useful for a png editor)*/ + unsigned remember_unknown_chunks; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} LodePNGDecoderSettings; + +void lodepng_decoder_settings_init(LodePNGDecoderSettings* settings); +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_ENCODER +/*automatically use color type with less bits per pixel if losslessly possible. Default: AUTO*/ +typedef enum LodePNGFilterStrategy +{ + /*every filter at zero*/ + LFS_ZERO, + /*Use filter that gives minumum sum, as described in the official PNG filter heuristic.*/ + LFS_MINSUM, + /*Use the filter type that gives smallest Shannon entropy for this scanline. Depending + on the image, this is better or worse than minsum.*/ + LFS_ENTROPY, + /* + Brute-force-search PNG filters by compressing each filter for each scanline. + Experimental, very slow, and only rarely gives better compression than MINSUM. 
+ */ + LFS_BRUTE_FORCE, + /*use predefined_filters buffer: you specify the filter type for each scanline*/ + LFS_PREDEFINED +} LodePNGFilterStrategy; + +/*Gives characteristics about the colors of the image, which helps decide which color model to use for encoding. +Used internally by default if "auto_convert" is enabled. Public because it's useful for custom algorithms.*/ +typedef struct LodePNGColorProfile +{ + unsigned colored; /*not greyscale*/ + unsigned key; /*if true, image is not opaque. Only if true and alpha is false, color key is possible.*/ + unsigned short key_r; /*these values are always in 16-bit bitdepth in the profile*/ + unsigned short key_g; + unsigned short key_b; + unsigned alpha; /*alpha channel or alpha palette required*/ + unsigned numcolors; /*amount of colors, up to 257. Not valid if bits == 16.*/ + unsigned char palette[1024]; /*Remembers up to the first 256 RGBA colors, in no particular order*/ + unsigned bits; /*bits per channel (not for palette). 1,2 or 4 for greyscale only. 16 if 16-bit per channel required.*/ +} LodePNGColorProfile; + +void lodepng_color_profile_init(LodePNGColorProfile* profile); + +/*Get a LodePNGColorProfile of the image.*/ +unsigned get_color_profile(LodePNGColorProfile* profile, + const unsigned char* image, unsigned w, unsigned h, + const LodePNGColorMode* mode_in); +/*The function LodePNG uses internally to decide the PNG color with auto_convert. +Chooses an optimal color model, e.g. grey if only grey pixels, palette if < 256 colors, ...*/ +unsigned lodepng_auto_choose_color(LodePNGColorMode* mode_out, + const unsigned char* image, unsigned w, unsigned h, + const LodePNGColorMode* mode_in); + +/*Settings for the encoder.*/ +typedef struct LodePNGEncoderSettings +{ + LodePNGCompressSettings zlibsettings; /*settings for the zlib encoder, such as window size, ...*/ + + unsigned auto_convert; /*automatically choose output PNG color type. Default: true*/ + + /*If true, follows the official PNG heuristic: if the PNG uses a palette or lower than + 8 bit depth, set all filters to zero. Otherwise use the filter_strategy. Note that to + completely follow the official PNG heuristic, filter_palette_zero must be true and + filter_strategy must be LFS_MINSUM*/ + unsigned filter_palette_zero; + /*Which filter strategy to use when not using zeroes due to filter_palette_zero. + Set filter_palette_zero to 0 to ensure always using your chosen strategy. Default: LFS_MINSUM*/ + LodePNGFilterStrategy filter_strategy; + /*used if filter_strategy is LFS_PREDEFINED. In that case, this must point to a buffer with + the same length as the amount of scanlines in the image, and each value must <= 5. You + have to cleanup this buffer, LodePNG will never free it. Don't forget that filter_palette_zero + must be set to 0 to ensure this is also used on palette or low bitdepth images.*/ + const unsigned char* predefined_filters; + + /*force creating a PLTE chunk if colortype is 2 or 6 (= a suggested palette). 
+ If colortype is 3, PLTE is _always_ created.*/ + unsigned force_palette; +#ifdef LODEPNG_COMPILE_ANCILLARY_CHUNKS + /*add LodePNG identifier and version as a text chunk, for debugging*/ + unsigned add_id; + /*encode text chunks as zTXt chunks instead of tEXt chunks, and use compression in iTXt chunks*/ + unsigned text_compression; +#endif /*LODEPNG_COMPILE_ANCILLARY_CHUNKS*/ +} LodePNGEncoderSettings; + +void lodepng_encoder_settings_init(LodePNGEncoderSettings* settings); +#endif /*LODEPNG_COMPILE_ENCODER*/ + + +#if defined(LODEPNG_COMPILE_DECODER) || defined(LODEPNG_COMPILE_ENCODER) +/*The settings, state and information for extended encoding and decoding.*/ +typedef struct LodePNGState +{ +#ifdef LODEPNG_COMPILE_DECODER + LodePNGDecoderSettings decoder; /*the decoding settings*/ +#endif /*LODEPNG_COMPILE_DECODER*/ +#ifdef LODEPNG_COMPILE_ENCODER + LodePNGEncoderSettings encoder; /*the encoding settings*/ +#endif /*LODEPNG_COMPILE_ENCODER*/ + LodePNGColorMode info_raw; /*specifies the format in which you would like to get the raw pixel buffer*/ + LodePNGInfo info_png; /*info of the PNG image obtained after decoding*/ + unsigned error; +#ifdef LODEPNG_COMPILE_CPP + //For the lodepng::State subclass. + virtual ~LodePNGState(){} +#endif +} LodePNGState; + +/*init, cleanup and copy functions to use with this struct*/ +void lodepng_state_init(LodePNGState* state); +void lodepng_state_cleanup(LodePNGState* state); +void lodepng_state_copy(LodePNGState* dest, const LodePNGState* source); +#endif /* defined(LODEPNG_COMPILE_DECODER) || defined(LODEPNG_COMPILE_ENCODER) */ + +#ifdef LODEPNG_COMPILE_DECODER +/* +Same as lodepng_decode_memory, but uses a LodePNGState to allow custom settings and +getting much more information about the PNG image and color mode. +*/ +unsigned lodepng_decode(unsigned char** out, unsigned* w, unsigned* h, + LodePNGState* state, + const unsigned char* in, size_t insize); + +/* +Read the PNG header, but not the actual data. This returns only the information +that is in the header chunk of the PNG, such as width, height and color type. The +information is placed in the info_png field of the LodePNGState. +*/ +unsigned lodepng_inspect(unsigned* w, unsigned* h, + LodePNGState* state, + const unsigned char* in, size_t insize); +#endif /*LODEPNG_COMPILE_DECODER*/ + + +#ifdef LODEPNG_COMPILE_ENCODER +/*This function allocates the out buffer with standard malloc and stores the size in *outsize.*/ +unsigned lodepng_encode(unsigned char** out, size_t* outsize, + const unsigned char* image, unsigned w, unsigned h, + LodePNGState* state); +#endif /*LODEPNG_COMPILE_ENCODER*/ + +/* +The lodepng_chunk functions are normally not needed, except to traverse the +unknown chunks stored in the LodePNGInfo struct, or add new ones to it. +It also allows traversing the chunks of an encoded PNG file yourself. + +PNG standard chunk naming conventions: +First byte: uppercase = critical, lowercase = ancillary +Second byte: uppercase = public, lowercase = private +Third byte: must be uppercase +Fourth byte: uppercase = unsafe to copy, lowercase = safe to copy +*/ + +/*get the length of the data of the chunk. 
Total chunk length has 12 bytes more.*/ +unsigned lodepng_chunk_length(const unsigned char* chunk); + +/*puts the 4-byte type in null terminated string*/ +void lodepng_chunk_type(char type[5], const unsigned char* chunk); + +/*check if the type is the given type*/ +unsigned char lodepng_chunk_type_equals(const unsigned char* chunk, const char* type); + +/*0: it's one of the critical chunk types, 1: it's an ancillary chunk (see PNG standard)*/ +unsigned char lodepng_chunk_ancillary(const unsigned char* chunk); + +/*0: public, 1: private (see PNG standard)*/ +unsigned char lodepng_chunk_private(const unsigned char* chunk); + +/*0: the chunk is unsafe to copy, 1: the chunk is safe to copy (see PNG standard)*/ +unsigned char lodepng_chunk_safetocopy(const unsigned char* chunk); + +/*get pointer to the data of the chunk, where the input points to the header of the chunk*/ +unsigned char* lodepng_chunk_data(unsigned char* chunk); +const unsigned char* lodepng_chunk_data_const(const unsigned char* chunk); + +/*returns 0 if the crc is correct, 1 if it's incorrect (0 for OK as usual!)*/ +unsigned lodepng_chunk_check_crc(const unsigned char* chunk); + +/*generates the correct CRC from the data and puts it in the last 4 bytes of the chunk*/ +void lodepng_chunk_generate_crc(unsigned char* chunk); + +/*iterate to next chunks. don't use on IEND chunk, as there is no next chunk then*/ +unsigned char* lodepng_chunk_next(unsigned char* chunk); +const unsigned char* lodepng_chunk_next_const(const unsigned char* chunk); + +/* +Appends chunk to the data in out. The given chunk should already have its chunk header. +The out variable and outlength are updated to reflect the new reallocated buffer. +Returns error code (0 if it went ok) +*/ +unsigned lodepng_chunk_append(unsigned char** out, size_t* outlength, const unsigned char* chunk); + +/* +Appends new chunk to out. The chunk to append is given by giving its length, type +and data separately. The type is a 4-letter string. +The out variable and outlength are updated to reflect the new reallocated buffer. +Returne error code (0 if it went ok) +*/ +unsigned lodepng_chunk_create(unsigned char** out, size_t* outlength, unsigned length, + const char* type, const unsigned char* data); + + +/*Calculate CRC32 of buffer*/ +unsigned lodepng_crc32(const unsigned char* buf, size_t len); +#endif /*LODEPNG_COMPILE_PNG*/ + + +#ifdef LODEPNG_COMPILE_ZLIB +/* +This zlib part can be used independently to zlib compress and decompress a +buffer. It cannot be used to create gzip files however, and it only supports the +part of zlib that is required for PNG, it does not support dictionaries. +*/ + +#ifdef LODEPNG_COMPILE_DECODER +/*Inflate a buffer. Inflate is the decompression step of deflate. Out buffer must be freed after use.*/ +unsigned lodepng_inflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGDecompressSettings* settings); + +/* +Decompresses Zlib data. Reallocates the out buffer and appends the data. The +data must be according to the zlib specification. +Either, *out must be NULL and *outsize must be 0, or, *out must be a valid +buffer and *outsize its size in bytes. out must be freed by user after usage. +*/ +unsigned lodepng_zlib_decompress(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGDecompressSettings* settings); +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_ENCODER +/* +Compresses data with Zlib. Reallocates the out buffer and appends the data. 
+Zlib adds a small header and trailer around the deflate data. +The data is output in the format of the zlib specification. +Either, *out must be NULL and *outsize must be 0, or, *out must be a valid +buffer and *outsize its size in bytes. out must be freed by user after usage. +*/ +unsigned lodepng_zlib_compress(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGCompressSettings* settings); + +/* +Find length-limited Huffman code for given frequencies. This function is in the +public interface only for tests, it's used internally by lodepng_deflate. +*/ +unsigned lodepng_huffman_code_lengths(unsigned* lengths, const unsigned* frequencies, + size_t numcodes, unsigned maxbitlen); + +/*Compress a buffer with deflate. See RFC 1951. Out buffer must be freed after use.*/ +unsigned lodepng_deflate(unsigned char** out, size_t* outsize, + const unsigned char* in, size_t insize, + const LodePNGCompressSettings* settings); + +#endif /*LODEPNG_COMPILE_ENCODER*/ +#endif /*LODEPNG_COMPILE_ZLIB*/ + +#ifdef LODEPNG_COMPILE_DISK +/* +Load a file from disk into buffer. The function allocates the out buffer, and +after usage you should free it. +out: output parameter, contains pointer to loaded buffer. +outsize: output parameter, size of the allocated out buffer +filename: the path to the file to load +return value: error code (0 means ok) +*/ +unsigned lodepng_load_file(unsigned char** out, size_t* outsize, const char* filename); + +/* +Save a file from buffer to disk. Warning, if it exists, this function overwrites +the file without warning! +buffer: the buffer to write +buffersize: size of the buffer to write +filename: the path to the file to save to +return value: error code (0 means ok) +*/ +unsigned lodepng_save_file(const unsigned char* buffer, size_t buffersize, const char* filename); +#endif /*LODEPNG_COMPILE_DISK*/ + +#ifdef LODEPNG_COMPILE_CPP +//The LodePNG C++ wrapper uses std::vectors instead of manually allocated memory buffers. +namespace lodepng +{ +#ifdef LODEPNG_COMPILE_PNG +class State : public LodePNGState +{ + public: + State(); + State(const State& other); + virtual ~State(); + State& operator=(const State& other); +}; + +#ifdef LODEPNG_COMPILE_DECODER +//Same as other lodepng::decode, but using a State for more settings and information. +unsigned decode(std::vector& out, unsigned& w, unsigned& h, + State& state, + const unsigned char* in, size_t insize); +unsigned decode(std::vector& out, unsigned& w, unsigned& h, + State& state, + const std::vector& in); +#endif /*LODEPNG_COMPILE_DECODER*/ + +#ifdef LODEPNG_COMPILE_ENCODER +//Same as other lodepng::encode, but using a State for more settings and information. +unsigned encode(std::vector& out, + const unsigned char* in, unsigned w, unsigned h, + State& state); +unsigned encode(std::vector& out, + const std::vector& in, unsigned w, unsigned h, + State& state); +#endif /*LODEPNG_COMPILE_ENCODER*/ + +#ifdef LODEPNG_COMPILE_DISK +/* +Load a file from disk into an std::vector. If the vector is empty, then either +the file doesn't exist or is an empty file. +*/ +void load_file(std::vector& buffer, const std::string& filename); + +/* +Save the binary data in an std::vector to a file on disk. The file is overwritten +without warning. 
+*/ +void save_file(const std::vector& buffer, const std::string& filename); +#endif //LODEPNG_COMPILE_DISK +#endif //LODEPNG_COMPILE_PNG + +#ifdef LODEPNG_COMPILE_ZLIB +#ifdef LODEPNG_COMPILE_DECODER +//Zlib-decompress an unsigned char buffer +unsigned decompress(std::vector& out, const unsigned char* in, size_t insize, + const LodePNGDecompressSettings& settings = lodepng_default_decompress_settings); + +//Zlib-decompress an std::vector +unsigned decompress(std::vector& out, const std::vector& in, + const LodePNGDecompressSettings& settings = lodepng_default_decompress_settings); +#endif //LODEPNG_COMPILE_DECODER + +#ifdef LODEPNG_COMPILE_ENCODER +//Zlib-compress an unsigned char buffer +unsigned compress(std::vector& out, const unsigned char* in, size_t insize, + const LodePNGCompressSettings& settings = lodepng_default_compress_settings); + +//Zlib-compress an std::vector +unsigned compress(std::vector& out, const std::vector& in, + const LodePNGCompressSettings& settings = lodepng_default_compress_settings); +#endif //LODEPNG_COMPILE_ENCODER +#endif //LODEPNG_COMPILE_ZLIB +} //namespace lodepng +#endif /*LODEPNG_COMPILE_CPP*/ + +/* +TODO: +[.] test if there are no memory leaks or security exploits - done a lot but needs to be checked often +[.] check compatibility with vareous compilers - done but needs to be redone for every newer version +[X] converting color to 16-bit per channel types +[ ] read all public PNG chunk types (but never let the color profile and gamma ones touch RGB values) +[ ] make sure encoder generates no chunks with size > (2^31)-1 +[ ] partial decoding (stream processing) +[X] let the "isFullyOpaque" function check color keys and transparent palettes too +[X] better name for the variables "codes", "codesD", "codelengthcodes", "clcl" and "lldl" +[ ] don't stop decoding on errors like 69, 57, 58 (make warnings) +[ ] make option to choose if the raw image with non multiple of 8 bits per scanline should have padding bits or not +[ ] let the C++ wrapper catch exceptions coming from the standard library and return LodePNG error codes +*/ + +#endif /*LODEPNG_H inclusion guard*/ + +/* +LodePNG Documentation +--------------------- + +0. table of contents +-------------------- + + 1. about + 1.1. supported features + 1.2. features not supported + 2. C and C++ version + 3. security + 4. decoding + 5. encoding + 6. color conversions + 6.1. PNG color types + 6.2. color conversions + 6.3. padding bits + 6.4. A note about 16-bits per channel and endianness + 7. error values + 8. chunks and PNG editing + 9. compiler support + 10. examples + 10.1. decoder C++ example + 10.2. decoder C example + 11. changes + 12. contact information + + +1. about +-------- + +PNG is a file format to store raster images losslessly with good compression, +supporting different color types and alpha channel. + +LodePNG is a PNG codec according to the Portable Network Graphics (PNG) +Specification (Second Edition) - W3C Recommendation 10 November 2003. + +The specifications used are: + +*) Portable Network Graphics (PNG) Specification (Second Edition): + http://www.w3.org/TR/2003/REC-PNG-20031110 +*) RFC 1950 ZLIB Compressed Data Format version 3.3: + http://www.gzip.org/zlib/rfc-zlib.html +*) RFC 1951 DEFLATE Compressed Data Format Specification ver 1.3: + http://www.gzip.org/zlib/rfc-deflate.html + +The most recent version of LodePNG can currently be found at +http://lodev.org/lodepng/ + +LodePNG works both in C (ISO C90) and C++, with a C++ wrapper that adds +extra functionality. 
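+
+As a minimal sketch of the single-call C++ decoding (the file name "image.png"
+is only a placeholder, and error handling is reduced to printing the error text):
+
+#include "lodepng.h"
+#include <iostream>
+#include <vector>
+
+int main()
+{
+  std::vector<unsigned char> image; //raw pixels in RGBA, 4 bytes per pixel
+  unsigned w, h;
+  unsigned error = lodepng::decode(image, w, h, "image.png"); //defaults: LCT_RGBA, 8 bit
+  if(error) std::cout << "decode error " << error << ": " << lodepng_error_text(error) << std::endl;
+  return 0;
+}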
+ +LodePNG exists out of two files: +-lodepng.h: the header file for both C and C++ +-lodepng.c(pp): give it the name lodepng.c or lodepng.cpp (or .cc) depending on your usage + +If you want to start using LodePNG right away without reading this doc, get the +examples from the LodePNG website to see how to use it in code, or check the +smaller examples in chapter 13 here. + +LodePNG is simple but only supports the basic requirements. To achieve +simplicity, the following design choices were made: There are no dependencies +on any external library. There are functions to decode and encode a PNG with +a single function call, and extended versions of these functions taking a +LodePNGState struct allowing to specify or get more information. By default +the colors of the raw image are always RGB or RGBA, no matter what color type +the PNG file uses. To read and write files, there are simple functions to +convert the files to/from buffers in memory. + +This all makes LodePNG suitable for loading textures in games, demos and small +programs, ... It's less suitable for full fledged image editors, loading PNGs +over network (it requires all the image data to be available before decoding can +begin), life-critical systems, ... + +1.1. supported features +----------------------- + +The following features are supported by the decoder: + +*) decoding of PNGs with any color type, bit depth and interlace mode, to a 24- or 32-bit color raw image, + or the same color type as the PNG +*) encoding of PNGs, from any raw image to 24- or 32-bit color, or the same color type as the raw image +*) Adam7 interlace and deinterlace for any color type +*) loading the image from harddisk or decoding it from a buffer from other sources than harddisk +*) support for alpha channels, including RGBA color model, translucent palettes and color keying +*) zlib decompression (inflate) +*) zlib compression (deflate) +*) CRC32 and ADLER32 checksums +*) handling of unknown chunks, allowing making a PNG editor that stores custom and unknown chunks. +*) the following chunks are supported (generated/interpreted) by both encoder and decoder: + IHDR: header information + PLTE: color palette + IDAT: pixel data + IEND: the final chunk + tRNS: transparency for palettized images + tEXt: textual information + zTXt: compressed textual information + iTXt: international textual information + bKGD: suggested background color + pHYs: physical dimensions + tIME: modification time + +1.2. features not supported +--------------------------- + +The following features are _not_ supported: + +*) some features needed to make a conformant PNG-Editor might be still missing. +*) partial loading/stream processing. All data must be available and is processed in one call. +*) The following public chunks are not supported but treated as unknown chunks by LodePNG + cHRM, gAMA, iCCP, sRGB, sBIT, hIST, sPLT + Some of these are not supported on purpose: LodePNG wants to provide the RGB values + stored in the pixels, not values modified by system dependent gamma or color models. + + +2. C and C++ version +-------------------- + +The C version uses buffers allocated with alloc that you need to free() +yourself. You need to use init and cleanup functions for each struct whenever +using a struct from the C version to avoid exploits and memory leaks. + +The C++ version has extra functions with std::vectors in the interface and the +lodepng::State class which is a LodePNGState with constructor and destructor. 
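+
+As a small sketch of the C call pattern described above (the "image.png" name is
+only a placeholder; the decoder allocates the buffer, so the caller must free it):
+
+#include "lodepng.h"
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(void)
+{
+  unsigned char* image = 0;
+  unsigned w, h;
+  unsigned error = lodepng_decode32_file(&image, &w, &h, "image.png"); //32-bit RGBA
+  if(error) printf("decode error %u: %s\n", error, lodepng_error_text(error));
+  //...use image here: w * h pixels, 4 bytes per pixel...
+  free(image);
+  return 0;
+}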
+ +These files work without modification for both C and C++ compilers because all +the additional C++ code is in "#ifdef __cplusplus" blocks that make C-compilers +ignore it, and the C code is made to compile both with strict ISO C90 and C++. + +To use the C++ version, you need to rename the source file to lodepng.cpp +(instead of lodepng.c), and compile it with a C++ compiler. + +To use the C version, you need to rename the source file to lodepng.c (instead +of lodepng.cpp), and compile it with a C compiler. + + +3. Security +----------- + +Even if carefully designed, it's always possible that LodePNG contains possible +exploits. If you discover one, please let me know, and it will be fixed. + +When using LodePNG, care has to be taken with the C version of LodePNG, as well +as the C-style structs when working with C++. The following conventions are used +for all C-style structs: + +-if a struct has a corresponding init function, always call the init function when making a new one +-if a struct has a corresponding cleanup function, call it before the struct disappears to avoid memory leaks +-if a struct has a corresponding copy function, use the copy function instead of "=". + The destination must also be inited already. + + +4. Decoding +----------- + +Decoding converts a PNG compressed image to a raw pixel buffer. + +Most documentation on using the decoder is at its declarations in the header +above. For C, simple decoding can be done with functions such as +lodepng_decode32, and more advanced decoding can be done with the struct +LodePNGState and lodepng_decode. For C++, all decoding can be done with the +various lodepng::decode functions, and lodepng::State can be used for advanced +features. + +When using the LodePNGState, it uses the following fields for decoding: +*) LodePNGInfo info_png: it stores extra information about the PNG (the input) in here +*) LodePNGColorMode info_raw: here you can say what color mode of the raw image (the output) you want to get +*) LodePNGDecoderSettings decoder: you can specify a few extra settings for the decoder to use + +LodePNGInfo info_png +-------------------- + +After decoding, this contains extra information of the PNG image, except the actual +pixels, width and height because these are already gotten directly from the decoder +functions. + +It contains for example the original color type of the PNG image, text comments, +suggested background color, etc... More details about the LodePNGInfo struct are +at its declaration documentation. + +LodePNGColorMode info_raw +------------------------- + +When decoding, here you can specify which color type you want +the resulting raw image to be. If this is different from the colortype of the +PNG, then the decoder will automatically convert the result. This conversion +always works, except if you want it to convert a color PNG to greyscale or to +a palette with missing colors. + +By default, 32-bit color is used for the result. + +LodePNGDecoderSettings decoder +------------------------------ + +The settings can be used to ignore the errors created by invalid CRC and Adler32 +chunks, and to disable the decoding of tEXt chunks. + +There's also a setting color_convert, true by default. If false, no conversion +is done, the resulting data will be as it was in the PNG (after decompression) +and you'll have to puzzle the colors of the pixels together yourself using the +color type information in the LodePNGInfo. + + +5. Encoding +----------- + +Encoding converts a raw pixel buffer to a PNG compressed image. 
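+
+A minimal sketch of that direction (the buffer contents and the "out.png" name are
+only placeholders; the simple encode call chooses the PNG color type automatically):
+
+#include "lodepng.h"
+#include <vector>
+
+int main()
+{
+  unsigned w = 64, h = 64;
+  std::vector<unsigned char> image(w * h * 4, 255); //opaque white, RGBA input
+  unsigned error = lodepng::encode("out.png", image, w, h); //defaults: LCT_RGBA, 8 bit
+  return error; //0 means the PNG file was written
+}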
+
Most documentation on using the encoder is at its declarations in the header
above. For C, simple encoding can be done with functions such as
lodepng_encode32, and more advanced encoding can be done with the struct
LodePNGState and lodepng_encode. For C++, all encoding can be done with the
various lodepng::encode functions, and lodepng::State can be used for advanced
features.

Like the decoder, the encoder can also give errors. However, it gives fewer
errors, since the encoder input is trusted, while the decoder input (a PNG image
that could be forged by anyone) is not.

When using the LodePNGState, it uses the following fields for encoding:
*) LodePNGInfo info_png: here you specify how you want the PNG (the output) to be.
*) LodePNGColorMode info_raw: here you say what color type the raw image (the input) has
*) LodePNGEncoderSettings encoder: you can specify a few settings for the encoder to use

LodePNGInfo info_png
--------------------

When encoding, you use this the opposite way from decoding: you fill in the
values you want the PNG to have before encoding. By default it's not needed to
specify a color type for the PNG since it's automatically chosen, but it's
possible to choose it yourself given the right settings.

The encoder will not always exactly match the LodePNGInfo struct you give; it
tries to get as close as possible. Some things are ignored by the encoder. The
encoder uses, for example, the following settings from it when applicable:
colortype and bitdepth, text chunks, time chunk, the color key, the palette, the
background color, the interlace method, unknown chunks, ...

When encoding to a PNG with colortype 3, the encoder will generate a PLTE chunk.
If the palette contains any colors for which the alpha channel is not 255 (so
there are translucent colors in the palette), it'll add a tRNS chunk.

LodePNGColorMode info_raw
-------------------------

Here you specify the color type of the raw image that you give as input,
including a possible transparent color key and palette you happen to be using in
your raw image data.

By default, 32-bit color is assumed, meaning your input has to be in RGBA
format with 4 bytes (unsigned chars) per pixel.

LodePNGEncoderSettings encoder
------------------------------

The following settings are supported (some are in sub-structs):
*) auto_convert: when this option is enabled, the encoder will
automatically choose the smallest possible color mode (including color key) that
can encode the colors of all pixels without information loss.
*) btype: the block type for LZ77. 0 = uncompressed, 1 = fixed Huffman tree,
   2 = dynamic Huffman tree (best compression). Should be 2 for proper
   compression.
*) use_lz77: whether or not to use LZ77 for compressed block types. Should be
   true for proper compression.
*) windowsize: the window size used by the LZ77 encoder (1 - 32768). Has value
   2048 by default, but can be set to 32768 for better, but slower, compression.
*) force_palette: if colortype is 2 or 6, you can make the encoder write a PLTE
   chunk if force_palette is true. This can be used as a suggested palette to
   convert to by viewers that don't support more than 256 colors (if those still
   exist).
*) add_id: add text chunk "Encoder: LodePNG <version>" to the image.
*) text_compression: default 1. If 1, it'll store texts as zTXt instead of tEXt chunks.
   zTXt chunks use zlib compression on the text.
This gives a smaller result on + large texts but a larger result on small texts (such as a single program name). + It's all tEXt or all zTXt though, there's no separate setting per text yet. + + +6. color conversions +-------------------- + +An important thing to note about LodePNG, is that the color type of the PNG, and +the color type of the raw image, are completely independent. By default, when +you decode a PNG, you get the result as a raw image in the color type you want, +no matter whether the PNG was encoded with a palette, greyscale or RGBA color. +And if you encode an image, by default LodePNG will automatically choose the PNG +color type that gives good compression based on the values of colors and amount +of colors in the image. It can be configured to let you control it instead as +well, though. + +To be able to do this, LodePNG does conversions from one color mode to another. +It can convert from almost any color type to any other color type, except the +following conversions: RGB to greyscale is not supported, and converting to a +palette when the palette doesn't have a required color is not supported. This is +not supported on purpose: this is information loss which requires a color +reduction algorithm that is beyong the scope of a PNG encoder (yes, RGB to grey +is easy, but there are multiple ways if you want to give some channels more +weight). + +By default, when decoding, you get the raw image in 32-bit RGBA or 24-bit RGB +color, no matter what color type the PNG has. And by default when encoding, +LodePNG automatically picks the best color model for the output PNG, and expects +the input image to be 32-bit RGBA or 24-bit RGB. So, unless you want to control +the color format of the images yourself, you can skip this chapter. + +6.1. PNG color types +-------------------- + +A PNG image can have many color types, ranging from 1-bit color to 64-bit color, +as well as palettized color modes. After the zlib decompression and unfiltering +in the PNG image is done, the raw pixel data will have that color type and thus +a certain amount of bits per pixel. If you want the output raw image after +decoding to have another color type, a conversion is done by LodePNG. + +The PNG specification gives the following color types: + +0: greyscale, bit depths 1, 2, 4, 8, 16 +2: RGB, bit depths 8 and 16 +3: palette, bit depths 1, 2, 4 and 8 +4: greyscale with alpha, bit depths 8 and 16 +6: RGBA, bit depths 8 and 16 + +Bit depth is the amount of bits per pixel per color channel. So the total amount +of bits per pixel is: amount of channels * bitdepth. + +6.2. color conversions +---------------------- + +As explained in the sections about the encoder and decoder, you can specify +color types and bit depths in info_png and info_raw to change the default +behaviour. + +If, when decoding, you want the raw image to be something else than the default, +you need to set the color type and bit depth you want in the LodePNGColorMode, +or the parameters colortype and bitdepth of the simple decoding function. + +If, when encoding, you use another color type than the default in the raw input +image, you need to specify its color type and bit depth in the LodePNGColorMode +of the raw image, or use the parameters colortype and bitdepth of the simple +encoding function. 
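For example, with the simple functions this is just a matter of passing the
extra colortype and bitdepth parameters (a minimal sketch; buffers and file
names are illustrative):

  //decode to 8-bit greyscale instead of the default 32-bit RGBA
  unsigned error = lodepng::decode(image, width, height, "in.png", LCT_GREY, 8);

  //encode a 24-bit RGB buffer (3 bytes per pixel) instead of the default RGBA
  unsigned error = lodepng::encode("out.png", rgb, width, height, LCT_RGB, 8);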
+ +If, when encoding, you don't want LodePNG to choose the output PNG color type +but control it yourself, you need to set auto_convert in the encoder settings +to false, and specify the color type you want in the LodePNGInfo of the +encoder (including palette: it can generate a palette if auto_convert is true, +otherwise not). + +If the input and output color type differ (whether user chosen or auto chosen), +LodePNG will do a color conversion, which follows the rules below, and may +sometimes result in an error. + +To avoid some confusion: +-the decoder converts from PNG to raw image +-the encoder converts from raw image to PNG +-the colortype and bitdepth in LodePNGColorMode info_raw, are those of the raw image +-the colortype and bitdepth in the color field of LodePNGInfo info_png, are those of the PNG +-when encoding, the color type in LodePNGInfo is ignored if auto_convert + is enabled, it is automatically generated instead +-when decoding, the color type in LodePNGInfo is set by the decoder to that of the original + PNG image, but it can be ignored since the raw image has the color type you requested instead +-if the color type of the LodePNGColorMode and PNG image aren't the same, a conversion + between the color types is done if the color types are supported. If it is not + supported, an error is returned. If the types are the same, no conversion is done. +-even though some conversions aren't supported, LodePNG supports loading PNGs from any + colortype and saving PNGs to any colortype, sometimes it just requires preparing + the raw image correctly before encoding. +-both encoder and decoder use the same color converter. + +Non supported color conversions: +-color to greyscale: no error is thrown, but the result will look ugly because +only the red channel is taken +-anything to palette when that palette does not have that color in it: in this +case an error is thrown + +Supported color conversions: +-anything to 8-bit RGB, 8-bit RGBA, 16-bit RGB, 16-bit RGBA +-any grey or grey+alpha, to grey or grey+alpha +-anything to a palette, as long as the palette has the requested colors in it +-removing alpha channel +-higher to smaller bitdepth, and vice versa + +If you want no color conversion to be done (e.g. for speed or control): +-In the encoder, you can make it save a PNG with any color type by giving the +raw color mode and LodePNGInfo the same color mode, and setting auto_convert to +false. +-In the decoder, you can make it store the pixel data in the same color type +as the PNG has, by setting the color_convert setting to false. Settings in +info_raw are then ignored. + +The function lodepng_convert does the color conversion. It is available in the +interface but normally isn't needed since the encoder and decoder already call +it. + +6.3. padding bits +----------------- + +In the PNG file format, if a less than 8-bit per pixel color type is used and the scanlines +have a bit amount that isn't a multiple of 8, then padding bits are used so that each +scanline starts at a fresh byte. But that is NOT true for the LodePNG raw input and output. +The raw input image you give to the encoder, and the raw output image you get from the decoder +will NOT have these padding bits, e.g. in the case of a 1-bit image with a width +of 7 pixels, the first pixel of the second scanline will the the 8th bit of the first byte, +not the first bit of a new byte. + +6.4. 
A note about 16-bits per channel and endianness +---------------------------------------------------- + +LodePNG uses unsigned char arrays for 16-bit per channel colors too, just like +for any other color format. The 16-bit values are stored in big endian (most +significant byte first) in these arrays. This is the opposite order of the +little endian used by x86 CPU's. + +LodePNG always uses big endian because the PNG file format does so internally. +Conversions to other formats than PNG uses internally are not supported by +LodePNG on purpose, there are myriads of formats, including endianness of 16-bit +colors, the order in which you store R, G, B and A, and so on. Supporting and +converting to/from all that is outside the scope of LodePNG. + +This may mean that, depending on your use case, you may want to convert the big +endian output of LodePNG to little endian with a for loop. This is certainly not +always needed, many applications and libraries support big endian 16-bit colors +anyway, but it means you cannot simply cast the unsigned char* buffer to an +unsigned short* buffer on x86 CPUs. + + +7. error values +--------------- + +All functions in LodePNG that return an error code, return 0 if everything went +OK, or a non-zero code if there was an error. + +The meaning of the LodePNG error values can be retrieved with the function +lodepng_error_text: given the numerical error code, it returns a description +of the error in English as a string. + +Check the implementation of lodepng_error_text to see the meaning of each code. + + +8. chunks and PNG editing +------------------------- + +If you want to add extra chunks to a PNG you encode, or use LodePNG for a PNG +editor that should follow the rules about handling of unknown chunks, or if your +program is able to read other types of chunks than the ones handled by LodePNG, +then that's possible with the chunk functions of LodePNG. + +A PNG chunk has the following layout: + +4 bytes length +4 bytes type name +length bytes data +4 bytes CRC + +8.1. iterating through chunks +----------------------------- + +If you have a buffer containing the PNG image data, then the first chunk (the +IHDR chunk) starts at byte number 8 of that buffer. The first 8 bytes are the +signature of the PNG and are not part of a chunk. But if you start at byte 8 +then you have a chunk, and can check the following things of it. + +NOTE: none of these functions check for memory buffer boundaries. To avoid +exploits, always make sure the buffer contains all the data of the chunks. +When using lodepng_chunk_next, make sure the returned value is within the +allocated memory. + +unsigned lodepng_chunk_length(const unsigned char* chunk): + +Get the length of the chunk's data. The total chunk length is this length + 12. + +void lodepng_chunk_type(char type[5], const unsigned char* chunk): +unsigned char lodepng_chunk_type_equals(const unsigned char* chunk, const char* type): + +Get the type of the chunk or compare if it's a certain type + +unsigned char lodepng_chunk_critical(const unsigned char* chunk): +unsigned char lodepng_chunk_private(const unsigned char* chunk): +unsigned char lodepng_chunk_safetocopy(const unsigned char* chunk): + +Check if the chunk is critical in the PNG standard (only IHDR, PLTE, IDAT and IEND are). +Check if the chunk is private (public chunks are part of the standard, private ones not). +Check if the chunk is safe to copy. 
If it's not, then, when modifying data in a critical +chunk, unsafe to copy chunks of the old image may NOT be saved in the new one if your +program doesn't handle that type of unknown chunk. + +unsigned char* lodepng_chunk_data(unsigned char* chunk): +const unsigned char* lodepng_chunk_data_const(const unsigned char* chunk): + +Get a pointer to the start of the data of the chunk. + +unsigned lodepng_chunk_check_crc(const unsigned char* chunk): +void lodepng_chunk_generate_crc(unsigned char* chunk): + +Check if the crc is correct or generate a correct one. + +unsigned char* lodepng_chunk_next(unsigned char* chunk): +const unsigned char* lodepng_chunk_next_const(const unsigned char* chunk): + +Iterate to the next chunk. This works if you have a buffer with consecutive chunks. Note that these +functions do no boundary checking of the allocated data whatsoever, so make sure there is enough +data available in the buffer to be able to go to the next chunk. + +unsigned lodepng_chunk_append(unsigned char** out, size_t* outlength, const unsigned char* chunk): +unsigned lodepng_chunk_create(unsigned char** out, size_t* outlength, unsigned length, + const char* type, const unsigned char* data): + +These functions are used to create new chunks that are appended to the data in *out that has +length *outlength. The append function appends an existing chunk to the new data. The create +function creates a new chunk with the given parameters and appends it. Type is the 4-letter +name of the chunk. + +8.2. chunks in info_png +----------------------- + +The LodePNGInfo struct contains fields with the unknown chunk in it. It has 3 +buffers (each with size) to contain 3 types of unknown chunks: +the ones that come before the PLTE chunk, the ones that come between the PLTE +and the IDAT chunks, and the ones that come after the IDAT chunks. +It's necessary to make the distionction between these 3 cases because the PNG +standard forces to keep the ordering of unknown chunks compared to the critical +chunks, but does not force any other ordering rules. + +info_png.unknown_chunks_data[0] is the chunks before PLTE +info_png.unknown_chunks_data[1] is the chunks after PLTE, before IDAT +info_png.unknown_chunks_data[2] is the chunks after IDAT + +The chunks in these 3 buffers can be iterated through and read by using the same +way described in the previous subchapter. + +When using the decoder to decode a PNG, you can make it store all unknown chunks +if you set the option settings.remember_unknown_chunks to 1. By default, this +option is off (0). + +The encoder will always encode unknown chunks that are stored in the info_png. +If you need it to add a particular chunk that isn't known by LodePNG, you can +use lodepng_chunk_append or lodepng_chunk_create to the chunk data in +info_png.unknown_chunks_data[x]. + +Chunks that are known by LodePNG should not be added in that way. E.g. to make +LodePNG add a bKGD chunk, set background_defined to true and add the correct +parameters there instead. + + +9. compiler support +------------------- + +No libraries other than the current standard C library are needed to compile +LodePNG. For the C++ version, only the standard C++ library is needed on top. +Add the files lodepng.c(pp) and lodepng.h to your project, include +lodepng.h where needed, and your program can read/write PNG files. + +It is compatible with C90 and up, and C++03 and up. + +If performance is important, use optimization when compiling! For both the +encoder and decoder, this makes a large difference. 
+
Make sure that LodePNG is compiled with the same compiler, of the same version,
and with the same settings as the rest of the program, or the interfaces with
std::vectors and std::strings in C++ can be incompatible.

CHAR_BIT must be 8 or higher, because LodePNG uses unsigned chars for octets.

*) gcc and g++

LodePNG is developed in gcc so this compiler is natively supported. It gives no
warnings with compiler options "-Wall -Wextra -pedantic -ansi", with gcc and g++
version 4.7.1 on Linux, 32-bit and 64-bit.

*) Clang

Fully supported and warning-free.

*) Mingw

The Mingw compiler (a port of gcc for Windows) should be fully supported by
LodePNG.

*) Visual Studio and Visual C++ Express Edition

LodePNG should be warning-free with warning level W4. Two warnings were disabled
with pragmas though: warning 4244 about implicit conversions, and warning 4996
where it wants to use a non-standard function fopen_s instead of the standard C
fopen.

Visual Studio may want "stdafx.h" files to be included in each source file and
give an error "unexpected end of file while looking for precompiled header".
This is not standard C++ and will not be added to the stock LodePNG. You can
disable it for lodepng.cpp only by right-clicking it, Properties, C/C++,
Precompiled Headers, and set it to Not Using Precompiled Headers there.

NOTE: Modern versions of VS should be fully supported, but old versions, e.g.
VS6, are not guaranteed to work.

*) Compilers on Macintosh

LodePNG has been reported to work both with gcc and LLVM for Macintosh, both for
C and C++.

*) Other Compilers

If you encounter problems on any compilers, feel free to let me know and I may
try to fix it if the compiler is modern and standards compliant.


10. examples
------------

This decoder example shows the most basic usage of LodePNG. More complex
examples can be found on the LodePNG website.

10.1. decoder C++ example
-------------------------

#include "lodepng.h"
#include <iostream>

int main(int argc, char *argv[])
{
  const char* filename = argc > 1 ? argv[1] : "test.png";

  //load and decode
  std::vector<unsigned char> image;
  unsigned width, height;
  unsigned error = lodepng::decode(image, width, height, filename);

  //if there's an error, display it
  if(error) std::cout << "decoder error " << error << ": " << lodepng_error_text(error) << std::endl;

  //the pixels are now in the vector "image", 4 bytes per pixel, ordered RGBARGBA..., use it as texture, draw it, ...
}

10.2. decoder C example
-----------------------

#include "lodepng.h"
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char *argv[])
{
  unsigned error;
  unsigned char* image;
  unsigned width, height;
  const char* filename = argc > 1 ? argv[1] : "test.png";

  error = lodepng_decode32_file(&image, &width, &height, filename);

  if(error) printf("decoder error %u: %s\n", error, lodepng_error_text(error));

  / * use image here * /

  free(image);
  return 0;
}


11. changes
-----------

The version number of LodePNG is the date of the change given in the format
yyyymmdd.

Some changes aren't backwards compatible. Those are indicated with a (!)
symbol.

*) 23 aug 2014: Reduced needless memory usage of decoder.
*) 28 jun 2014: Removed fix_png setting, always support palette OOB for
   simplicity. Made ColorProfile public.
*) 09 jun 2014: Faster encoder by fixing hash bug and more zeros optimization.
*) 22 dec 2013: Power of two windowsize required for optimization.
*) 15 apr 2013: Fixed bug with LAC_ALPHA and color key.
+*) 25 mar 2013: Added an optional feature to ignore some PNG errors (fix_png). +*) 11 mar 2013 (!): Bugfix with custom free. Changed from "my" to "lodepng_" + prefix for the custom allocators and made it possible with a new #define to + use custom ones in your project without needing to change lodepng's code. +*) 28 jan 2013: Bugfix with color key. +*) 27 okt 2012: Tweaks in text chunk keyword length error handling. +*) 8 okt 2012 (!): Added new filter strategy (entropy) and new auto color mode. + (no palette). Better deflate tree encoding. New compression tweak settings. + Faster color conversions while decoding. Some internal cleanups. +*) 23 sep 2012: Reduced warnings in Visual Studio a little bit. +*) 1 sep 2012 (!): Removed #define's for giving custom (de)compression functions + and made it work with function pointers instead. +*) 23 jun 2012: Added more filter strategies. Made it easier to use custom alloc + and free functions and toggle #defines from compiler flags. Small fixes. +*) 6 may 2012 (!): Made plugging in custom zlib/deflate functions more flexible. +*) 22 apr 2012 (!): Made interface more consistent, renaming a lot. Removed + redundant C++ codec classes. Reduced amount of structs. Everything changed, + but it is cleaner now imho and functionality remains the same. Also fixed + several bugs and shrinked the implementation code. Made new samples. +*) 6 nov 2011 (!): By default, the encoder now automatically chooses the best + PNG color model and bit depth, based on the amount and type of colors of the + raw image. For this, autoLeaveOutAlphaChannel replaced by auto_choose_color. +*) 9 okt 2011: simpler hash chain implementation for the encoder. +*) 8 sep 2011: lz77 encoder lazy matching instead of greedy matching. +*) 23 aug 2011: tweaked the zlib compression parameters after benchmarking. + A bug with the PNG filtertype heuristic was fixed, so that it chooses much + better ones (it's quite significant). A setting to do an experimental, slow, + brute force search for PNG filter types is added. +*) 17 aug 2011 (!): changed some C zlib related function names. +*) 16 aug 2011: made the code less wide (max 120 characters per line). +*) 17 apr 2011: code cleanup. Bugfixes. Convert low to 16-bit per sample colors. +*) 21 feb 2011: fixed compiling for C90. Fixed compiling with sections disabled. +*) 11 dec 2010: encoding is made faster, based on suggestion by Peter Eastman + to optimize long sequences of zeros. +*) 13 nov 2010: added LodePNG_InfoColor_hasPaletteAlpha and + LodePNG_InfoColor_canHaveAlpha functions for convenience. +*) 7 nov 2010: added LodePNG_error_text function to get error code description. +*) 30 okt 2010: made decoding slightly faster +*) 26 okt 2010: (!) changed some C function and struct names (more consistent). + Reorganized the documentation and the declaration order in the header. +*) 08 aug 2010: only changed some comments and external samples. +*) 05 jul 2010: fixed bug thanks to warnings in the new gcc version. +*) 14 mar 2010: fixed bug where too much memory was allocated for char buffers. +*) 02 sep 2008: fixed bug where it could create empty tree that linux apps could + read by ignoring the problem but windows apps couldn't. +*) 06 jun 2008: added more error checks for out of memory cases. +*) 26 apr 2008: added a few more checks here and there to ensure more safety. 
+*) 06 mar 2008: crash with encoding of strings fixed +*) 02 feb 2008: support for international text chunks added (iTXt) +*) 23 jan 2008: small cleanups, and #defines to divide code in sections +*) 20 jan 2008: support for unknown chunks allowing using LodePNG for an editor. +*) 18 jan 2008: support for tIME and pHYs chunks added to encoder and decoder. +*) 17 jan 2008: ability to encode and decode compressed zTXt chunks added + Also vareous fixes, such as in the deflate and the padding bits code. +*) 13 jan 2008: Added ability to encode Adam7-interlaced images. Improved + filtering code of encoder. +*) 07 jan 2008: (!) changed LodePNG to use ISO C90 instead of C++. A + C++ wrapper around this provides an interface almost identical to before. + Having LodePNG be pure ISO C90 makes it more portable. The C and C++ code + are together in these files but it works both for C and C++ compilers. +*) 29 dec 2007: (!) changed most integer types to unsigned int + other tweaks +*) 30 aug 2007: bug fixed which makes this Borland C++ compatible +*) 09 aug 2007: some VS2005 warnings removed again +*) 21 jul 2007: deflate code placed in new namespace separate from zlib code +*) 08 jun 2007: fixed bug with 2- and 4-bit color, and small interlaced images +*) 04 jun 2007: improved support for Visual Studio 2005: crash with accessing + invalid std::vector element [0] fixed, and level 3 and 4 warnings removed +*) 02 jun 2007: made the encoder add a tag with version by default +*) 27 may 2007: zlib and png code separated (but still in the same file), + simple encoder/decoder functions added for more simple usage cases +*) 19 may 2007: minor fixes, some code cleaning, new error added (error 69), + moved some examples from here to lodepng_examples.cpp +*) 12 may 2007: palette decoding bug fixed +*) 24 apr 2007: changed the license from BSD to the zlib license +*) 11 mar 2007: very simple addition: ability to encode bKGD chunks. +*) 04 mar 2007: (!) tEXt chunk related fixes, and support for encoding + palettized PNG images. Plus little interface change with palette and texts. +*) 03 mar 2007: Made it encode dynamic Huffman shorter with repeat codes. + Fixed a bug where the end code of a block had length 0 in the Huffman tree. +*) 26 feb 2007: Huffman compression with dynamic trees (BTYPE 2) now implemented + and supported by the encoder, resulting in smaller PNGs at the output. +*) 27 jan 2007: Made the Adler-32 test faster so that a timewaste is gone. +*) 24 jan 2007: gave encoder an error interface. Added color conversion from any + greyscale type to 8-bit greyscale with or without alpha. +*) 21 jan 2007: (!) Totally changed the interface. It allows more color types + to convert to and is more uniform. See the manual for how it works now. +*) 07 jan 2007: Some cleanup & fixes, and a few changes over the last days: + encode/decode custom tEXt chunks, separate classes for zlib & deflate, and + at last made the decoder give errors for incorrect Adler32 or Crc. +*) 01 jan 2007: Fixed bug with encoding PNGs with less than 8 bits per channel. +*) 29 dec 2006: Added support for encoding images without alpha channel, and + cleaned out code as well as making certain parts faster. +*) 28 dec 2006: Added "Settings" to the encoder. +*) 26 dec 2006: The encoder now does LZ77 encoding and produces much smaller files now. + Removed some code duplication in the decoder. Fixed little bug in an example. +*) 09 dec 2006: (!) Placed output parameters of public functions as first parameter. 
+ Fixed a bug of the decoder with 16-bit per color. +*) 15 okt 2006: Changed documentation structure +*) 09 okt 2006: Encoder class added. It encodes a valid PNG image from the + given image buffer, however for now it's not compressed. +*) 08 sep 2006: (!) Changed to interface with a Decoder class +*) 30 jul 2006: (!) LodePNG_InfoPng , width and height are now retrieved in different + way. Renamed decodePNG to decodePNGGeneric. +*) 29 jul 2006: (!) Changed the interface: image info is now returned as a + struct of type LodePNG::LodePNG_Info, instead of a vector, which was a bit clumsy. +*) 28 jul 2006: Cleaned the code and added new error checks. + Corrected terminology "deflate" into "inflate". +*) 23 jun 2006: Added SDL example in the documentation in the header, this + example allows easy debugging by displaying the PNG and its transparency. +*) 22 jun 2006: (!) Changed way to obtain error value. Added + loadFile function for convenience. Made decodePNG32 faster. +*) 21 jun 2006: (!) Changed type of info vector to unsigned. + Changed position of palette in info vector. Fixed an important bug that + happened on PNGs with an uncompressed block. +*) 16 jun 2006: Internally changed unsigned into unsigned where + needed, and performed some optimizations. +*) 07 jun 2006: (!) Renamed functions to decodePNG and placed them + in LodePNG namespace. Changed the order of the parameters. Rewrote the + documentation in the header. Renamed files to lodepng.cpp and lodepng.h +*) 22 apr 2006: Optimized and improved some code +*) 07 sep 2005: (!) Changed to std::vector interface +*) 12 aug 2005: Initial release (C++, decoder only) + + +12. contact information +----------------------- + +Feel free to contact me with suggestions, problems, comments, ... concerning +LodePNG. If you encounter a PNG image that doesn't work properly with this +decoder, feel free to send it and I'll use it to find and fix the problem. + +My email address is (puzzle the account and domain together with an @ symbol): +Domain: gmail dot com. +Account: lode dot vandevenne. 
+ + +Copyright (c) 2005-2014 Lode Vandevenne +*/ diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.cpp b/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.cpp new file mode 100644 index 000000000..d148a5d84 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.cpp @@ -0,0 +1,560 @@ +//========= Copyright Valve Corporation ============// +#include "pathtools.h" +//#include "hmdplatform_private.h" +//#include "vrcommon/strtools.h" + +#if defined( _WIN32) +#include +#include +#include +#include +#elif defined OSX +#include +#include +#include "osxfilebridge.h" +#define _S_IFDIR S_IFDIR // really from tier0/platform.h which we dont have yet +#define _MAX_PATH MAX_PATH // yet another form of _PATH define we use +#elif defined(LINUX) +#include +#include +#endif + +#include + +#include + +/** Returns the path (including filename) to the current executable */ +std::string Path_GetExecutablePath() +{ + bool bSuccess = false; + char rchPath[ 1024 ]; + size_t nBuff = sizeof(rchPath); +#if defined( _WIN32 ) + bSuccess = ::GetModuleFileNameA(NULL, rchPath, (DWORD)nBuff) > 0; +#elif defined OSX + uint32_t _nBuff = nBuff; + bSuccess = _NSGetExecutablePath(rchPath, &_nBuff) == 0; + rchPath[nBuff-1] = '\0'; +#elif defined LINUX + ssize_t nRead = readlink("/proc/self/exe", rchPath, nBuff-1 ); + if ( nRead != -1 ) + { + rchPath[ nRead ] = 0; + bSuccess = true; + } + else + { + rchPath[ 0 ] = '\0'; + } +#else + AssertMsg( false, "Implement Plat_GetExecutablePath" ); +#endif + + if( bSuccess ) + return rchPath; + else + return ""; +} + +/** Returns the path of the current working directory */ +std::string Path_GetWorkingDirectory() +{ + std::string sPath; + char buf[ 1024 ]; +#if defined( _WIN32 ) + sPath = _getcwd( buf, sizeof( buf ) ); +#else + sPath = getcwd( buf, sizeof( buf ) ); +#endif + return sPath; +} + +/** Sets the path of the current working directory. Returns true if this was successful. */ +bool Path_SetWorkingDirectory( const std::string & sPath ) +{ + bool bSuccess; +#if defined( _WIN32 ) + bSuccess = 0 == _chdir( sPath.c_str() ); +#else + bSuccess = 0 == chdir( sPath.c_str() ); +#endif + return bSuccess; +} + +std::string Path_GetModulePath() +{ +#if defined( _WIN32 ) + char path[32768]; + HMODULE hm = NULL; + + if (!GetModuleHandleExA(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS, + (LPCSTR) &Path_GetModulePath, + &hm)) + { + int ret = GetLastError(); + fprintf(stderr, "GetModuleHandle returned %d\n", ret); + return ""; + } + GetModuleFileNameA(hm, path, sizeof(path)); + FreeLibrary( hm ); + return path; +#else + Dl_info dl_info; + dladdr((void *)Path_GetModulePath, &dl_info); + return dl_info.dli_fname; +#endif +} + +/** Returns the specified path without its filename */ +std::string Path_StripFilename( const std::string & sPath, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + std::string::size_type n = sPath.find_last_of( slash ); + if( n == std::string::npos ) + return sPath; + else + return std::string( sPath.begin(), sPath.begin() + n ); +} + +/** returns just the filename from the provided full or relative path. */ +std::string Path_StripDirectory( const std::string & sPath, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + std::string::size_type n = sPath.find_last_of( slash ); + if( n == std::string::npos ) + return sPath; + else + return std::string( sPath.begin() + n + 1, sPath.end() ); +} + +/** returns just the filename with no extension of the provided filename. 
+* If there is a path the path is left intact. */ +std::string Path_StripExtension( const std::string & sPath ) +{ + for( std::string::const_reverse_iterator i = sPath.rbegin(); i != sPath.rend(); i++ ) + { + if( *i == '.' ) + { + return std::string( sPath.begin(), i.base() - 1 ); + } + + // if we find a slash there is no extension + if( *i == '\\' || *i == '/' ) + break; + } + + // we didn't find an extension + return sPath; +} + + +bool Path_IsAbsolute( const std::string & sPath ) +{ + if( sPath.empty() ) + return false; + + if( sPath.find( ':' ) != std::string::npos ) + return true; + + if( sPath[0] == '\\' || sPath[0] == '/' ) + return true; + + return false; +} + + +/** Makes an absolute path from a relative path and a base path */ +std::string Path_MakeAbsolute( const std::string & sRelativePath, const std::string & sBasePath, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + if( Path_IsAbsolute( sRelativePath ) ) + return sRelativePath; + else + { + if( !Path_IsAbsolute( sBasePath ) ) + return ""; + + std::string sCompacted = Path_Compact( Path_Join( sBasePath, sRelativePath, slash ), slash ); + if( Path_IsAbsolute( sCompacted ) ) + return sCompacted; + else + return ""; + } +} + + +/** Fixes the directory separators for the current platform */ +std::string Path_FixSlashes( const std::string & sPath, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + std::string sFixed = sPath; + for( std::string::iterator i = sFixed.begin(); i != sFixed.end(); i++ ) + { + if( *i == '/' || *i == '\\' ) + *i = slash; + } + + return sFixed; +} + + +char Path_GetSlash() +{ +#if defined(_WIN32) + return '\\'; +#else + return '/'; +#endif +} + +/** Jams two paths together with the right kind of slash */ +std::string Path_Join( const std::string & first, const std::string & second, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + // only insert a slash if we don't already have one + std::string::size_type nLen = first.length(); +#if defined(_WIN32) + if( first.back() == '\\' || first.back() == '/' ) + nLen--; +#else + char last_char = first[first.length()-1]; + if (last_char == '\\' || last_char == '/') + nLen--; +#endif + + return first.substr( 0, nLen ) + std::string( 1, slash ) + second; +} + + +std::string Path_Join( const std::string & first, const std::string & second, const std::string & third, char slash ) +{ + return Path_Join( Path_Join( first, second, slash ), third, slash ); +} + +std::string Path_Join( const std::string & first, const std::string & second, const std::string & third, const std::string &fourth, char slash ) +{ + return Path_Join( Path_Join( Path_Join( first, second, slash ), third, slash ), fourth, slash ); +} + +std::string Path_Join( + const std::string & first, + const std::string & second, + const std::string & third, + const std::string & fourth, + const std::string & fifth, + char slash ) +{ + return Path_Join( Path_Join( Path_Join( Path_Join( first, second, slash ), third, slash ), fourth, slash ), fifth, slash ); +} + +/** Removes redundant /.. elements in the path. 
Returns an empty path if the +* specified path has a broken number of directories for its number of ..s */ +std::string Path_Compact( const std::string & sRawPath, char slash ) +{ + if( slash == 0 ) + slash = Path_GetSlash(); + + std::string sPath = Path_FixSlashes( sRawPath, slash ); + std::string sSlashString( 1, slash ); + + // strip out all /./ + for( std::string::size_type i = 0; (i + 3) < sPath.length(); ) + { + if( sPath[ i ] == slash && sPath[ i+1 ] == '.' && sPath[ i+2 ] == slash ) + { + sPath.replace( i, 3, sSlashString ); + } + else + { + ++i; + } + } + + + // get rid of trailing /. but leave the path separator + if( sPath.length() > 2 ) + { + std::string::size_type len = sPath.length(); + if( sPath[ len-1 ] == '.' && sPath[ len-2 ] == slash ) + { + // sPath.pop_back(); + sPath[len-1] = 0; // for now, at least + } + } + + // get rid of leading ./ + if( sPath.length() > 2 ) + { + if( sPath[ 0 ] == '.' && sPath[ 1 ] == slash ) + { + sPath.replace( 0, 2, "" ); + } + } + + // each time we encounter .. back up until we've found the previous directory name + // then get rid of both + std::string::size_type i = 0; + while( i < sPath.length() ) + { + if( i > 0 && sPath.length() - i >= 2 + && sPath[i] == '.' + && sPath[i+1] == '.' + && ( i + 2 == sPath.length() || sPath[ i+2 ] == slash ) + && sPath[ i-1 ] == slash ) + { + // check if we've hit the start of the string and have a bogus path + if( i == 1 ) + return ""; + + // find the separator before i-1 + std::string::size_type iDirStart = i-2; + while( iDirStart > 0 && sPath[ iDirStart - 1 ] != slash ) + --iDirStart; + + // remove everything from iDirStart to i+2 + sPath.replace( iDirStart, (i - iDirStart) + 3, "" ); + + // start over + i = 0; + } + else + { + ++i; + } + } + + return sPath; +} + +#define MAX_UNICODE_PATH 32768 +#define MAX_UNICODE_PATH_IN_UTF8 ( MAX_UNICODE_PATH * 4 ) + +/** Returns the path to the current DLL or exe */ +std::string GetThisModulePath() +{ + // gets the path of vrclient.dll itself +#ifdef WIN32 + HMODULE hmodule = NULL; + + ::GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, reinterpret_cast(GetThisModulePath), &hmodule); + + wchar_t *pwchPath = new wchar_t[MAX_UNICODE_PATH]; + char *pchPath = new char[ MAX_UNICODE_PATH_IN_UTF8 ]; + ::GetModuleFileNameW( hmodule, pwchPath, MAX_UNICODE_PATH ); + WideCharToMultiByte( CP_UTF8, 0, pwchPath, -1, pchPath, MAX_UNICODE_PATH_IN_UTF8, NULL, NULL ); + delete[] pwchPath; + + std::string sPath = pchPath; + delete [] pchPath; + return sPath; + +#elif defined( OSX ) || defined( LINUX ) + // get the addr of a function in vrclient.so and then ask the dlopen system about it + Dl_info info; + dladdr( (void *)GetThisModulePath, &info ); + return info.dli_fname; +#endif + +} + + +/** returns true if the specified path exists and is a directory */ +bool Path_IsDirectory( const std::string & sPath ) +{ + std::string sFixedPath = Path_FixSlashes( sPath ); + if( sFixedPath.empty() ) + return false; + char cLast = sFixedPath[ sFixedPath.length() - 1 ]; + if( cLast == '/' || cLast == '\\' ) + sFixedPath.erase( sFixedPath.end() - 1, sFixedPath.end() ); + + // see if the specified path actually exists. 
+ struct stat buf; + if ( stat ( sFixedPath.c_str(), &buf ) == -1) + { + return false; + } + +#if defined(LINUX) + return S_ISDIR( buf.st_mode ); +#else + return ( buf.st_mode & _S_IFDIR ) != 0; +#endif +} + + +//----------------------------------------------------------------------------- +// Purpose: returns true if the the path exists +//----------------------------------------------------------------------------- +bool Path_Exists( const std::string & sPath ) +{ + std::string sFixedPath = Path_FixSlashes( sPath ); + if( sFixedPath.empty() ) + return false; + + struct stat buf; + if ( stat ( sFixedPath.c_str(), &buf ) == -1) + { + return false; + } + + return true; +} + + +//----------------------------------------------------------------------------- +// Purpose: helper to find a directory upstream from a given path +//----------------------------------------------------------------------------- +std::string Path_FindParentDirectoryRecursively( const std::string &strStartDirectory, const std::string &strDirectoryName ) +{ + std::string strFoundPath = ""; + std::string strCurrentPath = Path_FixSlashes( strStartDirectory ); + if ( strCurrentPath.length() == 0 ) + return ""; + + bool bExists = Path_Exists( strCurrentPath ); + std::string strCurrentDirectoryName = Path_StripDirectory( strCurrentPath ); + if ( bExists && stricmp( strCurrentDirectoryName.c_str(), strDirectoryName.c_str() ) == 0 ) + return strCurrentPath; + + while( bExists && strCurrentPath.length() != 0 ) + { + strCurrentPath = Path_StripFilename( strCurrentPath ); + strCurrentDirectoryName = Path_StripDirectory( strCurrentPath ); + bExists = Path_Exists( strCurrentPath ); + if ( bExists && stricmp( strCurrentDirectoryName.c_str(), strDirectoryName.c_str() ) == 0 ) + return strCurrentPath; + } + + return ""; +} + + +//----------------------------------------------------------------------------- +// Purpose: helper to find a subdirectory upstream from a given path +//----------------------------------------------------------------------------- +std::string Path_FindParentSubDirectoryRecursively( const std::string &strStartDirectory, const std::string &strDirectoryName ) +{ + std::string strFoundPath = ""; + std::string strCurrentPath = Path_FixSlashes( strStartDirectory ); + if ( strCurrentPath.length() == 0 ) + return ""; + + bool bExists = Path_Exists( strCurrentPath ); + while( bExists && strCurrentPath.length() != 0 ) + { + strCurrentPath = Path_StripFilename( strCurrentPath ); + bExists = Path_Exists( strCurrentPath ); + + if( Path_Exists( Path_Join( strCurrentPath, strDirectoryName ) ) ) + { + strFoundPath = Path_Join( strCurrentPath, strDirectoryName ); + break; + } + } + return strFoundPath; +} + + +//----------------------------------------------------------------------------- +// Purpose: reading and writing files in the vortex directory +//----------------------------------------------------------------------------- +unsigned char * Path_ReadBinaryFile( const std::string &strFilename, int *pSize ) +{ + FILE *f; +#if defined( POSIX ) + f = fopen( strFilename.c_str(), "rb" ); +#else + errno_t err = fopen_s(&f, strFilename.c_str(), "rb"); + if ( err != 0 ) + { + f = NULL; + } +#endif + + unsigned char* buf = NULL; + + if ( f != NULL ) + { + fseek(f, 0, SEEK_END); + int size = ftell(f); + fseek(f, 0, SEEK_SET); + + buf = new unsigned char[size]; + if (buf && fread(buf, size, 1, f) == 1) + { + if (pSize) + *pSize = size; + } + else + { + delete[] buf; + buf = 0; + } + + fclose(f); + } + + return buf; +} + + 
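//-----------------------------------------------------------------------------
// Purpose: reads an entire text file into a string via Path_ReadBinaryFile,
//          collapsing CRLF line endings to LF. Returns an empty string if the
//          file could not be read.
//-----------------------------------------------------------------------------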
+std::string Path_ReadTextFile( const std::string &strFilename ) +{ + // doing it this way seems backwards, but I don't + // see an easy way to do this with C/C++ style IO + // that isn't worse... + int size; + unsigned char* buf = Path_ReadBinaryFile( strFilename, &size ); + if (!buf) + return ""; + + // convert CRLF -> LF + int outsize = 1; + for (int i=1; i < size; i++) + { + if (buf[i] == '\n' && buf[i-1] == '\r') // CRLF + buf[outsize-1] = '\n'; // ->LF + else + buf[outsize++] = buf[i]; // just copy + } + + std::string ret((char *)buf, (char *)(buf + outsize)); + delete[] buf; + return ret; +} + + +bool Path_WriteStringToTextFile( const std::string &strFilename, const char *pchData ) +{ + FILE *f; +#if defined( POSIX ) + f = fopen( strFilename.c_str(), "w" ); +#else + errno_t err = fopen_s(&f, strFilename.c_str(), "w"); + if ( err != 0 ) + { + f = NULL; + } +#endif + + bool ok = false; + + if ( f != NULL ) + { + ok = fputs( pchData, f) >= 0; + fclose(f); + } + + return ok; +} \ No newline at end of file diff --git a/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.h b/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.h new file mode 100644 index 000000000..c38ec5612 --- /dev/null +++ b/examples/ThirdPartyLibs/openvr/samples/shared/pathtools.h @@ -0,0 +1,98 @@ +//========= Copyright Valve Corporation ============// +#pragma once + +#include + +/** Returns the path (including filename) to the current executable */ +std::string Path_GetExecutablePath(); + +/** Returns the path of the current working directory */ +std::string Path_GetWorkingDirectory(); + +/** Sets the path of the current working directory. Returns true if this was successful. */ +bool Path_SetWorkingDirectory( const std::string & sPath ); + +/** returns the path (including filename) of the current shared lib or DLL */ +std::string Path_GetModulePath(); + +/** Returns the specified path without its filename. +* If slash is unspecified the native path separator of the current platform +* will be used. */ +std::string Path_StripFilename( const std::string & sPath, char slash = 0 ); + +/** returns just the filename from the provided full or relative path. */ +std::string Path_StripDirectory( const std::string & sPath, char slash = 0 ); + +/** returns just the filename with no extension of the provided filename. +* If there is a path the path is left intact. */ +std::string Path_StripExtension( const std::string & sPath ); + +/** Returns true if the path is absolute */ +bool Path_IsAbsolute( const std::string & sPath ); + +/** Makes an absolute path from a relative path and a base path */ +std::string Path_MakeAbsolute( const std::string & sRelativePath, const std::string & sBasePath, char slash = 0 ); + +/** Fixes the directory separators for the current platform. +* If slash is unspecified the native path separator of the current platform +* will be used. 
*/ +std::string Path_FixSlashes( const std::string & sPath, char slash = 0 ); + +/** Returns the path separator for the current platform */ +char Path_GetSlash(); + +/** Jams two paths together with the right kind of slash */ +std::string Path_Join( const std::string & first, const std::string & second, char slash = 0 ); +std::string Path_Join( const std::string & first, const std::string & second, const std::string & third, char slash = 0 ); +std::string Path_Join( const std::string & first, const std::string & second, const std::string & third, const std::string &fourth, char slash = 0 ); +std::string Path_Join( + const std::string & first, + const std::string & second, + const std::string & third, + const std::string & fourth, + const std::string & fifth, + char slash = 0 ); + + +/** Removes redundant /.. elements in the path. Returns an empty path if the +* specified path has a broken number of directories for its number of ..s. +* If slash is unspecified the native path separator of the current platform +* will be used. */ +std::string Path_Compact( const std::string & sRawPath, char slash = 0 ); + +/** returns true if the specified path exists and is a directory */ +bool Path_IsDirectory( const std::string & sPath ); + +/** Returns the path to the current DLL or exe */ +std::string GetThisModulePath(); + +/** returns true if the the path exists */ +bool Path_Exists( const std::string & sPath ); + +/** Helper functions to find parent directories or subdirectories of parent directories */ +std::string Path_FindParentDirectoryRecursively( const std::string &strStartDirectory, const std::string &strDirectoryName ); +std::string Path_FindParentSubDirectoryRecursively( const std::string &strStartDirectory, const std::string &strDirectoryName ); + +/** Path operations to read or write text/binary files */ +unsigned char * Path_ReadBinaryFile( const std::string &strFilename, int *pSize ); +std::string Path_ReadTextFile( const std::string &strFilename ); +bool Path_WriteStringToTextFile( const std::string &strFilename, const char *pchData ); + +//----------------------------------------------------------------------------- +#if defined(_WIN32) +#define DYNAMIC_LIB_EXT ".dll" +#ifdef _WIN64 +#define PLATSUBDIR "win64" +#else +#define PLATSUBDIR "win32" +#endif +#elif defined(OSX) +#define DYNAMIC_LIB_EXT ".dylib" +#define PLATSUBDIR "osx32" +#elif defined(LINUX) +#define DYNAMIC_LIB_EXT ".so" +#define PLATSUBDIR "linux32" +#else +#warning "Unknown platform for PLATSUBDIR" +#define PLATSUBDIR "unknown_platform" +#endif