forked from LeenkxTeam/LNXSDK
Update Files
@@ -0,0 +1,283 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_Util.h
|
||||
\brief This header provides LibOVR utility function declarations
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
*************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_Util_h
|
||||
#define OVR_CAPI_Util_h
|
||||
|
||||
#include "OVR_CAPI.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/// Enumerates modifications to the projection matrix based on the application's needs.
|
||||
///
|
||||
/// \see ovrMatrix4f_Projection
|
||||
///
|
||||
typedef enum ovrProjectionModifier_ {
|
||||
/// Use for generating a default projection matrix that is:
|
||||
/// * Right-handed.
|
||||
/// * Near depth values stored in the depth buffer are smaller than far depth values.
|
||||
/// * Both near and far are explicitly defined.
|
||||
/// * With a clipping range that is (0 to w).
|
||||
ovrProjection_None = 0x00,
|
||||
|
||||
/// Enable if using left-handed transformations in your application.
|
||||
ovrProjection_LeftHanded = 0x01,
|
||||
|
||||
/// After the projection transform is applied, far values stored in the depth buffer will be less
|
||||
/// than closer depth values.
|
||||
/// NOTE: Enable only if the application is using a floating-point depth buffer for proper
|
||||
/// precision.
|
||||
ovrProjection_FarLessThanNear = 0x02,
|
||||
|
||||
/// When this flag is used, the zfar value pushed into ovrMatrix4f_Projection() will be ignored.
/// NOTE: Enable only if ovrProjection_FarLessThanNear is also enabled, in which case the far
/// clipping plane will be pushed to infinity.
|
||||
ovrProjection_FarClipAtInfinity = 0x04,
|
||||
|
||||
/// Enable if the application is rendering with OpenGL and expects a projection matrix with a
|
||||
/// clipping range of (-w to w).
|
||||
/// Ignore this flag if your application already handles the conversion from D3D range (0 to w) to
|
||||
/// OpenGL.
|
||||
ovrProjection_ClipRangeOpenGL = 0x08,
|
||||
} ovrProjectionModifier;
|
||||
|
||||
/// Return values for ovr_Detect.
|
||||
///
|
||||
/// \see ovr_Detect
|
||||
///
|
||||
typedef struct OVR_ALIGNAS(8) ovrDetectResult_ {
|
||||
/// Is ovrFalse when the Oculus Service is not running.
|
||||
/// This means that the Oculus Service is either uninstalled or stopped.
|
||||
/// IsOculusHMDConnected will be ovrFalse in this case.
|
||||
/// Is ovrTrue when the Oculus Service is running.
|
||||
/// This means that the Oculus Service is installed and running.
|
||||
/// IsOculusHMDConnected will reflect the state of the HMD.
|
||||
ovrBool IsOculusServiceRunning;
|
||||
|
||||
/// Is ovrFalse when an Oculus HMD is not detected.
|
||||
/// If the Oculus Service is not running, this will be ovrFalse.
|
||||
/// Is ovrTrue when an Oculus HMD is detected.
|
||||
/// This implies that the Oculus Service is also installed and running.
|
||||
ovrBool IsOculusHMDConnected;
|
||||
|
||||
OVR_UNUSED_STRUCT_PAD(pad0, 6) ///< \internal struct padding
|
||||
|
||||
} ovrDetectResult;
|
||||
|
||||
OVR_STATIC_ASSERT(sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch");
|
||||
|
||||
/// Modes used to generate Touch Haptics from audio PCM buffer.
|
||||
///
|
||||
typedef enum ovrHapticsGenMode_ {
|
||||
/// Point sample original signal at Haptics frequency
|
||||
ovrHapticsGenMode_PointSample,
|
||||
ovrHapticsGenMode_Count
|
||||
} ovrHapticsGenMode;
|
||||
|
||||
/// Store audio PCM data (as 32b float samples) for an audio channel.
|
||||
/// Note: needs to be released with ovr_ReleaseAudioChannelData to avoid memory leak.
|
||||
///
|
||||
typedef struct ovrAudioChannelData_ {
|
||||
/// Samples stored as floats [-1.0f, 1.0f].
|
||||
const float* Samples;
|
||||
|
||||
/// Number of samples
|
||||
int SamplesCount;
|
||||
|
||||
/// Frequency (e.g. 44100)
|
||||
int Frequency;
|
||||
} ovrAudioChannelData;
|
||||
|
||||
/// Store a full Haptics clip, which can be used as data source for multiple ovrHapticsBuffers.
|
||||
///
|
||||
typedef struct ovrHapticsClip_ {
|
||||
/// Samples stored in opaque format
|
||||
const void* Samples;
|
||||
|
||||
/// Number of samples
|
||||
int SamplesCount;
|
||||
} ovrHapticsClip;
|
||||
|
||||
/// Detects Oculus Runtime and Device Status
|
||||
///
|
||||
/// Checks for Oculus Runtime and Oculus HMD device status without loading the LibOVRRT
|
||||
/// shared library. This may be called before ovr_Initialize() to help decide whether or
|
||||
/// not to initialize LibOVR.
|
||||
///
|
||||
/// \param[in] timeoutMilliseconds Specifies a timeout to wait for HMD to be attached or 0 to poll.
|
||||
///
|
||||
/// \return Returns an ovrDetectResult object indicating the result of detection.
|
||||
///
|
||||
/// \see ovrDetectResult
|
||||
///
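/// <b>Example code</b> (illustrative sketch; assumes the application only wants to decide
/// whether calling ovr_Initialize is worthwhile)
/// \code{.cpp}
/// ovrDetectResult detectResult = ovr_Detect(0); // poll without waiting
/// if (detectResult.IsOculusServiceRunning && detectResult.IsOculusHMDConnected) {
///   // The service is running and an HMD is attached; proceed to initialize LibOVR.
/// }
/// \endcode
///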
|
||||
OVR_PUBLIC_FUNCTION(ovrDetectResult) ovr_Detect(int timeoutMilliseconds);
|
||||
|
||||
// On the Windows platform,
|
||||
#ifdef _WIN32
|
||||
/// This is the Windows Named Event name that is used to check for HMD connected state.
|
||||
#define OVR_HMD_CONNECTED_EVENT_NAME L"OculusHMDConnected"
|
||||
#endif // _WIN32
|
||||
|
||||
/// Used to generate projection from ovrEyeDesc::Fov.
|
||||
///
|
||||
/// \param[in] fov Specifies the ovrFovPort to use.
|
||||
/// \param[in] znear Distance to near Z limit.
|
||||
/// \param[in] zfar Distance to far Z limit.
|
||||
/// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
|
||||
///
|
||||
/// \return Returns the calculated projection matrix.
|
||||
///
|
||||
/// \see ovrProjectionModifier
|
||||
///
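/// <b>Example code</b> (illustrative sketch; assumes a valid session and an OpenGL renderer)
/// \code{.cpp}
/// ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);
/// ovrMatrix4f proj = ovrMatrix4f_Projection(
///     hmdDesc.DefaultEyeFov[ovrEye_Left], 0.1f, 1000.0f, ovrProjection_ClipRangeOpenGL);
/// \endcode
///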
|
||||
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
|
||||
ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags);
|
||||
|
||||
/// Extracts the required data from the result of ovrMatrix4f_Projection.
|
||||
///
|
||||
/// \param[in] projection Specifies the projection matrix from which to
|
||||
/// extract ovrTimewarpProjectionDesc.
|
||||
/// \param[in] projectionModFlags A combination of the ovrProjectionModifier flags.
|
||||
/// \return Returns the extracted ovrTimewarpProjectionDesc.
|
||||
/// \see ovrTimewarpProjectionDesc
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrTimewarpProjectionDesc)
|
||||
ovrTimewarpProjectionDesc_FromProjection(ovrMatrix4f projection, unsigned int projectionModFlags);
|
||||
|
||||
/// Generates an orthographic sub-projection.
|
||||
///
|
||||
/// Used for 2D rendering, Y is down.
|
||||
///
|
||||
/// \param[in] projection The perspective matrix that the orthographic matrix is derived from.
|
||||
/// \param[in] orthoScale Equal to 1.0f / pixelsPerTanAngleAtCenter.
|
||||
/// \param[in] orthoDistance Equal to the distance from the camera in meters, such as 0.8m.
|
||||
/// \param[in] HmdToEyeOffsetX Specifies the offset of the eye from the center.
|
||||
///
|
||||
/// \return Returns the calculated projection matrix.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
|
||||
ovrMatrix4f_OrthoSubProjection(
|
||||
ovrMatrix4f projection,
|
||||
ovrVector2f orthoScale,
|
||||
float orthoDistance,
|
||||
float HmdToEyeOffsetX);
|
||||
|
||||
/// Computes offset eye poses based on headPose returned by ovrTrackingState.
|
||||
///
|
||||
/// \param[in] headPose Indicates the HMD position and orientation to use for the calculation.
|
||||
/// \param[in] hmdToEyePose Can be ovrEyeRenderDesc.HmdToEyePose returned from
|
||||
/// ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the average
/// of the two eyes' position vectors.
|
||||
/// \param[out] outEyePoses If outEyePoses are used for rendering, they should be passed to
|
||||
/// ovr_SubmitFrame in ovrLayerEyeFov::RenderPose or ovrLayerEyeFovDepth::RenderPose.
|
||||
///
|
||||
#undef ovr_CalcEyePoses
|
||||
OVR_PUBLIC_FUNCTION(void)
|
||||
ovr_CalcEyePoses(ovrPosef headPose, const ovrVector3f hmdToEyeOffset[2], ovrPosef outEyePoses[2]);
|
||||
OVR_PRIVATE_FUNCTION(void)
|
||||
ovr_CalcEyePoses2(ovrPosef headPose, const ovrPosef HmdToEyePose[2], ovrPosef outEyePoses[2]);
|
||||
#define ovr_CalcEyePoses ovr_CalcEyePoses2
|
||||
|
||||
/// Returns the predicted head pose in outHmdTrackingState and offset eye poses in outEyePoses.
|
||||
///
|
||||
/// This is a thread-safe function where the caller should increment frameIndex with every frame
|
||||
/// and pass that index where applicable to functions called on the rendering thread.
|
||||
/// Assuming outEyePoses are used for rendering, it should be passed as a part of ovrLayerEyeFov.
|
||||
/// The caller does not need to worry about applying HmdToEyePose to the returned outEyePoses
|
||||
/// variables.
|
||||
///
|
||||
/// \param[in] hmd Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] frameIndex Specifies the targeted frame index, or 0 to refer to one frame after
|
||||
/// the last time ovr_SubmitFrame was called.
|
||||
/// \param[in] latencyMarker Specifies that this call is the point in time where
|
||||
/// the "App-to-Mid-Photon" latency timer starts from. If a given ovrLayer
|
||||
/// provides "SensorSampleTimestamp", that will override the value stored here.
|
||||
/// \param[in] hmdToEyePose Can be ovrEyeRenderDesc.HmdToEyePose returned from
|
||||
/// ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the average
/// of the two eyes' position vectors.
|
||||
/// \param[out] outEyePoses The predicted eye poses.
|
||||
/// \param[out] outSensorSampleTime The time when this function was called. May be NULL, in which
|
||||
/// case it is ignored.
|
||||
///
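/// <b>Example code</b> (illustrative sketch; eyeRenderDesc and frameIndex are assumed to be
/// maintained by the application)
/// \code{.cpp}
/// ovrPosef hmdToEyePose[2] = {eyeRenderDesc[0].HmdToEyePose, eyeRenderDesc[1].HmdToEyePose};
/// ovrPosef eyePoses[2];
/// double sensorSampleTime = 0.0;
/// ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyePose, eyePoses, &sensorSampleTime);
/// \endcode
///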
|
||||
#undef ovr_GetEyePoses
|
||||
OVR_PUBLIC_FUNCTION(void)
|
||||
ovr_GetEyePoses(
|
||||
ovrSession session,
|
||||
long long frameIndex,
|
||||
ovrBool latencyMarker,
|
||||
const ovrVector3f hmdToEyeOffset[2],
|
||||
ovrPosef outEyePoses[2],
|
||||
double* outSensorSampleTime);
|
||||
OVR_PRIVATE_FUNCTION(void)
|
||||
ovr_GetEyePoses2(
|
||||
ovrSession session,
|
||||
long long frameIndex,
|
||||
ovrBool latencyMarker,
|
||||
const ovrPosef HmdToEyePose[2],
|
||||
ovrPosef outEyePoses[2],
|
||||
double* outSensorSampleTime);
|
||||
#define ovr_GetEyePoses ovr_GetEyePoses2
|
||||
|
||||
/// Tracking poses provided by the SDK come in a right-handed coordinate system. If an application
|
||||
/// is passing in ovrProjection_LeftHanded into ovrMatrix4f_Projection, then it should also use
|
||||
/// this function to flip the HMD tracking poses to be left-handed.
|
||||
///
|
||||
/// While this utility function is intended to convert a right-handed ovrPosef into a left-handed
/// coordinate system, it will also work for converting left-handed to right-handed since the
/// flip operation is the same for both cases.
|
||||
///
|
||||
/// \param[in] inPose that is right-handed
|
||||
/// \param[out] outPose that is requested to be left-handed (can be the same pointer as inPose)
|
||||
///
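/// <b>Example code</b> (illustrative sketch; trackingState is assumed to come from
/// ovr_GetTrackingState)
/// \code{.cpp}
/// ovrPosef leftHandedHeadPose;
/// ovrPosef_FlipHandedness(&trackingState.HeadPose.ThePose, &leftHandedHeadPose);
/// \endcode
///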
|
||||
OVR_PUBLIC_FUNCTION(void) ovrPosef_FlipHandedness(const ovrPosef* inPose, ovrPosef* outPose);
|
||||
|
||||
/// Reads an audio channel from Wav (Waveform Audio File) data.
|
||||
/// Input must be a byte buffer representing a valid Wav file. Audio samples from the specified
|
||||
/// channel are read,
|
||||
/// converted to float [-1.0f, 1.0f] and returned through ovrAudioChannelData.
|
||||
///
|
||||
/// Supported formats: PCM 8b, 16b, 32b and IEEE float (little-endian only).
|
||||
///
|
||||
/// \param[out] outAudioChannel output audio channel data.
|
||||
/// \param[in] inputData a binary buffer representing a valid Wav file data.
|
||||
/// \param[in] dataSizeInBytes size of the buffer in bytes.
|
||||
/// \param[in] stereoChannelToUse audio channel index to extract (0 for mono).
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_ReadWavFromBuffer(
|
||||
ovrAudioChannelData* outAudioChannel,
|
||||
const void* inputData,
|
||||
int dataSizeInBytes,
|
||||
int stereoChannelToUse);
|
||||
|
||||
/// Generates playable Touch Haptics data from an audio channel.
|
||||
///
|
||||
/// \param[out] outHapticsClip generated Haptics clip.
|
||||
/// \param[in] audioChannel input audio channel data.
|
||||
/// \param[in] genMode mode used to convert the audio channel data to Haptics data.
|
||||
///
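/// <b>Example code</b> (illustrative sketch; wavData/wavSize are an application-provided
/// in-memory Wav file)
/// \code{.cpp}
/// ovrAudioChannelData channel = {};
/// if (OVR_SUCCESS(ovr_ReadWavFromBuffer(&channel, wavData, wavSize, 0))) {
///   ovrHapticsClip clip = {};
///   ovr_GenHapticsFromAudioData(&clip, &channel, ovrHapticsGenMode_PointSample);
///   ovr_ReleaseAudioChannelData(&channel);
///   // ... use clip as the data source for ovrHapticsBuffers, then:
///   ovr_ReleaseHapticsClip(&clip);
/// }
/// \endcode
///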
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GenHapticsFromAudioData(
|
||||
ovrHapticsClip* outHapticsClip,
|
||||
const ovrAudioChannelData* audioChannel,
|
||||
ovrHapticsGenMode genMode);
|
||||
|
||||
/// Releases memory allocated for ovrAudioChannelData. Must be called to avoid memory leak.
|
||||
/// \param[in] audioChannel pointer to an audio channel
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseAudioChannelData(ovrAudioChannelData* audioChannel);
|
||||
|
||||
/// Releases memory allocated for ovrHapticsClip. Must be called to avoid memory leak.
|
||||
/// \param[in] hapticsClip pointer to a haptics clip
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseHapticsClip(ovrHapticsClip* hapticsClip);
|
||||
|
||||
#ifdef __cplusplus
|
||||
} /* extern "C" */
|
||||
#endif
|
||||
|
||||
#endif // Header include guard
|
File diff suppressed because it is too large
@@ -0,0 +1,73 @@
|
||||
/************************************************************************************
|
||||
|
||||
Filename : OVR_StereoProjection.h
|
||||
Content : Stereo projection functions
|
||||
Created : November 30, 2013
|
||||
Authors : Tom Forsyth
|
||||
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Licensed under the Oculus VR Rift SDK License Version 3.3 (the "License");
|
||||
you may not use the Oculus VR Rift SDK except in compliance with the License,
|
||||
which is provided at the time of installation or download, or which
|
||||
otherwise accompanies this software in either electronic or hard copy form.
|
||||
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.oculusvr.com/licenses/LICENSE-3.3
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
*************************************************************************************/
|
||||
|
||||
#ifndef OVR_StereoProjection_h
|
||||
#define OVR_StereoProjection_h
|
||||
|
||||
#include "Extras/OVR_Math.h"
|
||||
|
||||
namespace OVR {
|
||||
|
||||
//-----------------------------------------------------------------------------------
|
||||
// ***** Stereo Enumerations
|
||||
|
||||
// StereoEye specifies which eye we are rendering for; it is used to
|
||||
// retrieve StereoEyeParams.
|
||||
enum StereoEye { StereoEye_Left, StereoEye_Right, StereoEye_Center };
|
||||
|
||||
//-----------------------------------------------------------------------------------
|
||||
// ***** Projection functions
|
||||
|
||||
Matrix4f CreateProjection(
|
||||
bool rightHanded,
|
||||
bool isOpenGL,
|
||||
FovPort fov,
|
||||
StereoEye eye,
|
||||
float zNear = 0.01f,
|
||||
float zFar = 10000.0f,
|
||||
bool flipZ = false,
|
||||
bool farAtInfinity = false);
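
// Illustrative usage sketch (assumes a symmetric 90-degree field of view; the tangent values
// and clip distances are example choices):
//
//   FovPort fov;
//   fov.UpTan = fov.DownTan = fov.LeftTan = fov.RightTan = 1.0f;
//   Matrix4f proj = CreateProjection(true /*rightHanded*/, false /*isOpenGL*/,
//                                    fov, StereoEye_Left, 0.01f, 10000.0f);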
|
||||
|
||||
Matrix4f CreateOrthoSubProjection(
|
||||
bool rightHanded,
|
||||
StereoEye eyeType,
|
||||
float tanHalfFovX,
|
||||
float tanHalfFovY,
|
||||
float unitsX,
|
||||
float unitsY,
|
||||
float distanceFromCamera,
|
||||
float interpupillaryDistance,
|
||||
Matrix4f const& projection,
|
||||
float zNear = 0.0f,
|
||||
float zFar = 0.0f,
|
||||
bool flipZ = false,
|
||||
bool farAtInfinity = false);
|
||||
|
||||
ScaleAndOffset2D CreateNDCScaleAndOffsetFromFov(FovPort fov);
|
||||
|
||||
} // namespace OVR
|
||||
|
||||
#endif // OVR_StereoProjection_h
|
File diff suppressed because it is too large
@@ -0,0 +1,85 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_Audio.h
|
||||
\brief CAPI audio functions.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_Audio_h
|
||||
#define OVR_CAPI_Audio_h
|
||||
|
||||
#ifdef _WIN32
|
||||
// Prevents <Windows.h> from defining min() and max() macro symbols.
|
||||
#ifndef NOMINMAX
|
||||
#define NOMINMAX
|
||||
#endif
|
||||
#include <windows.h>
|
||||
#include "OVR_CAPI.h"
|
||||
#define OVR_AUDIO_MAX_DEVICE_STR_SIZE 128
|
||||
|
||||
#if !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
/// Gets the ID of the preferred VR audio output device.
|
||||
///
|
||||
/// \param[out] deviceOutId The ID of the user's preferred VR audio device to use,
|
||||
/// which will be valid upon a successful return value, else it will be WAVE_MAPPER.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
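/// <b>Example code</b> (illustrative sketch; the returned id could then be passed to the
/// waveOut API)
/// \code{.cpp}
/// UINT waveId = WAVE_MAPPER;
/// if (OVR_SUCCESS(ovr_GetAudioDeviceOutWaveId(&waveId))) {
///   // Open the device, e.g. with waveOutOpen(..., waveId, ...).
/// }
/// \endcode
///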
|
||||
OVR_PUBLIC_FUNCTION(ovrResult) ovr_GetAudioDeviceOutWaveId(UINT* deviceOutId);
|
||||
|
||||
/// Gets the ID of the preferred VR audio input device.
|
||||
///
|
||||
/// \param[out] deviceInId The ID of the user's preferred VR audio device to use,
|
||||
/// which will be valid upon a successful return value, else it will be WAVE_MAPPER.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult) ovr_GetAudioDeviceInWaveId(UINT* deviceInId);
|
||||
|
||||
/// Gets the GUID of the preferred VR audio device as a string.
|
||||
///
|
||||
/// \param[out] deviceOutStrBuffer A buffer where the GUID string for the device will be copied to.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetAudioDeviceOutGuidStr(WCHAR deviceOutStrBuffer[OVR_AUDIO_MAX_DEVICE_STR_SIZE]);
|
||||
|
||||
/// Gets the GUID of the preferred VR audio device.
|
||||
///
|
||||
/// \param[out] deviceOutGuid The GUID of the user's preferred VR audio device to use,
|
||||
/// which will be valid upon a successful return value, else it will be NULL.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult) ovr_GetAudioDeviceOutGuid(GUID* deviceOutGuid);
|
||||
|
||||
/// Gets the GUID of the preferred VR microphone device as a string.
|
||||
///
|
||||
/// \param[out] deviceInStrBuffer A buffer where the GUID string for the device will be copied to.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetAudioDeviceInGuidStr(WCHAR deviceInStrBuffer[OVR_AUDIO_MAX_DEVICE_STR_SIZE]);
|
||||
|
||||
/// Gets the GUID of the preferred VR microphone device.
|
||||
///
|
||||
/// \param[out] deviceInGuid The GUID of the user's preferred VR audio device to use,
|
||||
/// which will be valid upon a successful return value, else it will be NULL.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult) ovr_GetAudioDeviceInGuid(GUID* deviceInGuid);
|
||||
|
||||
#endif // !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
#endif // _WIN32
|
||||
|
||||
#endif // OVR_CAPI_Audio_h
|
@@ -0,0 +1,203 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_D3D.h
|
||||
\brief D3D specific structures used by the CAPI interface.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_D3D_h
|
||||
#define OVR_CAPI_D3D_h
|
||||
|
||||
#include "OVR_CAPI.h"
|
||||
#include "OVR_Version.h"
|
||||
|
||||
#if defined(_WIN32)
|
||||
#include <Unknwn.h>
|
||||
#include <guiddef.h>
|
||||
|
||||
#if !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
//-----------------------------------------------------------------------------------
|
||||
// ***** Direct3D Specific
|
||||
|
||||
/// Create Texture Swap Chain suitable for use with Direct3D 11 and 12.
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] d3dPtr Specifies the application's D3D11Device to create resources with
|
||||
/// or the D3D12CommandQueue which must be the same one the application renders
|
||||
/// to the eye textures with.
|
||||
/// \param[in] desc Specifies requested texture properties. See notes for more info
|
||||
/// about texture format.
|
||||
/// \param[in] bindFlags Specifies what ovrTextureBindFlags the application requires
|
||||
/// for this texture chain.
|
||||
/// \param[out] out_TextureSwapChain Returns the created ovrTextureSwapChain, which will
|
||||
/// be valid upon a successful return value, else it will be NULL.
|
||||
/// This texture chain must be eventually destroyed via ovr_DestroyTextureSwapChain
|
||||
/// before destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The texture format provided in \a desc should be thought of as the format the
|
||||
/// distortion-compositor will use for the ShaderResourceView when reading the contents of
|
||||
/// the texture. To that end, it is highly recommended that the application requests texture
|
||||
/// swapchain formats that are in sRGB-space (e.g. OVR_FORMAT_R8G8B8A8_UNORM_SRGB)
|
||||
/// as the compositor does sRGB-correct rendering. As such, the compositor relies on the
|
||||
/// GPU's hardware sampler to do the sRGB-to-linear conversion. If the application still
|
||||
/// prefers to render to a linear format (e.g. OVR_FORMAT_R8G8B8A8_UNORM) while handling the
|
||||
/// linear-to-gamma conversion via HLSL code, then the application must still request the
|
||||
/// corresponding sRGB format and also use the \a ovrTextureMisc_DX_Typeless flag in the
|
||||
/// ovrTextureSwapChainDesc's Flag field. This will allow the application to create
|
||||
/// a RenderTargetView that is the desired linear format while the compositor continues to
|
||||
/// treat it as sRGB. Failure to do so will cause the compositor to apply unexpected gamma
|
||||
/// conversions leading to gamma-curve artifacts. The \a ovrTextureMisc_DX_Typeless
|
||||
/// flag for depth buffer formats (e.g. OVR_FORMAT_D32_FLOAT) is ignored as they are always
|
||||
/// converted to be typeless.
|
||||
///
|
||||
/// \see ovr_GetTextureSwapChainLength
|
||||
/// \see ovr_GetTextureSwapChainCurrentIndex
|
||||
/// \see ovr_GetTextureSwapChainDesc
|
||||
/// \see ovr_GetTextureSwapChainBufferDX
|
||||
/// \see ovr_DestroyTextureSwapChain
|
||||
///
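/// <b>Example code</b> (illustrative sketch; d3d11Device and eyeTextureSize are assumed to be
/// created/chosen by the application, e.g. via ovr_GetFovTextureSize)
/// \code{.cpp}
/// ovrTextureSwapChainDesc desc = {};
/// desc.Type = ovrTexture_2D;
/// desc.ArraySize = 1;
/// desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
/// desc.Width = eyeTextureSize.w;
/// desc.Height = eyeTextureSize.h;
/// desc.MipLevels = 1;
/// desc.SampleCount = 1;
/// desc.BindFlags = ovrTextureBind_DX_RenderTarget;
/// ovrTextureSwapChain textureSwapChain = nullptr;
/// ovrResult result = ovr_CreateTextureSwapChainDX(session, d3d11Device, &desc, &textureSwapChain);
/// \endcode
///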
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateTextureSwapChainDX(
|
||||
ovrSession session,
|
||||
IUnknown* d3dPtr,
|
||||
const ovrTextureSwapChainDesc* desc,
|
||||
ovrTextureSwapChain* out_TextureSwapChain);
|
||||
|
||||
/// Get a specific buffer within the chain as any compatible COM interface (similar to
|
||||
/// QueryInterface)
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] chain Specifies an ovrTextureSwapChain previously returned
|
||||
/// by ovr_CreateTextureSwapChainDX
|
||||
/// \param[in] index Specifies the index within the chain to retrieve.
|
||||
/// Must be between 0 and length (see ovr_GetTextureSwapChainLength),
|
||||
/// or may pass -1 to get the buffer at the CurrentIndex location. (Saving a call to
|
||||
/// GetTextureSwapChainCurrentIndex)
|
||||
/// \param[in] iid Specifies the interface ID of the interface pointer to query the buffer for.
|
||||
/// \param[out] out_Buffer Returns the COM interface pointer retrieved.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// ovr_GetTextureSwapChainBufferDX(s, d3d11Chain, 0, IID_ID3D11Texture2D, &d3d11Texture);
|
||||
/// ovr_GetTextureSwapChainBufferDX(s, d3d11Chain, 1, IID_PPV_ARGS(&dxgiResource));
|
||||
/// ovr_GetTextureSwapChainBufferDX(s, d3d12Chain, 0, IID_ID3D12Resource, &d3d12Texture);
|
||||
/// \endcode
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetTextureSwapChainBufferDX(
|
||||
ovrSession session,
|
||||
ovrTextureSwapChain chain,
|
||||
int index,
|
||||
IID iid,
|
||||
void** out_Buffer);
|
||||
|
||||
/// Create Mirror Texture which is auto-refreshed to mirror Rift contents produced by this
|
||||
/// application.
|
||||
///
|
||||
/// A second call to ovr_CreateMirrorTextureWithOptionsDX for a given ovrSession before destroying
|
||||
/// the first one is not supported and will result in an error return.
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] d3dPtr Specifies the application's D3D11Device to create resources with
|
||||
/// or the D3D12CommandQueue which must be the same one the application renders to
|
||||
/// the textures with.
|
||||
/// \param[in] desc Specifies requested texture properties.
|
||||
/// See notes for more info about texture format.
|
||||
/// \param[out] out_MirrorTexture Returns the created ovrMirrorTexture, which will be valid upon a
|
||||
/// successful return value, else it will be NULL.
|
||||
/// This texture must be eventually destroyed via ovr_DestroyMirrorTexture before
|
||||
/// destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The texture format provided in \a desc should be thought of as the format the compositor
|
||||
/// will use for the RenderTargetView when writing into mirror texture. To that end, it is
|
||||
/// highly recommended that the application requests a mirror texture format that is
|
||||
/// in sRGB-space (e.g. OVR_FORMAT_R8G8B8A8_UNORM_SRGB) as the compositor does sRGB-correct
|
||||
/// rendering. If however the application wants to still read the mirror texture as a linear
|
||||
/// format (e.g. OVR_FORMAT_R8G8B8A8_UNORM) and handle the sRGB-to-linear conversion in
|
||||
/// HLSL code, then it is recommended the application still requests an sRGB format and also
|
||||
/// use the \a ovrTextureMisc_DX_Typeless flag in the ovrMirrorTextureDesc's Flags field.
|
||||
/// This will allow the application to bind a ShaderResourceView that is a linear format
|
||||
/// while the compositor continues to treat it as sRGB. Failure to do so will cause the
|
||||
/// compositor to apply unexpected gamma conversions leading to gamma-curve artifacts.
|
||||
///
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// ovrMirrorTexture mirrorTexture = nullptr;
|
||||
/// ovrMirrorTextureDesc mirrorDesc = {};
|
||||
/// mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
|
||||
/// mirrorDesc.Width = mirrorWindowWidth;
|
||||
/// mirrorDesc.Height = mirrorWindowHeight;
|
||||
/// ovrResult result = ovr_CreateMirrorTextureWithOptionsDX(session, d3d11Device,
|
||||
/// &mirrorDesc, &mirrorTexture);
|
||||
/// [...]
|
||||
/// // Destroy the texture when done with it.
|
||||
/// ovr_DestroyMirrorTexture(session, mirrorTexture);
|
||||
/// mirrorTexture = nullptr;
|
||||
/// \endcode
|
||||
///
|
||||
/// \see ovr_GetMirrorTextureBufferDX
|
||||
/// \see ovr_DestroyMirrorTexture
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateMirrorTextureWithOptionsDX(
|
||||
ovrSession session,
|
||||
IUnknown* d3dPtr,
|
||||
const ovrMirrorTextureDesc* desc,
|
||||
ovrMirrorTexture* out_MirrorTexture);
|
||||
|
||||
/// Deprecated. Use ovr_CreateMirrorTextureWithOptionsDX instead
|
||||
///
|
||||
/// Same as ovr_CreateMirrorTextureWithOptionsDX except it doesn't use ovrMirrorOptions flags as part
|
||||
/// of ovrMirrorTextureDesc's MirrorOptions field, and defaults to ovrMirrorOption_PostDistortion
|
||||
///
|
||||
/// \see ovrMirrorOptions, ovr_CreateMirrorTextureWithOptionsDX
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateMirrorTextureDX(
|
||||
ovrSession session,
|
||||
IUnknown* d3dPtr,
|
||||
const ovrMirrorTextureDesc* desc,
|
||||
ovrMirrorTexture* out_MirrorTexture);
|
||||
|
||||
/// Get the underlying buffer as any compatible COM interface (similar to QueryInterface)
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] mirrorTexture Specifies an ovrMirrorTexture previously returned
|
||||
/// by ovr_CreateMirrorTextureWithOptionsDX
|
||||
/// \param[in] iid Specifies the interface ID of the interface pointer to query the buffer for.
|
||||
/// \param[out] out_Buffer Returns the COM interface pointer retrieved.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// ID3D11Texture2D* d3d11Texture = nullptr;
|
||||
/// ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&d3d11Texture));
|
||||
/// d3d11DeviceContext->CopyResource(d3d11TextureBackBuffer, d3d11Texture);
|
||||
/// d3d11Texture->Release();
|
||||
/// dxgiSwapChain->Present(0, 0);
|
||||
/// \endcode
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetMirrorTextureBufferDX(
|
||||
ovrSession session,
|
||||
ovrMirrorTexture mirrorTexture,
|
||||
IID iid,
|
||||
void** out_Buffer);
|
||||
|
||||
#endif // !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
#endif // _WIN32
|
||||
|
||||
#endif // OVR_CAPI_D3D_h
|
@@ -0,0 +1,137 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_GL.h
|
||||
\brief OpenGL-specific structures used by the CAPI interface.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_GL_h
|
||||
#define OVR_CAPI_GL_h
|
||||
|
||||
#include "OVR_CAPI.h"
|
||||
|
||||
#if !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
/// Creates a TextureSwapChain suitable for use with OpenGL.
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] desc Specifies the requested texture properties.
|
||||
/// See notes for more info about texture format.
|
||||
/// \param[out] out_TextureSwapChain Returns the created ovrTextureSwapChain,
|
||||
/// which will be valid upon a successful return value, else it will be NULL.
|
||||
/// This texture swap chain must be eventually destroyed via
|
||||
/// ovr_DestroyTextureSwapChain before destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The \a format provided should be thought of as the format the distortion compositor will
|
||||
/// use when reading the contents of the texture. To that end, it is highly recommended
|
||||
/// that the application requests texture swap chain formats that are in sRGB-space
|
||||
/// (e.g. OVR_FORMAT_R8G8B8A8_UNORM_SRGB) as the distortion compositor does sRGB-correct
|
||||
/// rendering. Furthermore, the app should then make sure "glEnable(GL_FRAMEBUFFER_SRGB);"
|
||||
/// is called before rendering into these textures. Even though it is not recommended,
|
||||
/// if the application would like to treat the texture as a linear format and do
|
||||
/// linear-to-gamma conversion in GLSL, then the application can avoid
|
||||
/// calling "glEnable(GL_FRAMEBUFFER_SRGB);", but should still pass in an sRGB variant for
|
||||
/// the \a format. Failure to do so will cause the distortion compositor to apply incorrect
|
||||
/// gamma conversions leading to gamma-curve artifacts.
|
||||
///
|
||||
/// \see ovr_GetTextureSwapChainLength
|
||||
/// \see ovr_GetTextureSwapChainCurrentIndex
|
||||
/// \see ovr_GetTextureSwapChainDesc
|
||||
/// \see ovr_GetTextureSwapChainBufferGL
|
||||
/// \see ovr_DestroyTextureSwapChain
|
||||
///
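/// <b>Example code</b> (illustrative sketch; eyeTextureSize is assumed to come from
/// ovr_GetFovTextureSize)
/// \code{.cpp}
/// ovrTextureSwapChainDesc desc = {};
/// desc.Type = ovrTexture_2D;
/// desc.ArraySize = 1;
/// desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
/// desc.Width = eyeTextureSize.w;
/// desc.Height = eyeTextureSize.h;
/// desc.MipLevels = 1;
/// desc.SampleCount = 1;
/// ovrTextureSwapChain chain = nullptr;
/// if (OVR_SUCCESS(ovr_CreateTextureSwapChainGL(session, &desc, &chain))) {
///   int length = 0;
///   ovr_GetTextureSwapChainLength(session, chain, &length);
///   // Fetch each GL texture name with ovr_GetTextureSwapChainBufferGL and attach it to an FBO.
/// }
/// \endcode
///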
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateTextureSwapChainGL(
|
||||
ovrSession session,
|
||||
const ovrTextureSwapChainDesc* desc,
|
||||
ovrTextureSwapChain* out_TextureSwapChain);
|
||||
|
||||
/// Get a specific buffer within the chain as a GL texture name
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] chain Specifies an ovrTextureSwapChain previously returned
|
||||
/// by ovr_CreateTextureSwapChainGL
|
||||
/// \param[in] index Specifies the index within the chain to retrieve.
|
||||
/// Must be between 0 and length (see ovr_GetTextureSwapChainLength)
|
||||
/// or may pass -1 to get the buffer at the CurrentIndex location.
|
||||
/// (Saving a call to GetTextureSwapChainCurrentIndex)
|
||||
/// \param[out] out_TexId Returns the GL texture object name associated with
|
||||
/// the specific index requested
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure.
|
||||
/// In the case of failure, use ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetTextureSwapChainBufferGL(
|
||||
ovrSession session,
|
||||
ovrTextureSwapChain chain,
|
||||
int index,
|
||||
unsigned int* out_TexId);
|
||||
|
||||
/// Creates a Mirror Texture which is auto-refreshed to mirror Rift contents produced by this
|
||||
/// application.
|
||||
///
|
||||
/// A second call to ovr_CreateMirrorTextureWithOptionsGL for a given ovrSession before destroying
|
||||
/// the first one is not supported and will result in an error return.
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] desc Specifies the requested mirror texture description.
|
||||
/// \param[out] out_MirrorTexture Specifies the created ovrMirrorTexture, which will be
|
||||
/// valid upon a successful return value, else it will be NULL.
|
||||
/// This texture must be eventually destroyed via ovr_DestroyMirrorTexture before
|
||||
/// destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The \a format provided should be thought of as the format the distortion compositor will
|
||||
/// use when writing into the mirror texture. It is highly recommended that mirror textures
|
||||
/// are requested as sRGB formats because the distortion compositor does sRGB-correct
|
||||
/// rendering. If the application requests a non-sRGB format (e.g. R8G8B8A8_UNORM) as the
|
||||
/// mirror texture, then the application might have to apply a manual linear-to-gamma
|
||||
/// conversion when reading from the mirror texture. Failure to do so can result in
|
||||
/// incorrect gamma conversions leading to gamma-curve artifacts and color banding.
|
||||
///
|
||||
/// \see ovr_GetMirrorTextureBufferGL
|
||||
/// \see ovr_DestroyMirrorTexture
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateMirrorTextureWithOptionsGL(
|
||||
ovrSession session,
|
||||
const ovrMirrorTextureDesc* desc,
|
||||
ovrMirrorTexture* out_MirrorTexture);
|
||||
|
||||
/// Deprecated. Use ovr_CreateMirrorTextureWithOptionsGL instead
|
||||
///
|
||||
/// Same as ovr_CreateMirrorTextureWithOptionsGL except it doesn't use ovrMirrorOptions flags as part
|
||||
/// of ovrMirrorTextureDesc's MirrorOptions field, and defaults to ovrMirrorOption_PostDistortion
|
||||
///
|
||||
/// \see ovrMirrorOptions, ovr_CreateMirrorTextureWithOptionsGL
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateMirrorTextureGL(
|
||||
ovrSession session,
|
||||
const ovrMirrorTextureDesc* desc,
|
||||
ovrMirrorTexture* out_MirrorTexture);
|
||||
|
||||
/// Get the underlying buffer as a GL texture name
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] mirrorTexture Specifies an ovrMirrorTexture previously returned
|
||||
/// by ovr_CreateMirrorTextureWithOptionsGL
|
||||
/// \param[out] out_TexId Specifies the GL texture object name associated with the mirror texture
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
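/// <b>Example code</b> (illustrative sketch; mirrorFBO is an application-created framebuffer
/// object and w/h are the mirror texture dimensions)
/// \code{.cpp}
/// unsigned int mirrorTexId = 0;
/// ovr_GetMirrorTextureBufferGL(session, mirrorTexture, &mirrorTexId);
/// glBindFramebuffer(GL_READ_FRAMEBUFFER, mirrorFBO);
/// glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mirrorTexId, 0);
/// glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
/// glBlitFramebuffer(0, h, w, 0, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST); // flip vertically
/// \endcode
///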
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetMirrorTextureBufferGL(
|
||||
ovrSession session,
|
||||
ovrMirrorTexture mirrorTexture,
|
||||
unsigned int* out_TexId);
|
||||
|
||||
#endif // !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
#endif // OVR_CAPI_GL_h
|
@@ -0,0 +1,49 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_Keys.h
|
||||
\brief Keys for CAPI property function calls
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_Keys_h
|
||||
#define OVR_CAPI_Keys_h
|
||||
|
||||
#include "OVR_Version.h"
|
||||
|
||||
|
||||
|
||||
#define OVR_KEY_USER "User" // string
|
||||
|
||||
#define OVR_KEY_NAME "Name" // string
|
||||
|
||||
#define OVR_KEY_GENDER "Gender" // string "Male", "Female", or "Unknown"
|
||||
#define OVR_DEFAULT_GENDER "Unknown"
|
||||
|
||||
#define OVR_KEY_PLAYER_HEIGHT "PlayerHeight" // float meters
|
||||
#define OVR_DEFAULT_PLAYER_HEIGHT 1.778f
|
||||
|
||||
#define OVR_KEY_EYE_HEIGHT "EyeHeight" // float meters
|
||||
#define OVR_DEFAULT_EYE_HEIGHT 1.675f
|
||||
|
||||
#define OVR_KEY_NECK_TO_EYE_DISTANCE "NeckEyeDistance" // float[2] meters
|
||||
#define OVR_DEFAULT_NECK_TO_EYE_HORIZONTAL 0.0805f
|
||||
#define OVR_DEFAULT_NECK_TO_EYE_VERTICAL 0.075f
|
||||
|
||||
#define OVR_KEY_EYE_TO_NOSE_DISTANCE "EyeToNoseDist" // float[2] meters
|
||||
|
||||
|
||||
|
||||
#define OVR_PERF_HUD_MODE "PerfHudMode" // int, allowed values are defined in enum ovrPerfHudMode
|
||||
|
||||
#define OVR_LAYER_HUD_MODE "LayerHudMode" // int, allowed values are defined in enum ovrLayerHudMode
|
||||
#define OVR_LAYER_HUD_CURRENT_LAYER "LayerHudCurrentLayer" // int, The layer to show
|
||||
#define OVR_LAYER_HUD_SHOW_ALL_LAYERS "LayerHudShowAll" // bool, Hide other layers when hud enabled
|
||||
|
||||
#define OVR_DEBUG_HUD_STEREO_MODE "DebugHudStereoMode" // int, see enum ovrDebugHudStereoMode
|
||||
#define OVR_DEBUG_HUD_STEREO_GUIDE_INFO_ENABLE "DebugHudStereoGuideInfoEnable" // bool
|
||||
#define OVR_DEBUG_HUD_STEREO_GUIDE_SIZE "DebugHudStereoGuideSize2f" // float[2]
|
||||
#define OVR_DEBUG_HUD_STEREO_GUIDE_POSITION "DebugHudStereoGuidePosition3f" // float[3]
|
||||
#define OVR_DEBUG_HUD_STEREO_GUIDE_YAWPITCHROLL "DebugHudStereoGuideYawPitchRoll3f" // float[3]
|
||||
#define OVR_DEBUG_HUD_STEREO_GUIDE_COLOR "DebugHudStereoGuideColor4f" // float[4]
|
||||
|
||||
|
||||
#endif // OVR_CAPI_Keys_h
|
@@ -0,0 +1,285 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_Vk.h
|
||||
\brief Vulkan specific structures used by the CAPI interface.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_Vk_h
|
||||
#define OVR_CAPI_Vk_h
|
||||
|
||||
#include "OVR_CAPI.h"
|
||||
#include "OVR_Version.h"
|
||||
|
||||
#if !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
//-----------------------------------------------------------------------------------
|
||||
// ***** Vulkan Specific
|
||||
|
||||
/// Get a list of Vulkan vkInstance extensions required for VR.
|
||||
///
|
||||
/// Returns a list of strings delimited by a single space identifying Vulkan extensions that must
|
||||
/// be enabled in order for the VR runtime to support Vulkan-based applications. The returned
|
||||
/// list reflects the current runtime version and the GPU the VR system is currently connected to.
|
||||
///
|
||||
/// \param[in] luid Specifies the luid for the relevant GPU, which is returned from ovr_Create.
|
||||
/// \param[in] extensionNames is a character buffer which will receive a list of extension name
|
||||
/// strings, separated by a single space char between each extension.
|
||||
/// \param[in] inoutExtensionNamesSize indicates on input the capacity of extensionNames in chars.
|
||||
/// On output it returns the number of characters written to extensionNames,
|
||||
/// including the terminating 0 char. In the case of this function returning
|
||||
/// ovrError_InsufficientArraySize, the required inoutExtensionNamesSize is returned.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information. Returns ovrError_InsufficientArraySize in
|
||||
/// the case that inoutExtensionNameSize didn't have enough space, in which case
|
||||
/// inoutExtensionNameSize will return the required inoutExtensionNamesSize.
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// char extensionNames[4096];
|
||||
/// uint32_t extensionNamesSize = sizeof(extensionNames);
|
||||
/// ovr_GetInstanceExtensionsVk(luid, extensionNames, &extensionNamesSize);
|
||||
///
|
||||
/// uint32_t extensionCount = 0;
|
||||
/// const char* extensionNamePtrs[256];
|
||||
/// for(char* p = extensionNames; *p; ++p) {
///   if((p == extensionNames) || (p[-1] == ' ')) {
///     extensionNamePtrs[extensionCount++] = p;
///     if (p != extensionNames)
///       p[-1] = '\0';
///   }
/// }
|
||||
///
|
||||
/// VkInstanceCreateInfo info = { ... };
|
||||
/// info.enabledExtensionCount = extensionCount;
|
||||
/// info.ppEnabledExtensionNames = extensionNamePtrs;
|
||||
/// [...]
|
||||
/// \endcode
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetInstanceExtensionsVk(
|
||||
ovrGraphicsLuid luid,
|
||||
char* extensionNames,
|
||||
uint32_t* inoutExtensionNamesSize);
|
||||
|
||||
/// Get a list of Vulkan vkDevice extensions required for VR.
|
||||
///
|
||||
/// Returns a list of strings delimited by a single space identifying Vulkan extensions that must
|
||||
/// be enabled in order for the VR runtime to support Vulkan-based applications. The returned
|
||||
/// list reflects the current runtime version and the GPU the VR system is currently connected to.
|
||||
///
|
||||
/// \param[in] luid Specifies the luid for the relevant GPU, which is returned from ovr_Create.
|
||||
/// \param[in] extensionNames is a character buffer which will receive a list of extension name
|
||||
/// strings, separated by a single space char between each extension.
|
||||
/// \param[in] inoutExtensionNamesSize indicates on input the capacity of extensionNames in chars.
|
||||
/// On output it returns the number of characters written to extensionNames,
|
||||
/// including the terminating 0 char. In the case of this function returning
|
||||
/// ovrError_InsufficientArraySize, the required inoutExtensionNamesSize is returned.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information. Returns ovrError_InsufficientArraySize in
|
||||
/// the case that inoutExtensionNameSize didn't have enough space, in which case
|
||||
/// inoutExtensionNameSize will return the required inoutExtensionNamesSize.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetDeviceExtensionsVk(
|
||||
ovrGraphicsLuid luid,
|
||||
char* extensionNames,
|
||||
uint32_t* inoutExtensionNamesSize);
|
||||
|
||||
/// Find VkPhysicalDevice matching ovrGraphicsLuid
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] luid Specifies the luid returned from ovr_Create.
|
||||
/// \param[in] instance Specifies a VkInstance to search for matching luids in.
|
||||
/// \param[out] out_physicalDevice Returns the VkPhysicalDevice matching the instance and luid.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note This function enumerates the current physical devices and returns the one matching the
|
||||
/// luid. It must be called at least once prior to any ovr_CreateTextureSwapChainVk or
|
||||
/// ovr_CreateMirrorTextureWithOptionsVk calls, and the instance must remain valid for the lifetime
|
||||
/// of the returned objects. It is assumed the VkDevice created by the application will be for the
|
||||
/// returned physical device.
|
||||
///
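/// <b>Example code</b> (illustrative sketch; instance is the application's VkInstance created
/// with the extensions reported by ovr_GetInstanceExtensionsVk)
/// \code{.cpp}
/// VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
/// ovrResult result = ovr_GetSessionPhysicalDeviceVk(session, luid, instance, &physicalDevice);
/// // Create the application's VkDevice from physicalDevice before creating any swap chains.
/// \endcode
///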
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetSessionPhysicalDeviceVk(
|
||||
ovrSession session,
|
||||
ovrGraphicsLuid luid,
|
||||
VkInstance instance,
|
||||
VkPhysicalDevice* out_physicalDevice);
|
||||
|
||||
/// Select VkQueue to block on till rendering is complete
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] queue Specifies a VkQueue to add a VkFence operation to and wait on.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The queue may be changed at any time but only the value at the time ovr_SubmitFrame
|
||||
/// is called will be used. ovr_SetSynchronizationQueueVk must be called with a valid VkQueue
|
||||
/// created on the same VkDevice the texture sets were created on prior to the first call to
|
||||
/// ovr_SubmitFrame. An internally created VkFence object will be signalled by the completion
|
||||
/// of operations on queue and waited on to synchronize the VR compositor.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult) ovr_SetSynchronizationQueueVk(ovrSession session, VkQueue queue);
|
||||
// Backwards compatibility for the original typoed version
|
||||
#define ovr_SetSynchonizationQueueVk ovr_SetSynchronizationQueueVk
|
||||
// Define OVR_PREVIEW_DEPRECATION to generate warnings for upcoming API deprecations
|
||||
#if defined(OVR_PREVIEW_DEPRECATION)
|
||||
#pragma deprecated("ovr_SetSynchonizationQueueVk")
|
||||
#endif
|
||||
|
||||
/// Create Texture Swap Chain suitable for use with Vulkan
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] device Specifies the application's VkDevice to create resources with.
|
||||
/// \param[in] desc Specifies requested texture properties. See notes for more info
|
||||
/// about texture format.
|
||||
/// \param[out] out_TextureSwapChain Returns the created ovrTextureSwapChain, which will be valid
|
||||
/// upon a successful return value, else it will be NULL.
|
||||
/// This texture chain must be eventually destroyed via ovr_DestroyTextureSwapChain
|
||||
/// before destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The texture format provided in \a desc should be thought of as the format the
|
||||
/// distortion-compositor will use for the ShaderResourceView when reading the contents
|
||||
/// of the texture. To that end, it is highly recommended that the application
|
||||
/// requests texture swapchain formats that are in sRGB-space
|
||||
/// (e.g. OVR_FORMAT_R8G8B8A8_UNORM_SRGB) as the compositor does sRGB-correct rendering.
|
||||
/// As such, the compositor relies on the GPU's hardware sampler to do the sRGB-to-linear
|
||||
/// conversion. If the application still prefers to render to a linear format (e.g.
|
||||
/// OVR_FORMAT_R8G8B8A8_UNORM) while handling the linear-to-gamma conversion via
|
||||
/// SPIRV code, then the application must still request the corresponding sRGB format and
|
||||
/// also use the \a ovrTextureMisc_DX_Typeless flag in the ovrTextureSwapChainDesc's
|
||||
/// Flag field. This will allow the application to create a RenderTargetView that is the
|
||||
/// desired linear format while the compositor continues to treat it as sRGB. Failure to
|
||||
/// do so will cause the compositor to apply unexpected gamma conversions leading to
|
||||
/// gamma-curve artifacts. The \a ovrTextureMisc_DX_Typeless flag for depth buffer formats
|
||||
/// (e.g. OVR_FORMAT_D32_FLOAT) is ignored as they are always
|
||||
/// converted to be typeless.
|
||||
///
|
||||
/// \see ovr_GetTextureSwapChainLength
|
||||
/// \see ovr_GetTextureSwapChainCurrentIndex
|
||||
/// \see ovr_GetTextureSwapChainDesc
|
||||
/// \see ovr_GetTextureSwapChainBufferVk
|
||||
/// \see ovr_DestroyTextureSwapChain
|
||||
///
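/// <b>Example code</b> (illustrative sketch; device is the application's VkDevice and
/// eyeTextureSize is assumed to be chosen beforehand)
/// \code{.cpp}
/// ovrTextureSwapChainDesc desc = {};
/// desc.Type = ovrTexture_2D;
/// desc.ArraySize = 1;
/// desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
/// desc.Width = eyeTextureSize.w;
/// desc.Height = eyeTextureSize.h;
/// desc.MipLevels = 1;
/// desc.SampleCount = 1;
/// ovrTextureSwapChain chain = nullptr;
/// if (OVR_SUCCESS(ovr_CreateTextureSwapChainVk(session, device, &desc, &chain))) {
///   int length = 0;
///   ovr_GetTextureSwapChainLength(session, chain, &length);
///   // Retrieve each VkImage with ovr_GetTextureSwapChainBufferVk.
/// }
/// \endcode
///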
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateTextureSwapChainVk(
|
||||
ovrSession session,
|
||||
VkDevice device,
|
||||
const ovrTextureSwapChainDesc* desc,
|
||||
ovrTextureSwapChain* out_TextureSwapChain);
|
||||
|
||||
/// Get a specific VkImage within the chain
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] chain Specifies an ovrTextureSwapChain previously returned by
|
||||
/// ovr_CreateTextureSwapChainVk
|
||||
/// \param[in] index Specifies the index within the chain to retrieve.
|
||||
/// Must be between 0 and length (see ovr_GetTextureSwapChainLength),
|
||||
/// or may pass -1 to get the buffer at the CurrentIndex location (saving a
|
||||
/// call to GetTextureSwapChainCurrentIndex).
|
||||
/// \param[out] out_Image Returns the VkImage retrieved.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetTextureSwapChainBufferVk(
|
||||
ovrSession session,
|
||||
ovrTextureSwapChain chain,
|
||||
int index,
|
||||
VkImage* out_Image);
|
||||
|
||||
/// Create Mirror Texture which is auto-refreshed to mirror Rift contents produced by this
|
||||
/// application.
|
||||
///
|
||||
/// A second call to ovr_CreateMirrorTextureWithOptionsVk for a given ovrSession before destroying
|
||||
/// the first one is not supported and will result in an error return.
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] device Specifies the VkDevice to create resources with.
|
||||
/// \param[in] desc Specifies requested texture properties. See notes for more info
|
||||
/// about texture format.
|
||||
/// \param[out] out_MirrorTexture Returns the created ovrMirrorTexture, which will be
|
||||
/// valid upon a successful return value, else it will be NULL.
|
||||
/// This texture must be eventually destroyed via ovr_DestroyMirrorTexture before
|
||||
/// destroying the session with ovr_Destroy.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// \note The texture format provided in \a desc should be thought of as the format the
|
||||
/// compositor will use for the VkImageView when writing into mirror texture. To that end,
|
||||
/// it is highly recommended that the application requests a mirror texture format that is
|
||||
/// in sRGB-space (e.g. OVR_FORMAT_R8G8B8A8_UNORM_SRGB) as the compositor does sRGB-correct
|
||||
/// rendering. If however the application wants to still read the mirror texture as a
|
||||
/// linear format (e.g. OVR_FORMAT_R8G8B8A8_UNORM) and handle the sRGB-to-linear conversion
|
||||
/// in SPIRV code, then it is recommended the application still requests an sRGB format and
|
||||
/// also use the \a ovrTextureMisc_DX_Typeless flag in the ovrMirrorTextureDesc's
|
||||
/// Flags field. This will allow the application to bind a ShaderResourceView that is a
|
||||
/// linear format while the compositor continues to treat it as sRGB. Failure to do so will
|
||||
/// cause the compositor to apply unexpected gamma conversions leading to
|
||||
/// gamma-curve artifacts.
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// ovrMirrorTexture mirrorTexture = nullptr;
|
||||
/// ovrMirrorTextureDesc mirrorDesc = {};
|
||||
/// mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
|
||||
/// mirrorDesc.Width = mirrorWindowWidth;
|
||||
/// mirrorDesc.Height = mirrorWindowHeight;
|
||||
/// ovrResult result = ovr_CreateMirrorTextureWithOptionsVk(session, vkDevice, &mirrorDesc,
|
||||
/// &mirrorTexture);
|
||||
/// [...]
|
||||
/// // Destroy the texture when done with it.
|
||||
/// ovr_DestroyMirrorTexture(session, mirrorTexture);
|
||||
/// mirrorTexture = nullptr;
|
||||
/// \endcode
|
||||
///
|
||||
/// \see ovr_GetMirrorTextureBufferVk
|
||||
/// \see ovr_DestroyMirrorTexture
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_CreateMirrorTextureWithOptionsVk(
|
||||
ovrSession session,
|
||||
VkDevice device,
|
||||
const ovrMirrorTextureDesc* desc,
|
||||
ovrMirrorTexture* out_MirrorTexture);
|
||||
|
||||
/// Get the underlying mirror VkImage
|
||||
///
|
||||
/// \param[in] session Specifies an ovrSession previously returned by ovr_Create.
|
||||
/// \param[in] mirrorTexture Specifies an ovrMirrorTexture previously returned by
|
||||
/// ovr_CreateMirrorTextureWithOptionsVk
|
||||
/// \param[out] out_Image Returns the VkImage pointer retrieved.
|
||||
///
|
||||
/// \return Returns an ovrResult indicating success or failure. In the case of failure, use
|
||||
/// ovr_GetLastErrorInfo to get more information.
|
||||
///
|
||||
/// <b>Example code</b>
|
||||
/// \code{.cpp}
|
||||
/// VkImage mirrorImage = VK_NULL_HANDLE;
|
||||
/// ovr_GetMirrorTextureBufferVk(session, mirrorTexture, &mirrorImage);
|
||||
/// ...
|
||||
/// vkCmdBlitImage(commandBuffer, mirrorImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
||||
/// presentImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, VK_FILTER_LINEAR);
|
||||
/// ...
|
||||
/// vkQueuePresentKHR(queue, &presentInfo);
|
||||
/// \endcode
|
||||
///
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GetMirrorTextureBufferVk(
|
||||
ovrSession session,
|
||||
ovrMirrorTexture mirrorTexture,
|
||||
VkImage* out_Image);
|
||||
|
||||
#endif // !defined(OVR_EXPORTING_CAPI)
|
||||
|
||||
#endif // OVR_CAPI_Vk_h
|
@@ -0,0 +1,324 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_ErrorCode.h
|
||||
\brief This header provides LibOVR error code declarations.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
*************************************************************************************/
|
||||
|
||||
#ifndef OVR_ErrorCode_h
|
||||
#define OVR_ErrorCode_h
|
||||
|
||||
#include "OVR_Version.h"
|
||||
#include <stdint.h>
|
||||
|
||||
|
||||
|
||||
#ifndef OVR_RESULT_DEFINED
|
||||
#define OVR_RESULT_DEFINED ///< Allows ovrResult to be independently defined.
|
||||
/// API call results are represented at the highest level by a single ovrResult.
|
||||
typedef int32_t ovrResult;
|
||||
#endif
|
||||
|
||||
/// \brief Indicates if an ovrResult indicates success.
|
||||
///
|
||||
/// Some functions return additional successful values other than ovrSuccess and
/// require usage of this macro to indicate success.
|
||||
///
|
||||
#if !defined(OVR_SUCCESS)
|
||||
#define OVR_SUCCESS(result) (result >= 0)
|
||||
#endif
|
||||
|
||||
/// \brief Indicates if an ovrResult indicates an unqualified success.
|
||||
///
|
||||
/// This is useful for indicating that the code intentionally wants to
|
||||
/// check for result == ovrSuccess as opposed to OVR_SUCCESS(), which
|
||||
/// checks for result >= ovrSuccess.
|
||||
///
|
||||
#if !defined(OVR_UNQUALIFIED_SUCCESS)
|
||||
#define OVR_UNQUALIFIED_SUCCESS(result) (result == ovrSuccess)
|
||||
#endif
|
||||
|
||||
/// \brief Indicates if an ovrResult indicates failure.
|
||||
///
|
||||
#if !defined(OVR_FAILURE)
|
||||
#define OVR_FAILURE(result) (!OVR_SUCCESS(result))
|
||||
#endif
|
||||
|
||||
// Success is a value greater or equal to 0, while all error types are negative values.
|
||||
#ifndef OVR_SUCCESS_DEFINED
|
||||
#define OVR_SUCCESS_DEFINED ///< Allows ovrResult to be independently defined.
|
||||
typedef enum ovrSuccessType_ {
|
||||
/// This is a general success result. Use OVR_SUCCESS to test for success.
|
||||
ovrSuccess = 0,
|
||||
} ovrSuccessType;
|
||||
#endif
|
||||
|
||||
// Public success types
|
||||
// Success is a value greater or equal to 0, while all error types are negative values.
|
||||
typedef enum ovrSuccessTypes_ {
|
||||
/// Returned from a call to SubmitFrame. The call succeeded, but what the app
|
||||
/// rendered will not be visible on the HMD. Ideally the app should continue
|
||||
/// calling SubmitFrame, but not do any rendering. When the result becomes
|
||||
/// ovrSuccess, rendering should continue as usual.
|
||||
ovrSuccess_NotVisible = 1000,
|
||||
|
||||
/// Boundary is invalid due to sensor change or was not setup.
|
||||
ovrSuccess_BoundaryInvalid = 1001,
|
||||
|
||||
/// Device is not available for the requested operation.
|
||||
ovrSuccess_DeviceUnavailable = 1002,
|
||||
} ovrSuccessTypes;
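// The split between OVR_SUCCESS and OVR_UNQUALIFIED_SUCCESS matters because several calls
// return qualified success codes (positive values) such as ovrSuccess_NotVisible. A minimal
// sketch of the intended pattern follows; ovr_EndFrame and the session/frameIndex/layer
// variables are assumptions taken from OVR_CAPI.h and application code, not from this header.
#if 0 // illustrative only; not compiled
ovrResult result = ovr_EndFrame(session, frameIndex, NULL, layerPtrList, layerCount);
if (OVR_FAILURE(result)) {
  // Hard failure (negative value): query ovr_GetLastErrorInfo for details.
} else if (OVR_UNQUALIFIED_SUCCESS(result)) {
  // result == ovrSuccess: the submitted frame is visible; keep rendering as usual.
} else if (result == ovrSuccess_NotVisible) {
  // Qualified success: keep submitting frames, but skip the rendering work.
}
#endif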
|
||||
|
||||
// Public error types
|
||||
typedef enum ovrErrorType_ {
|
||||
/******************/
|
||||
/* General errors */
|
||||
/******************/
|
||||
|
||||
/// Failure to allocate memory.
|
||||
ovrError_MemoryAllocationFailure = -1000,
|
||||
|
||||
/// Invalid ovrSession parameter provided.
|
||||
ovrError_InvalidSession = -1002,
|
||||
|
||||
/// The operation timed out.
|
||||
ovrError_Timeout = -1003,
|
||||
|
||||
/// The system or component has not been initialized.
|
||||
ovrError_NotInitialized = -1004,
|
||||
|
||||
/// Invalid parameter provided. See error info or log for details.
|
||||
ovrError_InvalidParameter = -1005,
|
||||
|
||||
/// Generic service error. See error info or log for details.
|
||||
ovrError_ServiceError = -1006,
|
||||
|
||||
/// The given HMD doesn't exist.
|
||||
ovrError_NoHmd = -1007,
|
||||
|
||||
/// Function call is not supported on this hardware/software
|
||||
ovrError_Unsupported = -1009,
|
||||
|
||||
/// Specified device type isn't available.
|
||||
ovrError_DeviceUnavailable = -1010,
|
||||
|
||||
/// The headset was in an invalid orientation for the requested
|
||||
/// operation (e.g. vertically oriented during ovr_RecenterPose).
|
||||
ovrError_InvalidHeadsetOrientation = -1011,
|
||||
|
||||
/// The client failed to call ovr_Destroy on an active session before calling ovr_Shutdown.
|
||||
/// Or the client crashed.
|
||||
ovrError_ClientSkippedDestroy = -1012,
|
||||
|
||||
/// The client failed to call ovr_Shutdown or the client crashed.
|
||||
ovrError_ClientSkippedShutdown = -1013,
|
||||
|
||||
///< The service watchdog discovered a deadlock.
|
||||
ovrError_ServiceDeadlockDetected = -1014,
|
||||
|
||||
///< Function call is invalid for object's current state
|
||||
ovrError_InvalidOperation = -1015,
|
||||
|
||||
///< Increase size of output array
|
||||
ovrError_InsufficientArraySize = -1016,
|
||||
|
||||
/// There is no external camera information stored by ovrServer.
|
||||
ovrError_NoExternalCameraInfo = -1017,
|
||||
|
||||
/// Tracking is lost when ovr_GetDevicePoses() is called.
|
||||
ovrError_LostTracking = -1018,
|
||||
|
||||
/// There was a problem initializing the external camera for capture
|
||||
ovrError_ExternalCameraInitializedFailed = -1019,
|
||||
|
||||
/// There was a problem capturing external camera frames
|
||||
ovrError_ExternalCameraCaptureFailed = -1020,
|
||||
|
||||
/// The external camera friendly name list and the external camera name list
|
||||
/// are not the fixed size (OVR_MAX_EXTERNAL_CAMERA_NAME_BUFFER_SIZE).
|
||||
ovrError_ExternalCameraNameListsBufferSize = -1021,
|
||||
|
||||
/// The external camera friendly name list is not the same size as
|
||||
/// the external camera name list.
|
||||
ovrError_ExternalCameraNameListsMistmatch = -1022,
|
||||
|
||||
/// The external camera property has not been sent to OVRServer
|
||||
/// when the user tries to open the camera.
|
||||
ovrError_ExternalCameraNotCalibrated = -1023,
|
||||
|
||||
/// The external camera name is larger than OVR_EXTERNAL_CAMERA_NAME_SIZE-1
|
||||
ovrError_ExternalCameraNameWrongSize = -1024,
|
||||
|
||||
/// The caller doesn't have permissions for the requested action.
|
||||
ovrError_AccessDenied = -1025,
|
||||
|
||||
/*************************************************/
|
||||
/* Audio error range, reserved for Audio errors. */
|
||||
/*************************************************/
|
||||
|
||||
/// Failure to find the specified audio device.
|
||||
ovrError_AudioDeviceNotFound = -2001,
|
||||
|
||||
/// Generic COM error.
|
||||
ovrError_AudioComError = -2002,
|
||||
|
||||
/**************************/
|
||||
/* Initialization errors. */
|
||||
/**************************/
|
||||
|
||||
/// Generic initialization error.
|
||||
ovrError_Initialize = -3000,
|
||||
|
||||
/// Couldn't load LibOVRRT.
|
||||
ovrError_LibLoad = -3001,
|
||||
|
||||
/// LibOVRRT version incompatibility.
|
||||
ovrError_LibVersion = -3002,
|
||||
|
||||
/// Couldn't connect to the OVR Service.
|
||||
ovrError_ServiceConnection = -3003,
|
||||
|
||||
/// OVR Service version incompatibility.
|
||||
ovrError_ServiceVersion = -3004,
|
||||
|
||||
/// The operating system version is incompatible.
|
||||
ovrError_IncompatibleOS = -3005,
|
||||
|
||||
/// Unable to initialize the HMD display.
|
||||
ovrError_DisplayInit = -3006,
|
||||
|
||||
/// Unable to start the server. Is it already running?
|
||||
ovrError_ServerStart = -3007,
|
||||
|
||||
/// Attempting to re-initialize with a different version.
|
||||
ovrError_Reinitialization = -3008,
|
||||
|
||||
/// Chosen rendering adapters between client and service do not match
|
||||
ovrError_MismatchedAdapters = -3009,
|
||||
|
||||
/// Calling application has leaked resources
|
||||
ovrError_LeakingResources = -3010,
|
||||
|
||||
/// Client version too old to connect to service
|
||||
ovrError_ClientVersion = -3011,
|
||||
|
||||
/// The operating system is out of date.
|
||||
ovrError_OutOfDateOS = -3012,
|
||||
|
||||
/// The graphics driver is out of date.
|
||||
ovrError_OutOfDateGfxDriver = -3013,
|
||||
|
||||
/// The graphics hardware is not supported
|
||||
ovrError_IncompatibleGPU = -3014,
|
||||
|
||||
/// No valid VR display system found.
|
||||
ovrError_NoValidVRDisplaySystem = -3015,
|
||||
|
||||
/// Feature or API is obsolete and no longer supported.
|
||||
ovrError_Obsolete = -3016,
|
||||
|
||||
/// No supported VR display system found, but disabled or driverless adapter found.
|
||||
ovrError_DisabledOrDefaultAdapter = -3017,
|
||||
|
||||
/// The system is using hybrid graphics (Optimus, etc...), which is not supported.
|
||||
ovrError_HybridGraphicsNotSupported = -3018,
|
||||
|
||||
/// Initialization of the DisplayManager failed.
|
||||
ovrError_DisplayManagerInit = -3019,
|
||||
|
||||
/// Failed to get the interface for an attached tracker
|
||||
ovrError_TrackerDriverInit = -3020,
|
||||
|
||||
/// LibOVRRT signature check failure.
|
||||
ovrError_LibSignCheck = -3021,
|
||||
|
||||
/// LibOVRRT path failure.
|
||||
ovrError_LibPath = -3022,
|
||||
|
||||
/// LibOVRRT symbol resolution failure.
|
||||
ovrError_LibSymbols = -3023,
|
||||
|
||||
/// Failed to connect to the service because remote connections to the service are not allowed.
|
||||
ovrError_RemoteSession = -3024,
|
||||
|
||||
/// Vulkan initialization error.
|
||||
ovrError_InitializeVulkan = -3025,
|
||||
|
||||
/// The graphics driver is black-listed.
|
||||
ovrError_BlacklistedGfxDriver = -3026,
|
||||
|
||||
/********************/
|
||||
/* Rendering errors */
|
||||
/********************/
|
||||
|
||||
/// In the event of a system-wide graphics reset or cable unplug this is returned to the app.
|
||||
ovrError_DisplayLost = -6000,
|
||||
|
||||
/// ovr_CommitTextureSwapChain was called too many times on a texture swapchain without
|
||||
/// calling submit to use the chain.
|
||||
ovrError_TextureSwapChainFull = -6001,
|
||||
|
||||
/// The ovrTextureSwapChain is in an incomplete or inconsistent state.
|
||||
/// Ensure ovr_CommitTextureSwapChain was called at least once first.
|
||||
ovrError_TextureSwapChainInvalid = -6002,
|
||||
|
||||
/// Graphics device has been reset (TDR, etc...)
|
||||
ovrError_GraphicsDeviceReset = -6003,
|
||||
|
||||
/// HMD removed from the display adapter
|
||||
ovrError_DisplayRemoved = -6004,
|
||||
|
||||
/// Content protection is not available for the display.
|
||||
ovrError_ContentProtectionNotAvailable = -6005,
|
||||
|
||||
/// Application declared itself as an invisible type and is not allowed to submit frames.
|
||||
ovrError_ApplicationInvisible = -6006,
|
||||
|
||||
/// The given request is disallowed under the current conditions.
|
||||
ovrError_Disallowed = -6007,
|
||||
|
||||
/// Display portion of HMD is plugged into an incompatible port (ex: IGP)
|
||||
ovrError_DisplayPluggedIncorrectly = -6008,
|
||||
|
||||
/// Returned in the event a virtual display system reaches a display limit
|
||||
ovrError_DisplayLimitReached = -6009,
|
||||
|
||||
/****************/
|
||||
/* Fatal errors */
|
||||
/****************/
|
||||
|
||||
/// A runtime exception occurred. The application is required to shut down LibOVR and
|
||||
/// re-initialize it before this error state will be cleared.
|
||||
ovrError_RuntimeException = -7000,
|
||||
|
||||
/**********************/
|
||||
/* Calibration errors */
|
||||
/**********************/
|
||||
|
||||
/// Result of a missing calibration block
|
||||
ovrError_NoCalibration = -9000,
|
||||
|
||||
/// Result of an old calibration block
|
||||
ovrError_OldVersion = -9001,
|
||||
|
||||
/// Result of a bad calibration block due to lengths
|
||||
ovrError_MisformattedBlock = -9002,
|
||||
|
||||
/****************/
|
||||
/* Other errors */
|
||||
/****************/
|
||||
|
||||
|
||||
} ovrErrorType;
|
||||
|
||||
/// Provides information about the last error.
|
||||
/// \see ovr_GetLastErrorInfo
|
||||
typedef struct ovrErrorInfo_ {
|
||||
/// The result from the last API call that generated an error ovrResult.
|
||||
ovrResult Result;
|
||||
|
||||
/// A UTF8-encoded null-terminated English string describing the problem.
|
||||
/// The format of this string is subject to change in future versions.
|
||||
char ErrorString[512];
|
||||
} ovrErrorInfo;
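// A typical retrieval pattern, as a minimal sketch: ovr_Create and ovr_GetLastErrorInfo are
// declared in OVR_CAPI.h, and <stdio.h> is assumed for the logging call.
#if 0 // illustrative only; not compiled
ovrSession session = NULL;
ovrGraphicsLuid luid;
ovrResult result = ovr_Create(&session, &luid);
if (OVR_FAILURE(result)) {
  ovrErrorInfo errorInfo;
  ovr_GetLastErrorInfo(&errorInfo); // fills Result and the human-readable ErrorString
  printf("ovr_Create failed: %d (%s)\n", errorInfo.Result, errorInfo.ErrorString);
}
#endif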
|
||||
|
||||
#endif /* OVR_ErrorCode_h */
|
@ -0,0 +1,60 @@
|
||||
/*************************************************************************************
|
||||
\file OVR_Version.h
|
||||
\brief This header provides LibOVR version identification.
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
*************************************************************************************/
|
||||
|
||||
#ifndef OVR_Version_h
|
||||
#define OVR_Version_h
|
||||
|
||||
|
||||
/// Conventional string-ification macro.
|
||||
#if !defined(OVR_STRINGIZE)
|
||||
#define OVR_STRINGIZEIMPL(x) #x
|
||||
#define OVR_STRINGIZE(x) OVR_STRINGIZEIMPL(x)
|
||||
#endif
|
||||
|
||||
// Master version numbers
|
||||
#define OVR_PRODUCT_VERSION 1 // Product version doesn't participate in semantic versioning.
|
||||
#define OVR_MAJOR_VERSION 1 // If you change these values then you need to also make sure to change
|
||||
// LibOVR/Projects/Windows/LibOVR.props in parallel.
|
||||
#define OVR_MINOR_VERSION 43 //
|
||||
#define OVR_PATCH_VERSION 0
|
||||
#define OVR_BUILD_NUMBER 0
|
||||
|
||||
// This is the ((product * 100) + major) version of the service that the DLL is compatible with.
|
||||
// When we backport changes to old versions of the DLL we update the old DLLs
|
||||
// to move this version number up to the latest version.
|
||||
// The DLL is responsible for checking that the service is the version it supports
|
||||
// and returning an appropriate error message if it has not been made compatible.
|
||||
#define OVR_DLL_COMPATIBLE_VERSION 101
|
||||
|
||||
// This is the minor version representing the minimum version an application can query with this
|
||||
// SDK. Calls to ovr_Initialize will fail if the application requests a version that is less than this.
|
||||
#define OVR_MIN_REQUESTABLE_MINOR_VERSION 17
|
||||
|
||||
#define OVR_FEATURE_VERSION 0
|
||||
|
||||
/// "Major.Minor.Patch"
|
||||
#if !defined(OVR_VERSION_STRING)
|
||||
#define OVR_VERSION_STRING OVR_STRINGIZE(OVR_MAJOR_VERSION.OVR_MINOR_VERSION.OVR_PATCH_VERSION)
|
||||
#endif
|
||||
|
||||
/// "Major.Minor.Patch.Build"
|
||||
#if !defined(OVR_DETAILED_VERSION_STRING)
|
||||
#define OVR_DETAILED_VERSION_STRING \
|
||||
OVR_STRINGIZE(OVR_MAJOR_VERSION.OVR_MINOR_VERSION.OVR_PATCH_VERSION.OVR_BUILD_NUMBER)
|
||||
#endif
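// Worked expansion with the values above (the two-step OVR_STRINGIZE/OVR_STRINGIZEIMPL
// indirection forces the version macros to expand before the # operator is applied):
//   OVR_VERSION_STRING          -> "1.43.0"
//   OVR_DETAILED_VERSION_STRING -> "1.43.0.0"
#if 0 // illustrative only; assumes <stdio.h> is included
printf("LibOVR client version " OVR_DETAILED_VERSION_STRING "\n");
#endif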
|
||||
|
||||
/// \brief file description for version info
|
||||
/// This appears in the user-visible file properties. It is intended to convey publicly
|
||||
/// available additional information such as feature builds.
|
||||
#if !defined(OVR_FILE_DESCRIPTION_STRING)
|
||||
#if defined(_DEBUG)
|
||||
#define OVR_FILE_DESCRIPTION_STRING "dev build debug"
|
||||
#else
|
||||
#define OVR_FILE_DESCRIPTION_STRING "dev build"
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#endif // OVR_Version_h
|
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,169 @@
|
||||
/********************************************************************************/ /**
|
||||
\file OVR_CAPI_Prototypes.h
|
||||
\brief Internal CAPI prototype listing macros
|
||||
\copyright Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
************************************************************************************/
|
||||
|
||||
#ifndef OVR_CAPI_Prototypes_h
|
||||
#define OVR_CAPI_Prototypes_h
|
||||
|
||||
#include "OVR_CAPI.h"
|
||||
|
||||
|
||||
//
|
||||
// OVR_LIST_*_APIS - apply passed in macros to a list of API entrypoints
|
||||
//
|
||||
// The _ macro argument is applied for all current API versions
|
||||
// The X macro argument is applied for back-compat API versions
|
||||
//
|
||||
// The tuple passed to either macro is (ReturnType, FunctionName, OptionalVersion, ParameterList)
|
||||
//
|
||||
|
||||
|
||||
struct ovrViewportStencilDesc_;
|
||||
typedef struct ovrViewportStencilDesc_ ovrViewportStencilDesc;
|
||||
|
||||
// clang-format off
|
||||
|
||||
#define OVR_LIST_PUBLIC_APIS(_,X) \
|
||||
X(ovrBool, ovr_InitializeRenderingShimVersion, , (int requestedMinorVersion)) \
|
||||
_(ovrResult, ovr_Initialize, , (const ovrInitParams* params)) \
|
||||
_(void, ovr_Shutdown, , (void)) \
|
||||
_(const char*, ovr_GetVersionString, , (void)) \
|
||||
_(void, ovr_GetLastErrorInfo, , (ovrErrorInfo* errorInfo)) \
|
||||
_(ovrHmdDesc, ovr_GetHmdDesc, , (ovrSession session)) \
|
||||
_(unsigned int, ovr_GetTrackerCount, , (ovrSession session)) \
|
||||
_(ovrTrackerDesc, ovr_GetTrackerDesc, , (ovrSession session, unsigned int trackerDescIndex)) \
|
||||
_(ovrResult, ovr_Create, , (ovrSession* pSession, ovrGraphicsLuid* pLuid)) \
|
||||
_(void, ovr_Destroy, , (ovrSession session)) \
|
||||
_(ovrResult, ovr_GetSessionStatus, , (ovrSession session, ovrSessionStatus* sessionStatus)) \
|
||||
_(ovrResult, ovr_IsExtensionSupported, , (ovrSession session, ovrExtensions extension, ovrBool* outExtensionSupported)) \
|
||||
_(ovrResult, ovr_EnableExtension, , (ovrSession session, ovrExtensions extension)) \
|
||||
_(ovrResult, ovr_SetTrackingOriginType, , (ovrSession session, ovrTrackingOrigin origin)) \
|
||||
_(ovrTrackingOrigin, ovr_GetTrackingOriginType, , (ovrSession session)) \
|
||||
_(ovrResult, ovr_RecenterTrackingOrigin, , (ovrSession session)) \
|
||||
_(ovrResult, ovr_SpecifyTrackingOrigin, , (ovrSession session, ovrPosef originPose)) \
|
||||
_(void, ovr_ClearShouldRecenterFlag, , (ovrSession session)) \
|
||||
_(ovrTrackingState, ovr_GetTrackingState, , (ovrSession session, double absTime, ovrBool latencyMarker)) \
|
||||
_(ovrResult, ovr_GetDevicePoses, , (ovrSession session, ovrTrackedDeviceType* deviceTypes, int deviceCount, double absTime, ovrPoseStatef* outDevicePoses)) \
|
||||
_(ovrTrackerPose, ovr_GetTrackerPose, , (ovrSession session, unsigned int index)) \
|
||||
_(ovrResult, ovr_GetInputState, , (ovrSession session, ovrControllerType controllerType, ovrInputState*)) \
|
||||
_(unsigned int, ovr_GetConnectedControllerTypes, , (ovrSession session)) \
|
||||
_(ovrSizei, ovr_GetFovTextureSize, , (ovrSession session, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel)) \
|
||||
X(ovrResult, ovr_GetViewportStencil, , (ovrSession session, const ovrViewportStencilDesc* viewportStencilDesc, ovrFovStencilMeshBuffer* meshBuffer)) \
|
||||
_(ovrResult, ovr_GetFovStencil, , (ovrSession session, const ovrFovStencilDesc* fovStencilDesc, ovrFovStencilMeshBuffer* meshBuffer)) \
|
||||
_(ovrResult, ovr_WaitToBeginFrame, , (ovrSession session, long long frameIndex)) \
|
||||
_(ovrResult, ovr_BeginFrame, , (ovrSession session, long long frameIndex)) \
|
||||
_(ovrResult, ovr_EndFrame, , (ovrSession session, long long frameIndex, const ovrViewScaleDesc* viewScaleDesc, ovrLayerHeader const * const * layerPtrList, unsigned int layerCount)) \
|
||||
X(ovrResult, ovr_SubmitFrame, , (ovrSession session, long long frameIndex, const ovrViewScaleDescPre117* viewScaleDesc, ovrLayerHeader const * const * layerPtrList, unsigned int layerCount)) \
|
||||
_(ovrResult, ovr_SubmitFrame, 2, (ovrSession session, long long frameIndex, const ovrViewScaleDesc* viewScaleDesc, ovrLayerHeader const * const * layerPtrList, unsigned int layerCount)) \
|
||||
X(ovrEyeRenderDescPre117, ovr_GetRenderDesc, , (ovrSession session, ovrEyeType eyeType, ovrFovPort fov)) \
|
||||
_(ovrEyeRenderDesc, ovr_GetRenderDesc, 2, (ovrSession session, ovrEyeType eyeType, ovrFovPort fov)) \
|
||||
_(double, ovr_GetPredictedDisplayTime, , (ovrSession session, long long frameIndex)) \
|
||||
_(double, ovr_GetTimeInSeconds, , (void)) \
|
||||
_(ovrBool, ovr_GetBool, , (ovrSession session, const char* propertyName, ovrBool defaultVal)) \
|
||||
_(ovrBool, ovr_SetBool, , (ovrSession session, const char* propertyName, ovrBool value)) \
|
||||
_(int, ovr_GetInt, , (ovrSession session, const char* propertyName, int defaultVal)) \
|
||||
_(ovrBool, ovr_SetInt, , (ovrSession session, const char* propertyName, int value)) \
|
||||
_(float, ovr_GetFloat, , (ovrSession session, const char* propertyName, float defaultVal)) \
|
||||
_(ovrBool, ovr_SetFloat, , (ovrSession session, const char* propertyName, float value)) \
|
||||
_(unsigned int, ovr_GetFloatArray, , (ovrSession session, const char* propertyName, float values[], unsigned int arraySize)) \
|
||||
_(ovrBool, ovr_SetFloatArray, , (ovrSession session, const char* propertyName, const float values[], unsigned int arraySize)) \
|
||||
_(const char*, ovr_GetString, , (ovrSession session, const char* propertyName, const char* defaultVal)) \
|
||||
_(ovrBool, ovr_SetString, , (ovrSession session, const char* propertyName, const char* value)) \
|
||||
_(int, ovr_TraceMessage, , (int level, const char* message)) \
|
||||
_(ovrResult, ovr_IdentifyClient, , (const char* identity)) \
|
||||
_(ovrResult, ovr_CreateTextureSwapChainGL, , (ovrSession session, const ovrTextureSwapChainDesc* desc, ovrTextureSwapChain* outTextureChain)) \
|
||||
_(ovrResult, ovr_CreateMirrorTextureGL, , (ovrSession session, const ovrMirrorTextureDesc* desc, ovrMirrorTexture* outMirrorTexture)) \
|
||||
_(ovrResult, ovr_CreateMirrorTextureWithOptionsGL, , (ovrSession session, const ovrMirrorTextureDesc* desc, ovrMirrorTexture* outMirrorTexture)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainBufferGL, , (ovrSession session, ovrTextureSwapChain chain, int index, unsigned int* texId)) \
|
||||
_(ovrResult, ovr_GetMirrorTextureBufferGL, , (ovrSession session, ovrMirrorTexture mirror, unsigned int* texId)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainLength, , (ovrSession session, ovrTextureSwapChain chain, int* length)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainCurrentIndex, , (ovrSession session, ovrTextureSwapChain chain, int* currentIndex)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainDesc, , (ovrSession session, ovrTextureSwapChain chain, ovrTextureSwapChainDesc* desc)) \
|
||||
_(ovrResult, ovr_CommitTextureSwapChain, , (ovrSession session, ovrTextureSwapChain chain)) \
|
||||
_(void, ovr_DestroyTextureSwapChain, , (ovrSession session, ovrTextureSwapChain chain)) \
|
||||
_(void, ovr_DestroyMirrorTexture, , (ovrSession session, ovrMirrorTexture texture)) \
|
||||
X(ovrResult, ovr_SetQueueAheadFraction, , (ovrSession session, float queueAheadFraction)) \
|
||||
_(ovrResult, ovr_Lookup, , (const char* name, void** data)) \
|
||||
_(ovrTouchHapticsDesc, ovr_GetTouchHapticsDesc, , (ovrSession session, ovrControllerType controllerType)) \
|
||||
_(ovrResult, ovr_SetControllerVibration, , (ovrSession session, ovrControllerType controllerType, float frequency, float amplitude)) \
|
||||
_(ovrResult, ovr_SubmitControllerVibration, , (ovrSession session, ovrControllerType controllerType, const ovrHapticsBuffer* buffer)) \
|
||||
_(ovrResult, ovr_GetControllerVibrationState, , (ovrSession session, ovrControllerType controllerType, ovrHapticsPlaybackState* outState)) \
|
||||
_(ovrResult, ovr_TestBoundary, , (ovrSession session, ovrTrackedDeviceType deviceBitmask, ovrBoundaryType singleBoundaryType, ovrBoundaryTestResult* outTestResult)) \
|
||||
_(ovrResult, ovr_TestBoundaryPoint, , (ovrSession session, const ovrVector3f* point, ovrBoundaryType singleBoundaryType, ovrBoundaryTestResult* outTestResult)) \
|
||||
_(ovrResult, ovr_SetBoundaryLookAndFeel, , (ovrSession session, const ovrBoundaryLookAndFeel* lookAndFeel)) \
|
||||
_(ovrResult, ovr_ResetBoundaryLookAndFeel, , (ovrSession session)) \
|
||||
_(ovrResult, ovr_GetBoundaryGeometry, , (ovrSession session, ovrBoundaryType singleBoundaryType, ovrVector3f* outFloorPoints, int* outFloorPointsCount)) \
|
||||
_(ovrResult, ovr_GetBoundaryDimensions, , (ovrSession session, ovrBoundaryType singleBoundaryType, ovrVector3f* outDimension)) \
|
||||
_(ovrResult, ovr_GetBoundaryVisible, , (ovrSession session, ovrBool* outIsVisible)) \
|
||||
_(ovrResult, ovr_RequestBoundaryVisible, , (ovrSession session, ovrBool visible)) \
|
||||
_(ovrResult, ovr_GetPerfStats, , (ovrSession session, ovrPerfStats* outPerfStats)) \
|
||||
_(ovrResult, ovr_ResetPerfStats, , (ovrSession session))\
|
||||
_(ovrResult, ovr_GetExternalCameras, , (ovrSession session, ovrExternalCamera* outCameras, unsigned int* outCameraCount))\
|
||||
_(ovrResult, ovr_SetExternalCameraProperties, , (ovrSession session, const char* name, const ovrCameraIntrinsics* const intrinsics, const ovrCameraExtrinsics* const extrinsics ))
|
||||
|
||||
#if defined (_WIN32)
|
||||
#define OVR_LIST_WIN32_APIS(_,X) \
|
||||
_(ovrResult, ovr_CreateTextureSwapChainDX, , (ovrSession session, IUnknown* d3dPtr, const ovrTextureSwapChainDesc* desc, ovrTextureSwapChain* outTextureChain)) \
|
||||
_(ovrResult, ovr_CreateMirrorTextureDX, , (ovrSession session, IUnknown* d3dPtr, const ovrMirrorTextureDesc* desc, ovrMirrorTexture* outMirrorTexture)) \
|
||||
_(ovrResult, ovr_CreateMirrorTextureWithOptionsDX, , (ovrSession session, IUnknown* d3dPtr, const ovrMirrorTextureDesc* desc, ovrMirrorTexture* outMirrorTexture)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainBufferDX, , (ovrSession session, ovrTextureSwapChain chain, int index, IID iid, void** ppObject)) \
|
||||
_(ovrResult, ovr_GetMirrorTextureBufferDX, , (ovrSession session, ovrMirrorTexture mirror, IID iid, void** ppObject)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceOutWaveId, , (UINT* deviceOutId)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceInWaveId, , (UINT* deviceInId)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceOutGuidStr, , (WCHAR* deviceOutStrBuffer)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceOutGuid, , (GUID* deviceOutGuid)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceInGuidStr, , (WCHAR* deviceInStrBuffer)) \
|
||||
_(ovrResult, ovr_GetAudioDeviceInGuid, , (GUID* deviceInGuid)) \
|
||||
_(ovrResult, ovr_GetInstanceExtensionsVk, , (ovrGraphicsLuid luid, char* extensionNames, uint32_t* inoutExtensionNamesSize)) \
|
||||
_(ovrResult, ovr_GetDeviceExtensionsVk, , (ovrGraphicsLuid luid, char* extensionNames, uint32_t* inoutExtensionNamesSize)) \
|
||||
_(ovrResult, ovr_GetSessionPhysicalDeviceVk, , (ovrSession session, ovrGraphicsLuid luid, VkInstance instance, VkPhysicalDevice* out_physicalDevice)) \
|
||||
X(ovrResult, ovr_SetSynchonizationQueueVk, , (ovrSession session, VkQueue queue)) \
|
||||
_(ovrResult, ovr_SetSynchronizationQueueVk, , (ovrSession session, VkQueue queue)) \
|
||||
_(ovrResult, ovr_CreateTextureSwapChainVk, , (ovrSession session, VkDevice device, const ovrTextureSwapChainDesc* desc, ovrTextureSwapChain* out_TextureSwapChain)) \
|
||||
_(ovrResult, ovr_GetTextureSwapChainBufferVk, , (ovrSession session, ovrTextureSwapChain chain, int index, VkImage* out_Image)) \
|
||||
_(ovrResult, ovr_CreateMirrorTextureWithOptionsVk, , (ovrSession session, VkDevice device, const ovrMirrorTextureDesc* desc, ovrMirrorTexture* out_MirrorTexture)) \
|
||||
_(ovrResult, ovr_GetMirrorTextureBufferVk, , (ovrSession session, ovrMirrorTexture mirrorTexture, VkImage* out_Image))
|
||||
#else
|
||||
#define OVR_LIST_WIN32_APIS(_,X)
|
||||
#endif
|
||||
|
||||
#define OVR_LIST_INTERNAL_APIS(_,X)
|
||||
|
||||
// We need to forward declare the ovrSensorData type here, as it won't be in a public OVR_CAPI.h header.
|
||||
struct ovrSensorData_;
|
||||
typedef struct ovrSensorData_ ovrSensorData;
|
||||
|
||||
// Hybrid Apps API forward declaration which won't be in a public OVR_CAPI.h header for now.
|
||||
// --------------------------------------------------------------------------
|
||||
struct ovrDesktopWindowDesc_;
|
||||
typedef struct ovrDesktopWindowDesc_ ovrDesktopWindowDesc;
|
||||
|
||||
struct ovrKeyboardDesc_;
|
||||
typedef struct ovrKeyboardDesc_ ovrKeyboardDesc;
|
||||
|
||||
enum ovrHybridInputFocusType_ ;
|
||||
typedef enum ovrHybridInputFocusType_ ovrHybridInputFocusType;
|
||||
|
||||
struct ovrHybridInputFocusState_;
|
||||
typedef struct ovrHybridInputFocusState_ ovrHybridInputFocusState;
|
||||
|
||||
typedef uint32_t ovrDesktopWindowHandle;
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
#define OVR_LIST_PRIVATE_APIS(_,X)
|
||||
|
||||
// clang-format on
|
||||
|
||||
//
|
||||
// OVR_LIST_APIS - master list of all API entrypoints
|
||||
//
|
||||
|
||||
#define OVR_LIST_APIS(_, X) \
|
||||
OVR_LIST_PUBLIC_APIS(_, X) \
|
||||
OVR_LIST_WIN32_APIS(_, X) \
|
||||
OVR_LIST_INTERNAL_APIS(_, X) \
|
||||
OVR_LIST_PRIVATE_APIS(_, X)
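// One way a loader might consume the list, sketched with hypothetical names
// (OVR_DECLARE_PTR and ovrFunctionTable are illustrations, not part of the SDK): the same
// macro is passed for both the current (_) and back-compat (X) slots, so every entry point,
// including versioned ones such as ovr_SubmitFrame2, gets a function-pointer member.
#if 0 // illustrative only; not compiled
#define OVR_DECLARE_PTR(ReturnType, FunctionName, OptionalVersion, ParameterList) \
  ReturnType (*FunctionName##OptionalVersion) ParameterList;

typedef struct ovrFunctionTable_ {
  OVR_LIST_APIS(OVR_DECLARE_PTR, OVR_DECLARE_PTR)
} ovrFunctionTable;

#undef OVR_DECLARE_PTR
#endif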
|
||||
|
||||
#endif // OVR_CAPI_Prototypes_h
|
@ -0,0 +1,437 @@
|
||||
/************************************************************************************
|
||||
|
||||
PublicHeader: OVR_CAPI_Util.c
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Licensed under the Oculus VR Rift SDK License Version 3.3 (the "License");
|
||||
you may not use the Oculus VR Rift SDK except in compliance with the License,
|
||||
which is provided at the time of installation or download, or which
|
||||
otherwise accompanies this software in either electronic or hard copy form.
|
||||
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.oculusvr.com/licenses/LICENSE-3.3
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
*************************************************************************************/
|
||||
|
||||
#include <Extras/OVR_CAPI_Util.h>
|
||||
#include <Extras/OVR_StereoProjection.h>
|
||||
|
||||
#include <limits.h>
|
||||
|
||||
#if !defined(_WIN32)
|
||||
#include <assert.h>
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1800 // MSVC < 2013
|
||||
#define round(dbl) \
|
||||
(dbl) >= 0.0 ? (int)((dbl) + 0.5) \
|
||||
: (((dbl) - (double)(int)(dbl)) <= -0.5 ? (int)(dbl) : (int)((dbl)-0.5))
|
||||
#endif
|
||||
|
||||
|
||||
#if defined(_MSC_VER)
|
||||
#include <emmintrin.h>
|
||||
#pragma intrinsic(_mm_pause)
|
||||
#endif
|
||||
|
||||
#if defined(_WIN32)
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
#if defined(OVR_DLL_BUILD) && defined(OVR_OPENXR_SUPPORT_ENABLED)
|
||||
|
||||
// This forces transitive export of the symbols marked for export in OVR_OpenXR_Impl.cpp:
|
||||
__pragma(comment(linker, "/INCLUDE:" OVR_ON32("_") "exported_openxr_version"))
|
||||
#endif // defined(OVR_DLL_BUILD) && defined(OVR_OPENXR_SUPPORT_ENABLED)
|
||||
|
||||
template <typename T>
|
||||
T ovrMax(T a, T b) {
|
||||
return a > b ? a : b;
|
||||
}
|
||||
template <typename T>
|
||||
T ovrMin(T a, T b) {
|
||||
return a < b ? a : b;
|
||||
}
|
||||
|
||||
// Used to generate projection from ovrEyeDesc::Fov
|
||||
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
|
||||
ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags) {
|
||||
bool leftHanded = (projectionModFlags & ovrProjection_LeftHanded) > 0;
|
||||
bool flipZ = (projectionModFlags & ovrProjection_FarLessThanNear) > 0;
|
||||
bool farAtInfinity = (projectionModFlags & ovrProjection_FarClipAtInfinity) > 0;
|
||||
bool isOpenGL = (projectionModFlags & ovrProjection_ClipRangeOpenGL) > 0;
|
||||
|
||||
// TODO: Pass in correct eye to CreateProjection if we want to support canted displays from CAPI
|
||||
return OVR::CreateProjection(
|
||||
leftHanded, isOpenGL, fov, OVR::StereoEye_Center, znear, zfar, flipZ, farAtInfinity);
|
||||
}
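// A minimal usage sketch: the fov variable is assumed to come from ovrHmdDesc/ovr_GetRenderDesc
// in application code. The flags must describe how the app actually renders (handedness,
// clip range, reversed Z), otherwise depth-based compositor features will misbehave.
#if 0 // illustrative only; not compiled
ovrMatrix4f proj = ovrMatrix4f_Projection(
    fov,
    0.1f,    // znear
    1000.0f, // zfar (ignored only if ovrProjection_FarClipAtInfinity were also set)
    ovrProjection_ClipRangeOpenGL | ovrProjection_FarLessThanNear);
#endif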
|
||||
|
||||
OVR_PUBLIC_FUNCTION(ovrTimewarpProjectionDesc)
|
||||
ovrTimewarpProjectionDesc_FromProjection(ovrMatrix4f Projection, unsigned int projectionModFlags) {
|
||||
ovrTimewarpProjectionDesc res;
|
||||
res.Projection22 = Projection.M[2][2];
|
||||
res.Projection23 = Projection.M[2][3];
|
||||
res.Projection32 = Projection.M[3][2];
|
||||
|
||||
if ((res.Projection32 != 1.0f) && (res.Projection32 != -1.0f)) {
|
||||
// This is a very strange projection matrix, and probably won't work.
|
||||
// If you need it to work, please contact Oculus and let us know your usage scenario.
|
||||
}
|
||||
|
||||
if ((projectionModFlags & ovrProjection_ClipRangeOpenGL) != 0) {
|
||||
// Internally we use the D3D range of [0,+w] not the OGL one of [-w,+w], so we need to convert
|
||||
// one to the other.
|
||||
// Note that the values in the depth buffer, and the actual linear depth we want is the same for
|
||||
// both APIs,
|
||||
// the difference is purely in the values inside the projection matrix.
|
||||
|
||||
// D3D does this:
|
||||
// depthBuffer = ( ProjD3D.M[2][2] * linearDepth + ProjD3D.M[2][3] ) / ( linearDepth
|
||||
// * ProjD3D.M[3][2] );
|
||||
// OGL does this:
|
||||
// depthBuffer = 0.5 + 0.5 * ( ProjOGL.M[2][2] * linearDepth + ProjOGL.M[2][3] ) / ( linearDepth
|
||||
// * ProjOGL.M[3][2] );
|
||||
|
||||
// Therefore:
|
||||
// ProjD3D.M[2][2] = 0.5 * ( ProjOGL.M[2][2] + ProjOGL.M[3][2] );
|
||||
// ProjD3D.M[2][3] = 0.5 * ProjOGL.M[2][3];
|
||||
// ProjD3D.M[3][2] = ProjOGL.M[3][2];
|
||||
|
||||
res.Projection22 = 0.5f * (Projection.M[2][2] + Projection.M[3][2]);
|
||||
res.Projection23 = 0.5f * Projection.M[2][3];
|
||||
res.Projection32 = Projection.M[3][2];
|
||||
}
|
||||
return res;
|
||||
}
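// Minimal sketch of how this pairs with ovrMatrix4f_Projection: the same flags must be used
// for both calls so the compositor sees the projection that was actually rendered with. The
// fov variable and the depth-layer hand-off are assumptions about application code.
#if 0 // illustrative only; not compiled
unsigned int projFlags = ovrProjection_ClipRangeOpenGL;
ovrMatrix4f proj = ovrMatrix4f_Projection(fov, 0.1f, 1000.0f, projFlags);
ovrTimewarpProjectionDesc timewarpDesc =
    ovrTimewarpProjectionDesc_FromProjection(proj, projFlags);
// timewarpDesc is then typically copied into a depth layer (see ovrLayerEyeFovDepth in
// OVR_CAPI.h) before frame submission.
#endif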
|
||||
|
||||
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
|
||||
ovrMatrix4f_OrthoSubProjection(
|
||||
ovrMatrix4f projection,
|
||||
ovrVector2f orthoScale,
|
||||
float orthoDistance,
|
||||
float hmdToEyeOffsetX) {
|
||||
ovrMatrix4f ortho;
|
||||
// Negative sign is correct!
|
||||
// If the eye is offset to the left, then the ortho view needs to be offset to the right relative
|
||||
// to the camera.
|
||||
float orthoHorizontalOffset = -hmdToEyeOffsetX / orthoDistance;
|
||||
|
||||
// Current projection maps real-world vector (x,y,1) to the RT.
|
||||
// We want to find the projection that maps the range [-FovPixels/2,FovPixels/2] to
|
||||
// the physical [-orthoHalfFov,orthoHalfFov]
|
||||
// Note moving the offset from M[0][2]+M[1][2] to M[0][3]+M[1][3] - this means
|
||||
// we don't have to feed in Z=1 all the time.
|
||||
// The horizontal offset math is a little hinky because the destination is
|
||||
// actually [-orthoHalfFov+orthoHorizontalOffset,orthoHalfFov+orthoHorizontalOffset]
|
||||
// So we need to first map [-FovPixels/2,FovPixels/2] to
|
||||
// [-orthoHalfFov+orthoHorizontalOffset,orthoHalfFov+orthoHorizontalOffset]:
|
||||
// x1 = x0 * orthoHalfFov/(FovPixels/2) + orthoHorizontalOffset;
|
||||
// = x0 * 2*orthoHalfFov/FovPixels + orthoHorizontalOffset;
|
||||
// But then we need the same mapping as the existing projection matrix, i.e.
|
||||
// x2 = x1 * Projection.M[0][0] + Projection.M[0][2];
|
||||
// = x0 * (2*orthoHalfFov/FovPixels + orthoHorizontalOffset) * Projection.M[0][0] +
|
||||
// Projection.M[0][2]; = x0 * Projection.M[0][0]*2*orthoHalfFov/FovPixels +
|
||||
// orthoHorizontalOffset*Projection.M[0][0] + Projection.M[0][2];
|
||||
// So in the new projection matrix we need to scale by Projection.M[0][0]*2*orthoHalfFov/FovPixels
|
||||
// and offset by orthoHorizontalOffset*Projection.M[0][0] + Projection.M[0][2].
|
||||
|
||||
ortho.M[0][0] = projection.M[0][0] * orthoScale.x;
|
||||
ortho.M[0][1] = 0.0f;
|
||||
ortho.M[0][2] = 0.0f;
|
||||
ortho.M[0][3] = -projection.M[0][2] + (orthoHorizontalOffset * projection.M[0][0]);
|
||||
|
||||
ortho.M[1][0] = 0.0f;
|
||||
ortho.M[1][1] =
|
||||
-projection.M[1][1] * orthoScale.y; /* Note sign flip (text rendering uses Y=down). */
|
||||
ortho.M[1][2] = 0.0f;
|
||||
ortho.M[1][3] = -projection.M[1][2];
|
||||
|
||||
ortho.M[2][0] = 0.0f;
|
||||
ortho.M[2][1] = 0.0f;
|
||||
ortho.M[2][2] = 0.0f;
|
||||
ortho.M[2][3] = 0.0f;
|
||||
|
||||
/* No perspective correction for ortho. */
|
||||
ortho.M[3][0] = 0.0f;
|
||||
ortho.M[3][1] = 0.0f;
|
||||
ortho.M[3][2] = 0.0f;
|
||||
ortho.M[3][3] = 1.0f;
|
||||
|
||||
return ortho;
|
||||
}
|
||||
|
||||
#undef ovr_CalcEyePoses
|
||||
OVR_PUBLIC_FUNCTION(void)
|
||||
ovr_CalcEyePoses(ovrPosef headPose, const ovrVector3f hmdToEyeOffset[2], ovrPosef outEyePoses[2]) {
|
||||
if (!hmdToEyeOffset || !outEyePoses) {
|
||||
return;
|
||||
}
|
||||
|
||||
using OVR::Posef;
|
||||
using OVR::Vector3f;
|
||||
|
||||
// Currently hmdToEyeOffset is only a 3D vector
|
||||
outEyePoses[0] =
|
||||
Posef(headPose.Orientation, ((Posef)headPose).Apply((Vector3f)hmdToEyeOffset[0]));
|
||||
outEyePoses[1] =
|
||||
Posef(headPose.Orientation, ((Posef)headPose).Apply((Vector3f)hmdToEyeOffset[1]));
|
||||
}
|
||||
|
||||
OVR_PRIVATE_FUNCTION(void)
|
||||
ovr_CalcEyePoses2(ovrPosef headPose, const ovrPosef hmdToEyePose[2], ovrPosef outEyePoses[2]) {
|
||||
if (!hmdToEyePose || !outEyePoses) {
|
||||
return;
|
||||
}
|
||||
|
||||
using OVR::Posef;
|
||||
using OVR::Vector3f;
|
||||
|
||||
outEyePoses[0] = (Posef)headPose * (Posef)hmdToEyePose[0];
|
||||
outEyePoses[1] = (Posef)headPose * (Posef)hmdToEyePose[1];
|
||||
}
|
||||
|
||||
#undef ovr_GetEyePoses
|
||||
OVR_PUBLIC_FUNCTION(void)
|
||||
ovr_GetEyePoses(
|
||||
ovrSession session,
|
||||
long long frameIndex,
|
||||
ovrBool latencyMarker,
|
||||
const ovrVector3f hmdToEyeOffset[2],
|
||||
ovrPosef outEyePoses[2],
|
||||
double* outSensorSampleTime) {
|
||||
double frameTime = ovr_GetPredictedDisplayTime(session, frameIndex);
|
||||
ovrTrackingState trackingState = ovr_GetTrackingState(session, frameTime, latencyMarker);
|
||||
ovr_CalcEyePoses(trackingState.HeadPose.ThePose, hmdToEyeOffset, outEyePoses);
|
||||
|
||||
if (outSensorSampleTime != nullptr) {
|
||||
*outSensorSampleTime = ovr_GetTimeInSeconds();
|
||||
}
|
||||
}
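// Per-frame usage sketch. The HmdToEyeOffset fields are an assumption about the
// (pre-1.17 style) ovrEyeRenderDesc filled in by ovr_GetRenderDesc; this wrapper only needs
// the two ovrVector3f offsets plus the frame index being submitted.
#if 0 // illustrative only; not compiled
ovrVector3f hmdToEyeOffset[2] = {eyeRenderDesc[0].HmdToEyeOffset,
                                 eyeRenderDesc[1].HmdToEyeOffset};
ovrPosef eyePoses[2];
double sensorSampleTime = 0.0;
ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyeOffset, eyePoses, &sensorSampleTime);
// eyePoses[0]/eyePoses[1] drive the per-eye view matrices; sensorSampleTime is reported back
// to the compositor with the submitted layer.
#endif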
|
||||
|
||||
OVR_PRIVATE_FUNCTION(void)
|
||||
ovr_GetEyePoses2(
|
||||
ovrSession session,
|
||||
long long frameIndex,
|
||||
ovrBool latencyMarker,
|
||||
const ovrPosef hmdToEyePose[2],
|
||||
ovrPosef outEyePoses[2],
|
||||
double* outSensorSampleTime) {
|
||||
double frameTime = ovr_GetPredictedDisplayTime(session, frameIndex);
|
||||
ovrTrackingState trackingState = ovr_GetTrackingState(session, frameTime, latencyMarker);
|
||||
ovr_CalcEyePoses2(trackingState.HeadPose.ThePose, hmdToEyePose, outEyePoses);
|
||||
|
||||
if (outSensorSampleTime != nullptr) {
|
||||
*outSensorSampleTime = ovr_GetTimeInSeconds();
|
||||
}
|
||||
}
|
||||
|
||||
OVR_PUBLIC_FUNCTION(ovrDetectResult) ovr_Detect(int timeoutMilliseconds) {
|
||||
// Initially we assume everything is not running.
|
||||
ovrDetectResult result;
|
||||
result.IsOculusHMDConnected = ovrFalse;
|
||||
result.IsOculusServiceRunning = ovrFalse;
|
||||
|
||||
#if defined(_WIN32)
|
||||
// Attempt to open the named event.
|
||||
HANDLE hServiceEvent = ::OpenEventW(SYNCHRONIZE, FALSE, OVR_HMD_CONNECTED_EVENT_NAME);
|
||||
|
||||
// If event exists,
|
||||
if (hServiceEvent != nullptr) {
|
||||
// This indicates that the Oculus Runtime is installed and running.
|
||||
result.IsOculusServiceRunning = ovrTrue;
|
||||
|
||||
// Poll for event state.
|
||||
DWORD objectResult = ::WaitForSingleObject(hServiceEvent, timeoutMilliseconds);
|
||||
|
||||
// If the event is signaled,
|
||||
if (objectResult == WAIT_OBJECT_0) {
|
||||
// This indicates that the Oculus HMD is connected.
|
||||
result.IsOculusHMDConnected = ovrTrue;
|
||||
}
|
||||
|
||||
::CloseHandle(hServiceEvent);
|
||||
}
|
||||
#else
|
||||
(void)timeoutMilliseconds;
|
||||
fprintf(stderr, __FILE__ "::[%s] Not implemented. Assuming single-process.\n", __func__);
|
||||
result.IsOculusServiceRunning = ovrTrue;
|
||||
result.IsOculusHMDConnected = ovrTrue;
|
||||
#endif // OSX_UNIMPLEMENTED
|
||||
|
||||
|
||||
return result;
|
||||
}
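// Usage sketch: ovr_Detect lets an application probe for the runtime and headset before
// paying the cost of ovr_Initialize. A timeout of 0 polls the connection event without
// blocking.
#if 0 // illustrative only; not compiled
ovrDetectResult detect = ovr_Detect(0);
if (!detect.IsOculusServiceRunning) {
  // Runtime not installed or not running: fall back to a non-VR code path.
} else if (!detect.IsOculusHMDConnected) {
  // Runtime present but no headset detected: e.g. prompt the user to connect one.
} else {
  // Safe to proceed with ovr_Initialize / ovr_Create.
}
#endif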
|
||||
|
||||
OVR_PUBLIC_FUNCTION(void) ovrPosef_FlipHandedness(const ovrPosef* inPose, ovrPosef* outPose) {
|
||||
outPose->Orientation.x = -inPose->Orientation.x;
|
||||
outPose->Orientation.y = inPose->Orientation.y;
|
||||
outPose->Orientation.z = inPose->Orientation.z;
|
||||
outPose->Orientation.w = -inPose->Orientation.w;
|
||||
|
||||
outPose->Position.x = -inPose->Position.x;
|
||||
outPose->Position.y = inPose->Position.y;
|
||||
outPose->Position.z = inPose->Position.z;
|
||||
}
|
||||
|
||||
static float wavPcmBytesToFloat(const void* data, int32_t sizeInBits, bool swapBytes) {
|
||||
// TODO Support big endian
|
||||
(void)swapBytes;
|
||||
|
||||
// There's not a strong standard to convert 8/16/32b PCM to float.
|
||||
// For 16b: MSDN says range is [-32760, 32760], Python SciPy uses [-32767, 32767] and Audacity
|
||||
// outputs the full range [-32768, 32767].
|
||||
// We use the same range on both sides and clamp to [-1, 1].
|
||||
|
||||
float result = 0.0f;
|
||||
if (sizeInBits == 8)
|
||||
// uint8_t is a special case, unsigned where 128 is zero
|
||||
result = (*((uint8_t*)data) / (float)UCHAR_MAX) * 2.0f - 1.0f;
|
||||
else if (sizeInBits == 16)
|
||||
result = *((int16_t*)data) / (float)SHRT_MAX;
|
||||
// else if (sizeInBits == 24) {
|
||||
// int value = data[0] | data[1] << 8 | data[2] << 16; // Need consider 2's complement
|
||||
// return value / 8388607.0f;
|
||||
//}
|
||||
else if (sizeInBits == 32)
|
||||
result = *((int32_t*)data) / (float)INT_MAX;
|
||||
|
||||
return ovrMax(-1.0f, result);
|
||||
}
|
||||
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_GenHapticsFromAudioData(
|
||||
ovrHapticsClip* outHapticsClip,
|
||||
const ovrAudioChannelData* audioChannel,
|
||||
ovrHapticsGenMode genMode) {
|
||||
if (!outHapticsClip || !audioChannel || genMode != ovrHapticsGenMode_PointSample)
|
||||
return ovrError_InvalidParameter;
|
||||
// Validate audio channel
|
||||
if (audioChannel->Frequency <= 0 || audioChannel->SamplesCount <= 0 ||
|
||||
audioChannel->Samples == nullptr)
|
||||
return ovrError_InvalidParameter;
|
||||
|
||||
const int32_t kHapticsFrequency = 320;
|
||||
const int32_t kHapticsMaxAmplitude = 255;
|
||||
float samplesPerStep = audioChannel->Frequency / (float)kHapticsFrequency;
|
||||
int32_t hapticsSampleCount = (int32_t)ceil(audioChannel->SamplesCount / samplesPerStep);
|
||||
|
||||
uint8_t* hapticsSamples = new uint8_t[hapticsSampleCount];
|
||||
for (int32_t i = 0; i < hapticsSampleCount; ++i) {
|
||||
float sample = audioChannel->Samples[(int32_t)(i * samplesPerStep)];
|
||||
uint8_t hapticSample =
|
||||
(uint8_t)ovrMin(UCHAR_MAX, (int)round(fabs(sample) * kHapticsMaxAmplitude));
|
||||
hapticsSamples[i] = hapticSample;
|
||||
}
|
||||
|
||||
outHapticsClip->Samples = hapticsSamples;
|
||||
outHapticsClip->SamplesCount = hapticsSampleCount;
|
||||
|
||||
return ovrSuccess;
|
||||
}
|
||||
|
||||
OVR_PUBLIC_FUNCTION(ovrResult)
|
||||
ovr_ReadWavFromBuffer(
|
||||
ovrAudioChannelData* outAudioChannel,
|
||||
const void* inputData,
|
||||
int dataSizeInBytes,
|
||||
int stereoChannelToUse) {
|
||||
// We don't support any format other than PCM and IEEE Float
|
||||
enum WavFormats {
|
||||
kWavFormatUnknown = 0x0000,
|
||||
kWavFormatLPCM = 0x0001,
|
||||
kWavFormatFloatIEEE = 0x0003,
|
||||
kWavFormatExtensible = 0xFFFE
|
||||
};
|
||||
|
||||
struct WavHeader {
|
||||
char RiffId[4]; // "RIFF" = little-endian, "RIFX" = big-endian
|
||||
int32_t Size; // 4 + (8 + FmtChunkSize) + (8 + DataChunkSize)
|
||||
char WavId[4]; // Must be "WAVE"
|
||||
|
||||
char FmtChunckId[4]; // Must be "fmt "
|
||||
uint32_t FmtChunkSize; // Remaining size of this chunk (16B)
|
||||
uint16_t Format; // WavFormats: PCM or Float supported
|
||||
uint16_t Channels; // 1 = Mono, 2 = Stereo
|
||||
uint32_t SampleRate; // e.g. 44100
|
||||
uint32_t BytesPerSec; // SampleRate * BytesPerBlock
|
||||
uint16_t BytesPerBlock; // (NumChannels * BitsPerSample/8)
|
||||
uint16_t BitsPerSample; // 8, 16, 32
|
||||
|
||||
char DataChunckId[4]; // Must be "data"
|
||||
uint32_t DataChunkSize; // Remaining size of this chunk
|
||||
};
|
||||
|
||||
const int32_t kMinWavFileSize = sizeof(WavHeader) + 1;
|
||||
if (!outAudioChannel || !inputData || dataSizeInBytes < kMinWavFileSize)
|
||||
return ovrError_InvalidParameter;
|
||||
|
||||
WavHeader* header = (WavHeader*)inputData;
|
||||
uint8_t* data = (uint8_t*)inputData + sizeof(WavHeader);
|
||||
|
||||
// Validate
|
||||
const char* wavId = header->RiffId;
|
||||
// TODO We need to support RIFX when supporting big endian formats
|
||||
// bool isValidWav = (wavId[0] == 'R' && wavId[1] == 'I' && wavId[2] == 'F' && (wavId[3] == 'F' ||
|
||||
// wavId[3] == 'X')) &&
|
||||
bool isValidWav = (wavId[0] == 'R' && wavId[1] == 'I' && wavId[2] == 'F' && wavId[3] == 'F') &&
|
||||
memcmp(header->WavId, "WAVE", 4) == 0;
|
||||
bool hasValidChunks =
|
||||
memcmp(header->FmtChunckId, "fmt ", 4) == 0 && memcmp(header->DataChunckId, "data ", 4) == 0;
|
||||
if (!isValidWav || !hasValidChunks) {
|
||||
return ovrError_InvalidOperation;
|
||||
}
|
||||
|
||||
// We only support PCM
|
||||
bool isSupported = (header->Format == kWavFormatLPCM || header->Format == kWavFormatFloatIEEE) &&
|
||||
(header->Channels == 1 || header->Channels == 2) &&
|
||||
(header->BitsPerSample == 8 || header->BitsPerSample == 16 || header->BitsPerSample == 32);
|
||||
if (!isSupported) {
|
||||
return ovrError_Unsupported;
|
||||
}
|
||||
|
||||
// Channel selection
|
||||
bool useSecondChannel = (header->Channels == 2 && stereoChannelToUse == 1);
|
||||
int32_t channelOffset = (useSecondChannel) ? header->BytesPerBlock / 2 : 0;
|
||||
|
||||
// TODO Support big-endian
|
||||
int32_t blockCount = header->DataChunkSize / header->BytesPerBlock;
|
||||
float* samples = new float[blockCount];
|
||||
|
||||
for (int32_t i = 0; i < blockCount; i++) {
|
||||
int32_t dataIndex = i * header->BytesPerBlock;
|
||||
uint8_t* dataPtr = &data[dataIndex + channelOffset];
|
||||
float sample = (header->Format == kWavFormatLPCM)
|
||||
? wavPcmBytesToFloat(dataPtr, header->BitsPerSample, false)
|
||||
: *(float*)dataPtr;
|
||||
|
||||
samples[i] = sample;
|
||||
}
|
||||
|
||||
// Output
|
||||
outAudioChannel->Samples = samples;
|
||||
outAudioChannel->SamplesCount = blockCount;
|
||||
outAudioChannel->Frequency = header->SampleRate;
|
||||
|
||||
return ovrSuccess;
|
||||
}
|
||||
|
||||
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseAudioChannelData(ovrAudioChannelData* audioChannel) {
|
||||
if (audioChannel != nullptr && audioChannel->Samples != nullptr) {
|
||||
delete[] audioChannel->Samples;
|
||||
memset(audioChannel, 0, sizeof(ovrAudioChannelData));
|
||||
}
|
||||
}
|
||||
|
||||
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseHapticsClip(ovrHapticsClip* hapticsClip) {
|
||||
if (hapticsClip != nullptr && hapticsClip->Samples != nullptr) {
|
||||
delete[](uint8_t*) hapticsClip->Samples;
|
||||
memset(hapticsClip, 0, sizeof(ovrHapticsClip));
|
||||
}
|
||||
}
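// End-to-end sketch of the WAV-to-haptics path above. wavData/wavSize stand for a WAV file
// the application has already loaded into memory; they are assumptions, as is the later
// hand-off to ovr_SubmitControllerVibration.
#if 0 // illustrative only; not compiled
ovrAudioChannelData channel = {};
ovrHapticsClip clip = {};
if (OVR_SUCCESS(ovr_ReadWavFromBuffer(&channel, wavData, wavSize, 0 /* left channel */)) &&
    OVR_SUCCESS(ovr_GenHapticsFromAudioData(&clip, &channel, ovrHapticsGenMode_PointSample))) {
  // clip.Samples now holds 320 Hz, 8-bit amplitude samples ready to be wrapped in an
  // ovrHapticsBuffer for ovr_SubmitControllerVibration.
}
ovr_ReleaseAudioChannelData(&channel);
ovr_ReleaseHapticsClip(&clip);
#endif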
|
@ -0,0 +1,218 @@
|
||||
/************************************************************************************
|
||||
|
||||
Filename : OVR_StereoProjection.cpp
|
||||
Content : Stereo rendering functions
|
||||
Created : November 30, 2013
|
||||
Authors : Tom Fosyth
|
||||
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Licensed under the Oculus VR Rift SDK License Version 3.3 (the "License");
|
||||
you may not use the Oculus VR Rift SDK except in compliance with the License,
|
||||
which is provided at the time of installation or download, or which
|
||||
otherwise accompanies this software in either electronic or hard copy form.
|
||||
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.oculusvr.com/licenses/LICENSE-3.3
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
*************************************************************************************/
|
||||
|
||||
#include <Extras/OVR_StereoProjection.h>
|
||||
|
||||
namespace OVR {
|
||||
|
||||
ScaleAndOffset2D CreateNDCScaleAndOffsetFromFov(FovPort tanHalfFov) {
|
||||
float projXScale = 2.0f / (tanHalfFov.LeftTan + tanHalfFov.RightTan);
|
||||
float projXOffset = (tanHalfFov.LeftTan - tanHalfFov.RightTan) * projXScale * 0.5f;
|
||||
float projYScale = 2.0f / (tanHalfFov.UpTan + tanHalfFov.DownTan);
|
||||
float projYOffset = (tanHalfFov.UpTan - tanHalfFov.DownTan) * projYScale * 0.5f;
|
||||
|
||||
ScaleAndOffset2D result;
|
||||
result.Scale = Vector2f(projXScale, projYScale);
|
||||
result.Offset = Vector2f(projXOffset, projYOffset);
|
||||
// Hey - why is that Y.Offset negated?
|
||||
// It's because a projection matrix transforms from world coords with Y=up,
|
||||
// whereas this is from NDC which is Y=down.
|
||||
|
||||
return result;
|
||||
}
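// Worked check of the mapping: for a symmetric 90-degree FOV all four tangents are 1, so
// Scale = (2/(1+1), 2/(1+1)) = (1, 1) and Offset = (0, 0), i.e. the identity NDC scaling;
// an asymmetric FOV moves Offset away from zero.
#if 0 // illustrative only; not compiled
FovPort symmetric;
symmetric.UpTan = symmetric.DownTan = symmetric.LeftTan = symmetric.RightTan = 1.0f;
ScaleAndOffset2D ndc = CreateNDCScaleAndOffsetFromFov(symmetric); // Scale=(1,1), Offset=(0,0)
#endif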
|
||||
|
||||
Matrix4f CreateProjection(
|
||||
bool leftHanded,
|
||||
bool isOpenGL,
|
||||
FovPort tanHalfFov,
|
||||
StereoEye /*eye*/,
|
||||
float zNear /*= 0.01f*/,
|
||||
float zFar /*= 10000.0f*/,
|
||||
bool flipZ /*= false*/,
|
||||
bool farAtInfinity /*= false*/) {
|
||||
if (!flipZ && farAtInfinity) {
|
||||
// OVR_ASSERT_M(false, "Cannot push Far Clip to Infinity when Z-order is not flipped");
|
||||
// Assertion disabled because this code no longer has access to LibOVRKernel assertion
|
||||
// functionality.
|
||||
farAtInfinity = false;
|
||||
}
|
||||
|
||||
// A projection matrix is very like a scaling from NDC, so we can start with that.
|
||||
ScaleAndOffset2D scaleAndOffset = CreateNDCScaleAndOffsetFromFov(tanHalfFov);
|
||||
|
||||
float handednessScale = leftHanded ? 1.0f : -1.0f;
|
||||
|
||||
Matrix4f projection;
|
||||
// Produces X result, mapping clip edges to [-w,+w]
|
||||
projection.M[0][0] = scaleAndOffset.Scale.x;
|
||||
projection.M[0][1] = 0.0f;
|
||||
projection.M[0][2] = handednessScale * scaleAndOffset.Offset.x;
|
||||
projection.M[0][3] = 0.0f;
|
||||
|
||||
// Produces Y result, mapping clip edges to [-w,+w]
|
||||
// Hey - why is that YOffset negated?
|
||||
// It's because a projection matrix transforms from world coords with Y=up,
|
||||
// whereas this is derived from an NDC scaling, which is Y=down.
|
||||
projection.M[1][0] = 0.0f;
|
||||
projection.M[1][1] = scaleAndOffset.Scale.y;
|
||||
projection.M[1][2] = handednessScale * -scaleAndOffset.Offset.y;
|
||||
projection.M[1][3] = 0.0f;
|
||||
|
||||
// Produces Z-buffer result - app needs to fill this in with whatever Z range it wants.
|
||||
// We'll just use some defaults for now.
|
||||
projection.M[2][0] = 0.0f;
|
||||
projection.M[2][1] = 0.0f;
|
||||
|
||||
if (farAtInfinity) {
|
||||
if (isOpenGL) {
|
||||
// It's not clear this makes sense for OpenGL - you don't get the same precision benefits you
|
||||
// do in D3D.
|
||||
projection.M[2][2] = -handednessScale;
|
||||
projection.M[2][3] = 2.0f * zNear;
|
||||
} else {
|
||||
projection.M[2][2] = 0.0f;
|
||||
projection.M[2][3] = zNear;
|
||||
}
|
||||
} else {
|
||||
if (isOpenGL) {
|
||||
// Clip range is [-w,+w], so 0 is at the middle of the range.
|
||||
projection.M[2][2] =
|
||||
-handednessScale * (flipZ ? -1.0f : 1.0f) * (zNear + zFar) / (zNear - zFar);
|
||||
projection.M[2][3] = 2.0f * ((flipZ ? -zFar : zFar) * zNear) / (zNear - zFar);
|
||||
} else {
|
||||
// Clip range is [0,+w], so 0 is at the start of the range.
|
||||
projection.M[2][2] = -handednessScale * (flipZ ? -zNear : zFar) / (zNear - zFar);
|
||||
projection.M[2][3] = ((flipZ ? -zFar : zFar) * zNear) / (zNear - zFar);
|
||||
}
|
||||
}
|
||||
|
||||
// Produces W result (= Z in)
|
||||
projection.M[3][0] = 0.0f;
|
||||
projection.M[3][1] = 0.0f;
|
||||
projection.M[3][2] = handednessScale;
|
||||
projection.M[3][3] = 0.0f;
|
||||
|
||||
return projection;
|
||||
}
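// Minimal usage sketch: a right-handed, D3D-style clip range with reversed Z and the far
// plane at infinity, i.e. the flipZ/farAtInfinity branch above (zFar is then unused by the
// resulting matrix).
#if 0 // illustrative only; not compiled
FovPort fov;
fov.UpTan = fov.DownTan = 1.0f;
fov.LeftTan = fov.RightTan = 1.2f;
Matrix4f proj = CreateProjection(
    false /*leftHanded*/, false /*isOpenGL*/, fov, StereoEye_Center,
    0.1f /*zNear*/, 10000.0f /*zFar*/, true /*flipZ*/, true /*farAtInfinity*/);
#endif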
|
||||
|
||||
Matrix4f CreateOrthoSubProjection(
|
||||
bool /*rightHanded*/,
|
||||
StereoEye eyeType,
|
||||
float tanHalfFovX,
|
||||
float tanHalfFovY,
|
||||
float unitsX,
|
||||
float unitsY,
|
||||
float distanceFromCamera,
|
||||
float interpupillaryDistance,
|
||||
Matrix4f const& projection,
|
||||
float zNear /*= 0.0f*/,
|
||||
float zFar /*= 0.0f*/,
|
||||
bool flipZ /*= false*/,
|
||||
bool farAtInfinity /*= false*/) {
|
||||
if (!flipZ && farAtInfinity) {
|
||||
// OVR_ASSERT_M(false, "Cannot push Far Clip to Infinity when Z-order is not flipped");
|
||||
// Assertion disabled because this code no longer has access to LibOVRKernel assertion
|
||||
// functionality.
|
||||
farAtInfinity = false;
|
||||
}
|
||||
|
||||
float orthoHorizontalOffset = interpupillaryDistance * 0.5f / distanceFromCamera;
|
||||
switch (eyeType) {
|
||||
case StereoEye_Left:
|
||||
break;
|
||||
case StereoEye_Right:
|
||||
orthoHorizontalOffset = -orthoHorizontalOffset;
|
||||
break;
|
||||
case StereoEye_Center:
|
||||
orthoHorizontalOffset = 0.0f;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
// Current projection maps real-world vector (x,y,1) to the RT.
|
||||
// We want to find the projection that maps the range [-FovPixels/2,FovPixels/2] to
|
||||
// the physical [-orthoHalfFov,orthoHalfFov]
|
||||
// Note moving the offset from M[0][2]+M[1][2] to M[0][3]+M[1][3] - this means
|
||||
// we don't have to feed in Z=1 all the time.
|
||||
// The horizontal offset math is a little hinky because the destination is
|
||||
// actually [-orthoHalfFov+orthoHorizontalOffset,orthoHalfFov+orthoHorizontalOffset]
|
||||
// So we need to first map [-FovPixels/2,FovPixels/2] to
|
||||
// [-orthoHalfFov+orthoHorizontalOffset,orthoHalfFov+orthoHorizontalOffset]:
|
||||
// x1 = x0 * orthoHalfFov/(FovPixels/2) + orthoHorizontalOffset;
|
||||
// = x0 * 2*orthoHalfFov/FovPixels + orthoHorizontalOffset;
|
||||
// But then we need the same mapping as the existing projection matrix, i.e.
|
||||
// x2 = x1 * Projection.M[0][0] + Projection.M[0][2];
|
||||
// = x0 * (2*orthoHalfFov/FovPixels + orthoHorizontalOffset) * Projection.M[0][0] +
|
||||
// Projection.M[0][2];
|
||||
// = x0 * Projection.M[0][0]*2*orthoHalfFov/FovPixels +
|
||||
// orthoHorizontalOffset*Projection.M[0][0] + Projection.M[0][2];
|
||||
// So in the new projection matrix we need to scale by Projection.M[0][0]*2*orthoHalfFov/FovPixels
|
||||
// and
|
||||
// offset by orthoHorizontalOffset*Projection.M[0][0] + Projection.M[0][2].
|
||||
|
||||
float orthoScaleX = 2.0f * tanHalfFovX / unitsX;
|
||||
float orthoScaleY = 2.0f * tanHalfFovY / unitsY;
|
||||
Matrix4f ortho;
|
||||
ortho.M[0][0] = projection.M[0][0] * orthoScaleX;
|
||||
ortho.M[0][1] = 0.0f;
|
||||
ortho.M[0][2] = 0.0f;
|
||||
ortho.M[0][3] = -projection.M[0][2] + (orthoHorizontalOffset * projection.M[0][0]);
|
||||
|
||||
ortho.M[1][0] = 0.0f;
|
||||
ortho.M[1][1] = -projection.M[1][1] * orthoScaleY; // Note sign flip (text rendering uses Y=down).
|
||||
ortho.M[1][2] = 0.0f;
|
||||
ortho.M[1][3] = -projection.M[1][2];
|
||||
|
||||
const float zDiff = zNear - zFar;
|
||||
if (fabsf(zDiff) < 0.001f) {
|
||||
ortho.M[2][0] = 0.0f;
|
||||
ortho.M[2][1] = 0.0f;
|
||||
ortho.M[2][2] = 0.0f;
|
||||
ortho.M[2][3] = flipZ ? zNear : zFar;
|
||||
} else {
|
||||
ortho.M[2][0] = 0.0f;
|
||||
ortho.M[2][1] = 0.0f;
|
||||
|
||||
if (farAtInfinity) {
|
||||
ortho.M[2][2] = 0.0f;
|
||||
ortho.M[2][3] = zNear;
|
||||
} else if (zDiff != 0.0f) {
|
||||
ortho.M[2][2] = (flipZ ? zNear : zFar) / zDiff;
|
||||
ortho.M[2][3] = ((flipZ ? -zFar : zFar) * zNear) / zDiff;
|
||||
}
|
||||
}
|
||||
|
||||
// No perspective correction for ortho.
|
||||
ortho.M[3][0] = 0.0f;
|
||||
ortho.M[3][1] = 0.0f;
|
||||
ortho.M[3][2] = 0.0f;
|
||||
ortho.M[3][3] = 1.0f;
|
||||
|
||||
return ortho;
|
||||
}
|
||||
|
||||
} // namespace OVR
|