LibOVR 1.43 Reference Guide

OVR_CAPI_Util.h File Reference

This header provides LibOVR utility function declarations.

Macros

#define
OVR_HMD_CONNECTED_EVENT_NAME
This is the Windows Named Event name that is used to check for HMD connected state.
#define
ovr_CalcEyePoses
#define
ovr_GetEyePoses

Enumerations

enum
ovrProjectionModifier {
}
Enumerates modifications to the projection matrix based on the application's needs.
enum
ovrHapticsGenMode {
}
Modes used to generate Touch Haptics from audio PCM buffer.

Functions

OVR_STATIC_ASSERT ( sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch" )
ovrDetectResult
ovr_Detect ( int timeoutMilliseconds )
Detects Oculus Runtime and Device Status.
ovrMatrix4f
ovrMatrix4f_Projection ( ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags )
Used to generate projection from ovrEyeDesc::Fov.
ovrTimewarpProjectionDesc
ovrTimewarpProjectionDesc_FromProjection ( ovrMatrix4f projection, unsigned int projectionModFlags )
Extracts the required data from the result of ovrMatrix4f_Projection.
ovrMatrix4f
ovrMatrix4f_OrthoSubProjection ( ovrMatrix4f projection, ovrVector2f orthoScale, float orthoDistance, float HmdToEyeOffsetX )
Generates an orthographic sub-projection.
void
ovr_CalcEyePoses ( ovrPosef headPose, const ovrVector3f hmdToEyeOffset, ovrPosef outEyePoses )
Computes offset eye poses based on headPose returned by ovrTrackingState.
void
ovr_CalcEyePoses2 ( ovrPosef headPose, const ovrPosef HmdToEyePose, ovrPosef outEyePoses )
void
ovr_GetEyePoses ( ovrSession session, long long frameIndex, ovrBool latencyMarker, const ovrVector3f hmdToEyeOffset, ovrPosef outEyePoses, double * outSensorSampleTime )
Returns the predicted head pose in outHmdTrackingState and offset eye poses in outEyePoses.
void
ovr_GetEyePoses2 ( ovrSession session, long long frameIndex, ovrBool latencyMarker, const ovrPosef HmdToEyePose, ovrPosef outEyePoses, double * outSensorSampleTime )
void
ovrPosef_FlipHandedness ( const ovrPosef * inPose, ovrPosef * outPose )
Tracking poses provided by the SDK come in a right-handed coordinate system.
ovrResult
ovr_ReadWavFromBuffer ( ovrAudioChannelData * outAudioChannel, const void * inputData, int dataSizeInBytes, int stereoChannelToUse )
Reads an audio channel from Wav (Waveform Audio File) data.
ovrResult
ovr_GenHapticsFromAudioData ( ovrHapticsClip * outHapticsClip, const ovrAudioChannelData * audioChannel, ovrHapticsGenMode genMode )
Generates playable Touch Haptics data from an audio channel.
void
ovr_ReleaseAudioChannelData ( ovrAudioChannelData * audioChannel )
Releases memory allocated for ovrAudioChannelData.
void
ovr_ReleaseHapticsClip ( ovrHapticsClip * hapticsClip )
Releases memory allocated for ovrHapticsClip.

Detailed Description

Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.

Macros Documentation

#define OVR_HMD_CONNECTED_EVENT_NAME
This is the Windows Named Event name that is used to check for HMD connected state.
#define ovr_CalcEyePoses
#define ovr_GetEyePoses

Enumeration Type Documentation

enum OVR_CAPI_Util.h.ovrProjectionModifier
Enumerates modifications to the projection matrix based on the application's needs.
Enumerator
ovrProjection_None
Use for generating a default projection matrix with none of the modifications below applied.
ovrProjection_LeftHanded
Enable if using left-handed transformations in your application.
ovrProjection_FarLessThanNear
After the projection transform is applied, far values stored in the depth buffer will be less than closer depth values.
ovrProjection_FarClipAtInfinity
When this flag is used, the zfar value pushed into ovrMatrix4f_Projection() will be ignored and the far clipping plane will be pushed to infinity. NOTE: Enable only if ovrProjection_FarLessThanNear is also enabled.
ovrProjection_ClipRangeOpenGL
Enable if the application is rendering with OpenGL and expects a projection matrix with a clipping range of (-w to w).
enum OVR_CAPI_Util.h.ovrHapticsGenMode
Modes used to generate Touch Haptics from audio PCM buffer.
Enumerator
ovrHapticsGenMode_PointSample
Point sample original signal at Haptics frequency.
ovrHapticsGenMode_Count

Function Documentation

OVR_STATIC_ASSERT ( sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch" )
ovrDetectResult ovr_Detect ( int timeoutMilliseconds )
Detects Oculus Runtime and Device Status.
Checks for Oculus Runtime and Oculus HMD device status without loading the LibOVRRT shared library. This may be called before ovr_Initialize() to help decide whether or not to initialize LibOVR.
Parameters
timeoutMilliseconds
Specifies a timeout to wait for HMD to be attached or 0 to poll.
Returns an ovrDetectResult object indicating the result of detection.
See Also:
ovrMatrix4f ovrMatrix4f_Projection ( ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags )
Used to generate projection from ovrEyeDesc::Fov.
Parameters
fov
Specifies the ovrFovPort to use.
znear
Distance to near Z limit.
zfar
Distance to far Z limit.
projectionModFlags
A combination of the ovrProjectionModifier flags.
Returns the calculated projection matrix.
ovrTimewarpProjectionDesc ovrTimewarpProjectionDesc_FromProjection ( ovrMatrix4f projection, unsigned int projectionModFlags )
Extracts the required data from the result of ovrMatrix4f_Projection.
Parameters
projection
Specifies the project matrix from which to extract ovrTimewarpProjectionDesc.
projectionModFlags
A combination of the ovrProjectionModifier flags.
Returns the extracted ovrTimewarpProjectionDesc.
ovrMatrix4f ovrMatrix4f_OrthoSubProjection ( ovrMatrix4f projection, ovrVector2f orthoScale, float orthoDistance, float HmdToEyeOffsetX )
Generates an orthographic sub-projection.
Used for 2D rendering, Y is down.
Parameters
projection
The perspective matrix that the orthographic matrix is derived from.
orthoScale
Equal to 1.0f / pixelsPerTanAngleAtCenter.
orthoDistance
Equal to the distance from the camera in meters, such as 0.8m.
HmdToEyeOffsetX
Specifies the offset of the eye from the center.
Returns the calculated projection matrix.
void ovr_CalcEyePoses ( ovrPosef headPose, const ovrVector3f hmdToEyeOffset, ovrPosef outEyePoses )
Computes offset eye poses based on headPose returned by ovrTrackingState.
Parameters
headPose
Indicates the HMD position and orientation to use for the calculation.
hmdToEyeOffset
Can be ovrEyeRenderDesc.HmdToEyePose returned from ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the average of the two position vectors for each eye.
outEyePoses
If outEyePoses are used for rendering, they should be passed to ovr_SubmitFrame in ovrLayerEyeFov::RenderPose or ovrLayerEyeFovDepth::RenderPose.
void ovr_CalcEyePoses2 ( ovrPosef headPose, const ovrPosef HmdToEyePose, ovrPosef outEyePoses )
void ovr_GetEyePoses ( ovrSession session, long long frameIndex, ovrBool latencyMarker, const ovrVector3f hmdToEyeOffset, ovrPosef outEyePoses, double * outSensorSampleTime )
Returns the predicted head pose in outHmdTrackingState and offset eye poses in outEyePoses.
This is a thread-safe function where caller should increment frameIndex with every frame and pass that index where applicable to functions called on the rendering thread. Assuming outEyePoses are used for rendering, it should be passed as a part of ovrLayerEyeFov. The caller does not need to worry about applying HmdToEyePose to the returned outEyePoses variables.
Parameters
session
Specifies an ovrSession previously returned by ovr_Create.
frameIndex
Specifies the targeted frame index, or 0 to refer to one frame after the last time ovr_SubmitFrame was called.
latencyMarker
Specifies that this call is the point in time where the "App-to-Mid-Photon" latency timer starts from. If a given ovrLayer provides "SensorSampleTimestamp", that will override the value stored here.
hmdToEyeOffset
Can be ovrEyeRenderDesc.HmdToEyePose returned from ovr_GetRenderDesc. For monoscopic rendering, use a position vector that is the average of the two position vectors for each eye.
outEyePoses
The predicted eye poses.
outSensorSampleTime
The time when this function was called. May be NULL, in which case it is ignored.
void ovr_GetEyePoses2 ( ovrSession session, long long frameIndex, ovrBool latencyMarker, const ovrPosef HmdToEyePose, ovrPosef outEyePoses, double * outSensorSampleTime )
void ovrPosef_FlipHandedness ( const ovrPosef * inPose, ovrPosef * outPose )
Tracking poses provided by the SDK come in a right-handed coordinate system.
If an application is passing in ovrProjection_LeftHanded into ovrMatrix4f_Projection, then it should also use this function to flip the HMD tracking poses to be left-handed.
While this utility function is intended to convert a left-handed ovrPosef into a right-handed coordinate system, it will also work for converting right-handed to left-handed since the flip operation is the same for both cases.
Parameters
inPose
that is right-handed
outPose
that is requested to be left-handed (can be the same pointer to inPose)
ovrResult ovr_ReadWavFromBuffer ( ovrAudioChannelData * outAudioChannel, const void * inputData, int dataSizeInBytes, int stereoChannelToUse )
Reads an audio channel from Wav (Waveform Audio File) data.
Input must be a byte buffer representing a valid Wav file. Audio samples from the specified channel are read, converted to float [-1.0f, 1.0f] and returned through ovrAudioChannelData.
Supported formats: PCM 8b, 16b, 32b and IEEE float (little-endian only).
Parameters
outAudioChannel
output audio channel data.
inputData
a binary buffer representing a valid Wav file data.
dataSizeInBytes
size of the buffer in bytes.
stereoChannelToUse
audio channel index to extract (0 for mono).
ovrResult ovr_GenHapticsFromAudioData ( ovrHapticsClip * outHapticsClip, const ovrAudioChannelData * audioChannel, ovrHapticsGenMode genMode )
Generates playable Touch Haptics data from an audio channel.
Parameters
outHapticsClip
generated Haptics clip.
audioChannel
input audio channel data.
genMode
mode used to convert an audio channel's data to Haptics data.
void ovr_ReleaseAudioChannelData ( ovrAudioChannelData * audioChannel )
Releases memory allocated for ovrAudioChannelData.
Must be called to avoid memory leak.
Parameters
audioChannel
pointer to an audio channel
void ovr_ReleaseHapticsClip ( ovrHapticsClip * hapticsClip )
Releases memory allocated for ovrHapticsClip.
Must be called to avoid memory leak.
Parameters
hapticsClip
pointer to a haptics clip
1
7
8
9
10
11
12
13
14
15
20
26
27
29
30
35
36
40
41
46
47
48
53
60
61
66
67
68
69
70
71
72
73
76
78
79
80
81
85
87
88
90
91
93
94
95
98
100
101
103
104
105
118
119
120
121
122
124
125
137
138
139
148
149
150
162
163
164
165
166
167
168
178
179
180
181
182
183
184
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
236
237
250
251
252
253
254
255
256
263
264
265
266
267
268
272
273
277
278
279
280
281
282
283
/********************************************************************************/
#ifndef OVR_CAPI_Util_h
#define OVR_CAPI_Util_h

#include "OVR_CAPI.h"

#ifdef __cplusplus
extern"C" {
#endif

/// Enumerates modifications to the projection matrix based on the application's needs.
/// Pass a bitwise combination of these flags as the projectionModFlags argument of
/// ovrMatrix4f_Projection().
typedef enum ovrProjectionModifier_ {
  /// Use for generating a default projection matrix with no modifications applied.
  ovrProjection_None = 0x00,

  /// Enable if using left-handed transformations in your application.
  ovrProjection_LeftHanded = 0x01,

  /// After the projection transform is applied, far values stored in the depth buffer
  /// will be less than closer depth values.
  ovrProjection_FarLessThanNear = 0x02,

  /// When this flag is used, the zfar value pushed into ovrMatrix4f_Projection() will
  /// be ignored and the far clipping plane is pushed to infinity.
  /// NOTE: Enable only if ovrProjection_FarLessThanNear is also enabled.
  ovrProjection_FarClipAtInfinity = 0x04,

  /// Enable if the application is rendering with OpenGL and expects a projection
  /// matrix with a clipping range of (-w to w).
  ovrProjection_ClipRangeOpenGL = 0x08,
} ovrProjectionModifier;

typedefstruct OVR_ALIGNAS(8) ovrDetectResult_ {
  ovrBool IsOculusServiceRunning;

  ovrBool IsOculusHMDConnected;

  OVR_UNUSED_STRUCT_PAD(pad0, 6) 

} ovrDetectResult;

OVR_STATIC_ASSERT(sizeof(ovrDetectResult) == 8, "ovrDetectResult size mismatch");

/// Modes used to generate Touch Haptics from an audio PCM buffer.
/// Used by ovr_GenHapticsFromAudioData().
typedef enum ovrHapticsGenMode_ {
  /// Point sample the original signal at the Haptics frequency.
  ovrHapticsGenMode_PointSample,
  ovrHapticsGenMode_Count ///< Count of modes in this enumeration.
} ovrHapticsGenMode;

/// One channel of audio data, with samples normalized to float [-1.0f, 1.0f]
/// (produced by ovr_ReadWavFromBuffer; release with ovr_ReleaseAudioChannelData).
typedef struct ovrAudioChannelData_ {
  const float* Samples; ///< Sample data, allocated by the SDK.

  int SamplesCount; ///< Number of samples in Samples.

  int Frequency; ///< Sample frequency of the channel.
} ovrAudioChannelData;

/// Playable Touch Haptics data (produced by ovr_GenHapticsFromAudioData;
/// release with ovr_ReleaseHapticsClip).
typedef struct ovrHapticsClip_ {
  const void* Samples; ///< Haptics sample data, allocated by the SDK.

  int SamplesCount; ///< Number of samples in Samples.
} ovrHapticsClip;

/// Detects Oculus Runtime and Device Status.
///
/// Checks for Oculus Runtime and Oculus HMD device status without loading the
/// LibOVRRT shared library. This may be called before ovr_Initialize() to help
/// decide whether or not to initialize LibOVR.
///
/// \param timeoutMilliseconds Specifies a timeout to wait for an HMD to be
///        attached, or 0 to poll.
/// \return An ovrDetectResult object indicating the result of detection.
OVR_PUBLIC_FUNCTION(ovrDetectResult) ovr_Detect(int timeoutMilliseconds);

// On the Windows platform, this is the Windows Named Event name that is used
// to check for HMD connected state.
#ifdef _WIN32
#define OVR_HMD_CONNECTED_EVENT_NAME L"OculusHMDConnected"
#endif // _WIN32

/// Used to generate a projection matrix from ovrEyeDesc::Fov.
///
/// \param fov Specifies the ovrFovPort to use.
/// \param znear Distance to near Z limit.
/// \param zfar Distance to far Z limit.
/// \param projectionModFlags A combination of the ovrProjectionModifier flags.
/// \return The calculated projection matrix.
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, unsigned int projectionModFlags);

/// Extracts the required data from the result of ovrMatrix4f_Projection.
///
/// \param projection Specifies the projection matrix from which to extract the
///        ovrTimewarpProjectionDesc.
/// \param projectionModFlags A combination of the ovrProjectionModifier flags.
/// \return The extracted ovrTimewarpProjectionDesc.
OVR_PUBLIC_FUNCTION(ovrTimewarpProjectionDesc)
ovrTimewarpProjectionDesc_FromProjection(ovrMatrix4f projection, unsigned int projectionModFlags);

/// Generates an orthographic sub-projection. Used for 2D rendering, Y is down.
///
/// \param projection The perspective matrix that the orthographic matrix is derived from.
/// \param orthoScale Equal to 1.0f / pixelsPerTanAngleAtCenter.
/// \param orthoDistance Equal to the distance from the camera in meters, such as 0.8m.
/// \param HmdToEyeOffsetX Specifies the offset of the eye from the center.
/// \return The calculated projection matrix.
OVR_PUBLIC_FUNCTION(ovrMatrix4f)
ovrMatrix4f_OrthoSubProjection(
    ovrMatrix4f projection,
    ovrVector2f orthoScale,
    float orthoDistance,
    float HmdToEyeOffsetX);

#undef ovr_CalcEyePoses
/// Computes offset eye poses based on headPose returned by ovrTrackingState.
///
/// \param headPose Indicates the HMD position and orientation to use for the calculation.
/// \param hmdToEyeOffset Per-eye translation offsets (one per eye); for monoscopic
///        rendering, use a position vector that is the average of the two eye offsets.
/// \param outEyePoses The computed eye poses. If used for rendering, pass them to
///        ovr_SubmitFrame in ovrLayerEyeFov::RenderPose or ovrLayerEyeFovDepth::RenderPose.
OVR_PUBLIC_FUNCTION(void)
ovr_CalcEyePoses(ovrPosef headPose, const ovrVector3f hmdToEyeOffset[2], ovrPosef outEyePoses[2]);
/// Variant of ovr_CalcEyePoses taking full ovrPosef offsets instead of ovrVector3f
/// translations; new code is routed here by the macro below.
OVR_PRIVATE_FUNCTION(void)
ovr_CalcEyePoses2(ovrPosef headPose, const ovrPosef HmdToEyePose[2], ovrPosef outEyePoses[2]);
/// Redirect callers of ovr_CalcEyePoses to the ovrPosef-based variant.
#define ovr_CalcEyePoses ovr_CalcEyePoses2

#undef ovr_GetEyePoses
/// Returns the predicted head pose and offset eye poses in outEyePoses.
///
/// This is a thread-safe function where the caller should increment frameIndex
/// with every frame and pass that index where applicable to functions called on
/// the rendering thread. Assuming outEyePoses are used for rendering, they
/// should be passed as a part of ovrLayerEyeFov. The caller does not need to
/// apply the HMD-to-eye offset to the returned outEyePoses.
///
/// \param session Specifies an ovrSession previously returned by ovr_Create.
/// \param frameIndex Specifies the targeted frame index, or 0 to refer to one
///        frame after the last time ovr_SubmitFrame was called.
/// \param latencyMarker Specifies that this call is the point in time where the
///        "App-to-Mid-Photon" latency timer starts. If a given ovrLayer provides
///        "SensorSampleTimestamp", that will override the value stored here.
/// \param hmdToEyeOffset Per-eye translation offsets; for monoscopic rendering,
///        use a position vector that is the average of the two eye offsets.
/// \param outEyePoses The predicted eye poses.
/// \param outSensorSampleTime The time when this function was called. May be
///        NULL, in which case it is ignored.
OVR_PUBLIC_FUNCTION(void)
ovr_GetEyePoses(
    ovrSession session,
    long long frameIndex,
    ovrBool latencyMarker,
    const ovrVector3f hmdToEyeOffset[2],
    ovrPosef outEyePoses[2],
    double* outSensorSampleTime);
/// Variant of ovr_GetEyePoses taking full ovrPosef offsets instead of ovrVector3f
/// translations; new code is routed here by the macro below.
OVR_PRIVATE_FUNCTION(void)
ovr_GetEyePoses2(
    ovrSession session,
    long long frameIndex,
    ovrBool latencyMarker,
    const ovrPosef HmdToEyePose[2],
    ovrPosef outEyePoses[2],
    double* outSensorSampleTime);
/// Redirect callers of ovr_GetEyePoses to the ovrPosef-based variant.
#define ovr_GetEyePoses ovr_GetEyePoses2

/// Flips a pose's handedness.
///
/// Tracking poses provided by the SDK come in a right-handed coordinate system.
/// If an application passes ovrProjection_LeftHanded into ovrMatrix4f_Projection,
/// it should also use this function to flip the HMD tracking poses to be
/// left-handed. The flip operation is the same in both directions, so this also
/// converts right-handed poses to left-handed.
///
/// \param inPose The pose to convert.
/// \param outPose The converted pose (can be the same pointer as inPose).
OVR_PUBLIC_FUNCTION(void) ovrPosef_FlipHandedness(const ovrPosef* inPose, ovrPosef* outPose);

/// Reads an audio channel from Wav (Waveform Audio File) data.
///
/// Input must be a byte buffer representing a valid Wav file. Audio samples from
/// the specified channel are read, converted to float [-1.0f, 1.0f] and returned
/// through ovrAudioChannelData. Supported formats: PCM 8b, 16b, 32b and IEEE
/// float (little-endian only).
///
/// \param outAudioChannel Output audio channel data (release with
///        ovr_ReleaseAudioChannelData).
/// \param inputData A binary buffer representing valid Wav file data.
/// \param dataSizeInBytes Size of the buffer in bytes.
/// \param stereoChannelToUse Audio channel index to extract (0 for mono).
OVR_PUBLIC_FUNCTION(ovrResult)
ovr_ReadWavFromBuffer(
    ovrAudioChannelData* outAudioChannel,
    const void* inputData,
    int dataSizeInBytes,
    int stereoChannelToUse);

/// Generates playable Touch Haptics data from an audio channel.
///
/// \param outHapticsClip The generated Haptics clip (release with ovr_ReleaseHapticsClip).
/// \param audioChannel Input audio channel data.
/// \param genMode Mode used to convert the audio channel data to Haptics data.
OVR_PUBLIC_FUNCTION(ovrResult)
ovr_GenHapticsFromAudioData(
    ovrHapticsClip* outHapticsClip,
    const ovrAudioChannelData* audioChannel,
    ovrHapticsGenMode genMode);

/// Releases memory allocated for ovrAudioChannelData. Must be called to avoid a memory leak.
/// \param audioChannel Pointer to an audio channel.
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseAudioChannelData(ovrAudioChannelData* audioChannel);

/// Releases memory allocated for ovrHapticsClip. Must be called to avoid a memory leak.
/// \param hapticsClip Pointer to a haptics clip.
OVR_PUBLIC_FUNCTION(void) ovr_ReleaseHapticsClip(ovrHapticsClip* hapticsClip);

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif // Header include guard
The documentation for this file was generated from the following file: Include/Extras/OVR_CAPI_Util.h