// Oculus Rift Direct3D 11 rendering sample
#define OVR_D3D_VERSION 11 // select the Direct3D 11 bindings before including the CAPI header
#include "OVR_CAPI_D3D.h"
#include <vector>
bool InitOculus()
{
ovrSession session = 0;
ovrGraphicsLuid luid = 0;
// Initializes LibOVR, and the Rift
ovrInitParams initParams = { ovrInit_RequestVersion, OVR_MINOR_VERSION, NULL, 0, 0 };
if (!OVR_SUCCESS(ovr_Initialize(&initParams)))
return false;
if (!OVR_SUCCESS(ovr_Create(&session, &luid)))
return false;
// FloorLevel will give tracking poses where the floor height is 0
if(!OVR_SUCCESS(ovr_SetTrackingOriginType(session, ovrTrackingOrigin_EyeLevel)))
return false;
return true;
}
struct EyeTexture
{
ovrSession Session;
ovrTextureSwapChain TextureChain;
std::vector TexRtv;
EyeTexture() :
Session(nullptr),
TextureChain(nullptr)
{
}
bool Create(ovrSession session, int sizeW, int sizeH)
{
Session = session;
ovrTextureSwapChainDesc desc = {};
desc.Type = ovrTexture_2D;
desc.ArraySize = 1;
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
desc.Width = sizeW;
desc.Height = sizeH;
desc.MipLevels = 1;
desc.SampleCount = 1;
desc.MiscFlags = ovrTextureMisc_DX_Typeless;
desc.BindFlags = ovrTextureBind_DX_RenderTarget;
desc.StaticImage = ovrFalse;
ovrResult result = ovr_CreateTextureSwapChainDX(Session, pDevice, &desc, &TextureChain);
if (!OVR_SUCCESS(result))
return false;
int textureCount = 0;
ovr_GetTextureSwapChainLength(Session, TextureChain, &textureCount);
for (int i = 0; i < textureCount; ++i)
{
ID3D11Texture2D* tex = nullptr;
ovr_GetTextureSwapChainBufferDX(Session, TextureChain, i, IID_PPV_ARGS(&tex));
D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
ID3D11RenderTargetView* rtv;
DIRECTX.Device->CreateRenderTargetView(tex, &rtvd, &rtv);
TexRtv.push_back(rtv);
tex->Release();
}
return true;
}
~EyeTexture()
{
for (int i = 0; i < (int)TexRtv.size(); ++i)
{
Release(TexRtv[i]);
}
if (TextureChain)
{
ovr_DestroyTextureSwapChain(Session, TextureChain);
}
}
ID3D11RenderTargetView* GetRTV()
{
int index = 0;
ovr_GetTextureSwapChainCurrentIndex(Session, TextureChain, &index);
return TexRtv[index];
}
void Commit()
{
ovr_CommitTextureSwapChain(Session, TextureChain);
}
};
// Query the HMD description and the SDK-recommended render-target size for
// one eye at a 1.0 pixel-density scale.
// NOTE(review): `eye` is not declared in this snippet — presumably this line
// sits inside a per-eye loop (0 = left, 1 = right); confirm against the caller.
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);
ovrSizei idealSize = ovr_GetFovTextureSize(session, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f);
// Create a mirror to see on the monitor.
ovrMirrorTexture mirrorTexture = nullptr;
mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
mirrorDesc.Width = width;
mirrorDesc.Height =height;
ovr_CreateMirrorTextureDX(session, pDXDevice, &mirrorDesc, &mirrorTexture);
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);
ovrEyeRenderDesc eyeRenderDesc[2];
eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);
// Get both eye poses simultaneously, with IPD offset already included.
ovrPosef EyeRenderPose[2];
ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset,
eyeRenderDesc[1].HmdToEyeOffset };
double sensorSampleTime; // sensorSampleTime is fed into the layer later
ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime);
OculusTexture * pEyeTexture[2] = { nullptr, nullptr };
// ...
// Draw into eye textures
// ...
// Initialize our single full screen Fov layer.
ovrLayerEyeFov ld = {};
ld.Header.Type = ovrLayerType_EyeFov;
ld.Header.Flags = 0;
for (int eye = 0; eye < 2; ++eye)
{
ld.ColorTexture[eye] = pEyeTexture[eye]->TextureChain;
ld.Viewport[eye] = eyeRenderViewport[eye];
ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
ld.RenderPose[eye] = EyeRenderPose[eye];
ld.SensorSampleTime = sensorSampleTime;
}
ovrLayerHeader* layers = &ld.Header;
ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1);
ID3D11Texture2D* tex = nullptr;
ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&tex));
pDXContext->CopyResource(backBufferTexture, tex);