Oculus Rift DK2 + Mogre 1.7.4 (working, missing shader)

RMI

13-05-2015 11:14:12

Hi,
I finally got the Oculus Rift (Development Kit 2, SDK 0.4.4) working with Mogre (1.7.4).
So I'm putting the source code here in case someone else wants to try.
[note: I'm French, so I hope everything is spelled correctly and readable]
I started from the SDK samples and from a sample using the Oculus with SlimDX and C# found at https://oculuswrap.codeplex.com/, so some code and comments are pasted from there.
First of all, there are two ways to use the Oculus: "Direct access" or "Extended desktop".
The first way ("Direct access") requires giving the Oculus direct access to the back buffers and swap chain, and I couldn't get it working with Mogre (but see http://www.ogre3d.org/forums/viewtopic.php?f=5&t=76970 on the main Ogre forum, where it seems they got it working with the latest Ogre version).
For the second way (Extended desktop), first open the "Oculus Configuration Utility", then select "Tools => Rift display mode" and check "Extend Desktop to the HMD".
The Oculus will show up as a second 1080*1920 screen in "Portrait" orientation with a 90° rotation.
With this way, we render two cameras (one per eye) onto two meshes that compensate for the distortion of the Oculus lenses; a third camera then renders these two meshes side by side on the screen/Oculus.
Now you will need a wrapper for the Oculus. I used this one: https://oculuswrap.codeplex.com/
Here starts the Mogre part.

To display on the Oculus you'll need two materials to render the two eyes ("Oculus.material"):

material Oculus/LeftEye
{
    receive_shadows off
    technique
    {
        pass Oculus/LeftEye
        {
            lighting off
            texture_unit
            {
            }
        }
    }
}

material Oculus/RightEye
{
    receive_shadows off
    technique
    {
        pass Oculus/RightEye
        {
            lighting off
            texture_unit
            {
            }
        }
    }
}


In the code you'll need:

//An access to the Oculus
private Hmd hmd;
private Wrap oculusWrap;
//To store the Oculus tracking information
private float fInterpupillaryDistance;
private Quaternion qOculusOrientation;
private Vector3 v3OculusPosition;
//Three cameras: one for each eye and a final one for the render window / Oculus
private Camera camera;
private Camera camLeft;
private Camera camRight;
//Two RenderTextures for the two "eye cameras"
private TexturePtr leftEyeTexturePtr;
private TexturePtr rightEyeTexturePtr;
private RenderTexture leftRenderTexture;
private RenderTexture rightRenderTexture;
private MaterialPtr leftEyeMaterialPtr;
private MaterialPtr rightEyeMaterialPtr;
//Some scene nodes to move the cameras
private SceneNode snHeadNode, snEyesNode;
//And an access to:
//the SceneManager
public SceneManager SceneManager;
//the RenderWindow
public RenderWindow RenderWindow;
//the Root
public Root Root { get; private set; }


When creating your render window you'll need to give it the Oculus resolution and send it to the Oculus second screen:

Root.Initialise(false, "Main Ogre Window");
NameValuePairList misc = new NameValuePairList();
// Oculus second screen
misc["monitorIndex"] = "1";
misc["border"] = "none";
// Oculus resolution
RenderWindow = Root.CreateRenderWindow("Oculus Rift Liver Visualization", 1080, 1920, true, misc);
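
If you would rather not hardcode the panel size, you can also create the HMD first (OculusInit below) and ask it for its resolution. A minimal sketch, assuming the OculusWrap Hmd exposes the ovrHmdDesc Resolution field:

// Hedged alternative: query the panel size from the HMD instead of hardcoding it.
// Assumes OculusInit() has already been called and that OculusWrap exposes hmd.Resolution.
if (hmd != null)
{
    OVR.Sizei resolution = hmd.Resolution; // 1080x1920 for a DK2 in portrait mode
    RenderWindow = Root.CreateRenderWindow("Oculus Rift Liver Visualization",
        (uint)resolution.Width, (uint)resolution.Height, true, misc);
}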


Now, before calling "Root.StartRendering()", there are three initialisation steps.

First, initialise the Oculus:

private void OculusInit()
{
    oculusWrap = new Wrap();

    // Initialize the Oculus runtime.
    oculusWrap.Initialize();

    // Create the first head mounted display, if one is detected.
    int numberOfHeadMountedDisplays = oculusWrap.Hmd_Detect();
    if (numberOfHeadMountedDisplays > 0)
        hmd = oculusWrap.Hmd_Create(0);
    else
        return;

    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (hmd.ProductName == string.Empty)
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);

    // Configure head tracking (orientation, magnetic yaw correction and position).
    if (!hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None))
    {
        hmd.Dispose();
        oculusWrap.Dispose();
        MessageBox.Show("TRACKING INIT ERROR");
    }
}


Then initialise the distortion meshes and render textures for the eye cameras:

private void OculusDistortionMeshesInit()
{
    // Configure render textures at the size recommended by the SDK:
    OVR.Sizei recommendedTexLeftSize = hmd.GetFovTextureSize(OVR.EyeType.Left, hmd.DefaultEyeFov[0], 1.0f);
    OVR.Sizei recommendedTexRightSize = hmd.GetFovTextureSize(OVR.EyeType.Right, hmd.DefaultEyeFov[1], 1.0f);

    leftEyeTexturePtr = TextureManager.Singleton.CreateManual("RiftRenderTextureLeft", ResourceGroupManager.DEFAULT_RESOURCE_GROUP_NAME, TextureType.TEX_TYPE_2D, (uint)recommendedTexLeftSize.Width, (uint)recommendedTexLeftSize.Height, 0, PixelFormat.PF_R8G8B8, (int)TextureUsage.TU_RENDERTARGET);

    rightEyeTexturePtr = TextureManager.Singleton.CreateManual("RiftRenderTextureRight", ResourceGroupManager.DEFAULT_RESOURCE_GROUP_NAME, TextureType.TEX_TYPE_2D, (uint)recommendedTexRightSize.Width, (uint)recommendedTexRightSize.Height, 0, PixelFormat.PF_R8G8B8, (int)TextureUsage.TU_RENDERTARGET);

    // Assign the textures to the eye materials defined earlier:
    leftEyeMaterialPtr = MaterialManager.Singleton.GetByName("Oculus/LeftEye");
    leftEyeMaterialPtr.GetTechnique(0).GetPass(0).GetTextureUnitState(0).SetTextureName("RiftRenderTextureLeft");

    rightEyeMaterialPtr = MaterialManager.Singleton.GetByName("Oculus/RightEye");
    rightEyeMaterialPtr.GetTechnique(0).GetPass(0).GetTextureUnitState(0).SetTextureName("RiftRenderTextureRight");

    OVR.EyeRenderDesc[] eyeRenderDesc = new OVR.EyeRenderDesc[2];
    eyeRenderDesc[0] = hmd.GetRenderDesc(OVR.EyeType.Left, hmd.DefaultEyeFov[0]);
    eyeRenderDesc[1] = hmd.GetRenderDesc(OVR.EyeType.Right, hmd.DefaultEyeFov[1]);

    OVR.Recti[] viewports = new OVR.Recti[2];
    viewports[0].Position.x = 0;
    viewports[0].Position.y = 0;
    viewports[0].Size.Width = recommendedTexLeftSize.Width;
    viewports[0].Size.Height = recommendedTexLeftSize.Height;
    viewports[1].Position.x = recommendedTexLeftSize.Width;
    viewports[1].Position.y = 0;
    viewports[1].Size.Width = recommendedTexRightSize.Width;
    viewports[1].Size.Height = recommendedTexRightSize.Height;

    SceneNode meshNode = SceneManager.RootSceneNode.CreateChildSceneNode();

    OVR.DistortionVertex[] vertexDatas;
    OVR.Vector2f[] UVScaleOffset;
    ushort[] indexDatas;
    ManualObject manual;

    // Create the left distortion mesh
    hmd.CreateDistortionMesh(eyeRenderDesc[0].Eye, eyeRenderDesc[0].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[0].Fov, recommendedTexLeftSize, viewports[0], out UVScaleOffset);

    // Create the ManualObject
    // TODO: Destroy the manual objects!!
    manual = SceneManager.CreateManualObject("RiftRenderObjectLeft");
    manual.Begin("Oculus/LeftEye", RenderOperation.OperationTypes.OT_TRIANGLE_LIST);

    for (uint i = 0; i < vertexDatas.Length; i++)
    {
        OVR.DistortionVertex v = vertexDatas[i];
        manual.Position(v.ScreenPosNDC.X, v.ScreenPosNDC.Y, 0);
        manual.TextureCoord(v.TanEyeAnglesR.X * UVScaleOffset[0].X + UVScaleOffset[1].X, v.TanEyeAnglesR.Y * UVScaleOffset[0].Y + UVScaleOffset[1].Y);
    }
    for (uint i = 0; i < indexDatas.Length; i++)
    {
        manual.Index(indexDatas[i]);
    }
    // tell Ogre the definition is finished
    manual.End();
    meshNode.AttachObject(manual);

    // Create the right distortion mesh
    hmd.CreateDistortionMesh(eyeRenderDesc[1].Eye, eyeRenderDesc[1].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[1].Fov, recommendedTexRightSize, viewports[1], out UVScaleOffset);

    // Create the ManualObject
    // TODO: Destroy the manual objects!!
    manual = SceneManager.CreateManualObject("RiftRenderObjectRight");
    manual.Begin("Oculus/RightEye", RenderOperation.OperationTypes.OT_TRIANGLE_LIST);

    for (uint i = 0; i < vertexDatas.Length; i++)
    {
        OVR.DistortionVertex v = vertexDatas[i];
        manual.Position(v.ScreenPosNDC.X, v.ScreenPosNDC.Y, 0);
        manual.TextureCoord(v.TanEyeAnglesR.X * UVScaleOffset[0].X + UVScaleOffset[1].X, v.TanEyeAnglesR.Y * UVScaleOffset[0].Y + UVScaleOffset[1].Y);
    }
    for (uint i = 0; i < indexDatas.Length; i++)
    {
        manual.Index(indexDatas[i]);
    }
    // tell Ogre the definition is finished
    manual.End();
    meshNode.AttachObject(manual);

    meshNode.SetPosition(0, 0, -1);
    meshNode.SetScale(1, 1, -1);
}


Finally, initialise your cameras:

private void SetCameras()
{
    //Create a scene node which the main cam will be attached to:
    SceneNode cameraNode = SceneManager.RootSceneNode.CreateChildSceneNode("Camera");
    cameraNode.SetPosition(0, 0, 0);

    //Create the main camera, so the distortion meshes can be rendered onto it
    //(if your scene already has one, you can use it instead):
    camera = SceneManager.CreateCamera("Camera");
    camera.ProjectionType = ProjectionType.PT_ORTHOGRAPHIC;
    camera.SetOrthoWindow(2, 2);
    camera.SetPosition(0, 0, 0);
    camera.FarClipDistance = 5000;
    camera.NearClipDistance = 0.001f;
    //because the Oculus displays a 90° rotated screen
    camera.Roll(new Radian(-Math.PI / 2));

    //attach it to the node
    cameraNode.AttachObject(camera);

    //set the viewport
    //(if your scene already has one, call RenderWindow.RemoveAllViewports() first)
    Viewport viewport = RenderWindow.AddViewport(camera);
    viewport.BackgroundColour = ColourValue.White;

    //now that you have a viewport, set it up for the Oculus
    Viewport oculusViewport = RenderWindow.GetViewport(0);
    oculusViewport.BackgroundColour = ColourValue.Black;
    oculusViewport.OverlaysEnabled = true;

    // Get the IPD (interpupillary distance) in meters:
    fInterpupillaryDistance = hmd.GetFloat(OVR.OVR_KEY_IPD, 0.064f);

    // Create the scene nodes which the eye cams will be attached to:
    snHeadNode = SceneManager.RootSceneNode.CreateChildSceneNode("HeadNode");
    //set the position
    snHeadNode.SetPosition(0, 180, 0);

    camLeft = SceneManager.CreateCamera("LeftCamera");
    camRight = SceneManager.CreateCamera("RightCamera");

    snEyesNode = snHeadNode.CreateChildSceneNode("EyesNode");
    snEyesNode.AttachObject(camLeft);
    snEyesNode.AttachObject(camRight);

    // Position the cameras according to the interpupillary distance
    camLeft.SetPosition(-fInterpupillaryDistance / 2.0f, 0.0f, 0.0f);
    camRight.SetPosition(fInterpupillaryDistance / 2.0f, 0.0f, 0.0f);

    //attach the render textures to the eye cams
    leftRenderTexture = leftEyeTexturePtr.GetBuffer().GetRenderTarget();
    leftRenderTexture.AddViewport(camLeft);
    leftRenderTexture.GetViewport(0).SetClearEveryFrame(true);
    leftRenderTexture.GetViewport(0).BackgroundColour = RenderWindow.GetViewport(0).BackgroundColour;
    leftRenderTexture.GetViewport(0).OverlaysEnabled = true;

    rightRenderTexture = rightEyeTexturePtr.GetBuffer().GetRenderTarget();
    rightRenderTexture.AddViewport(camRight);
    rightRenderTexture.GetViewport(0).SetClearEveryFrame(true);
    rightRenderTexture.GetViewport(0).BackgroundColour = RenderWindow.GetViewport(0).BackgroundColour;
    rightRenderTexture.GetViewport(0).OverlaysEnabled = true;

    // Compute the aspect ratio and projection matrices from the HMD's FOV
    OVR.FovPort fovLeft = hmd.DefaultEyeFov[0];
    OVR.FovPort fovRight = hmd.DefaultEyeFov[1];

    float combinedTanHalfFovHorizontal = System.Math.Max(fovLeft.LeftTan, fovLeft.RightTan);
    float combinedTanHalfFovVertical = System.Math.Max(fovLeft.UpTan, fovLeft.DownTan);
    float aspectRatio = combinedTanHalfFovHorizontal / combinedTanHalfFovVertical;
    camLeft.AspectRatio = aspectRatio;
    camRight.AspectRatio = aspectRatio;

    OVR.Matrix4f projL = OVR.ovrMatrix4f_Projection(fovLeft, camLeft.NearClipDistance, camLeft.FarClipDistance, 1);
    OVR.Matrix4f projR = OVR.ovrMatrix4f_Projection(fovRight, camRight.NearClipDistance, camRight.FarClipDistance, 1);
    camLeft.SetCustomProjectionMatrix(true,
        new Matrix4(projL.M11, projL.M12, projL.M13, projL.M14,
                    projL.M21, projL.M22, projL.M23, projL.M24,
                    projL.M31, projL.M32, projL.M33, projL.M34,
                    projL.M41, projL.M42, projL.M43, projL.M44));
    camRight.SetCustomProjectionMatrix(true,
        new Matrix4(projR.M11, projR.M12, projR.M13, projR.M14,
                    projR.M21, projR.M22, projR.M23, projR.M24,
                    projR.M31, projR.M32, projR.M33, projR.M34,
                    projR.M41, projR.M42, projR.M43, projR.M44));
}


Now you can call:

Root.StartRendering();

and your Oculus should display your scene.
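
To recap, the overall call order looks like this (a minimal sketch using the method names from this post; your own scene setup goes in between):

Root.Initialise(false, "Main Ogre Window");
//create the RenderWindow on the Oculus screen as shown above
OculusInit();                  //runtime, HMD and tracking
OculusDistortionMeshesInit();  //render textures and distortion meshes
SetCameras();                  //eye cameras, viewports, projections
Root.FrameRenderingQueued += Root_FrameRenderingQueued; //head tracking (see below)
Root.StartRendering();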
If you want the rendering to follow the Oculus head movements, you'll need to subscribe to FrameRenderingQueued:

Root.FrameRenderingQueued += Root_FrameRenderingQueued;



private bool Root_FrameRenderingQueued(FrameEvent _frameEvent)
{
    if (hmd != null)
    {
        if (UpdateOculus(_frameEvent.timeSinceLastFrame))
        {
            SetOculusPose(qOculusOrientation, v3OculusPosition);
        }
    }
    return true;
}

private bool UpdateOculus(float dt)
{
    if (hmd == null) return true;
    OVR.FrameTiming frameTiming = hmd.BeginFrameTiming(0);
    OVR.TrackingState ts = hmd.GetTrackingState(frameTiming.ScanoutMidpointSeconds);

    if (ts.StatusFlags.HasFlag(OVR.StatusBits.ovrStatus_OrientationTracked) || ts.StatusFlags.HasFlag(OVR.StatusBits.ovrStatus_PositionTracked))
    {
        // The cpp compatibility layer is used to convert ovrPosef to Posef (see OVR_Math.h)
        OVR.Posef pose = ts.HeadPose.ThePose;
        qOculusOrientation = new Quaternion(pose.Orientation.W, pose.Orientation.X, pose.Orientation.Y, pose.Orientation.Z);
        v3OculusPosition = new Vector3(pose.Position.X, pose.Position.Y, pose.Position.Z);
    }

    OVR.ovr_WaitTillTime(frameTiming.TimewarpPointSeconds);

    hmd.EndFrameTiming();
    return true;
}

private void SetOculusPose(Quaternion _qOrientation, Mogre.Vector3 _v3Pos)
{
    snEyesNode.Orientation = _qOrientation;
    snEyesNode.Position = _v3Pos;
}
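
About the "TODO: Destroy the manual objects!!" comments above: on shutdown you should also release the Oculus, mirroring the Dispose calls from the error path in OculusInit. A minimal sketch (the names are the ones created above; adapt it to your own cleanup path):

private void OculusShutdown()
{
    if (SceneManager != null)
    {
        //destroy the two distortion meshes created in OculusDistortionMeshesInit
        SceneManager.DestroyManualObject("RiftRenderObjectLeft");
        SceneManager.DestroyManualObject("RiftRenderObjectRight");
    }
    if (hmd != null)
    {
        hmd.Dispose();
        hmd = null;
    }
    if (oculusWrap != null)
    {
        oculusWrap.Dispose();
        oculusWrap = null;
    }
}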

RMI

13-05-2015 13:35:37

And now the part where I need help.
For a better result, the Oculus developer guide provides two shaders:
A vertex shader to improve the distortion (it also applies the timewarp correction).
A pixel shader to give a smooth fade-to-black effect at the edges.
My problem is that, first, I have never used shaders, and second, the HLSL targets used (vs_4_0/ps_4_0 are Direct3D 10/11 profiles) don't seem to be supported by the Direct3D 9 render system wrapped in Mogre 1.7.4.
I don't know if it's possible to convert them.
From the Oculus sample, this is how I think they should be used.

"Oculus.material" to replace the previous one

material Oculus/LeftEye
{
    receive_shadows off
    technique
    {
        pass Oculus/LeftEye
        {
            vertex_program_ref OculusDistortionVertexShader
            {
            }
            fragment_program_ref OculusDistortionPixelShader
            {
            }
            lighting off
            texture_unit
            {
            }
        }
    }
}

material Oculus/RightEye
{
    receive_shadows off
    technique
    {
        pass Oculus/RightEye
        {
            vertex_program_ref OculusDistortionVertexShader
            {
            }
            fragment_program_ref OculusDistortionPixelShader
            {
            }
            lighting off
            texture_unit
            {
            }
        }
    }
}


"Oculus.program"

vertex_program OculusDistortionVertexShader hlsl
{
    source OculusDistortionVertexShader.hlsl
    target vs_4_0
    entry_point main
    default_params
    {
        param_named EyeToSourceUVScale float2 1.0 1.0
        param_named EyeToSourceUVOffset float2 0.0 0.0
        param_named EyeRotationStart matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
        param_named EyeRotationEnd matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
    }
}

fragment_program OculusDistortionPixelShader hlsl
{
    source OculusDistortionPixelShader.hlsl
    target ps_4_0
    entry_point main
}


"OculusDistortionVertexShader.hlsl"

float2 EyeToSourceUVScale, EyeToSourceUVOffset;
float4x4 EyeRotationStart, EyeRotationEnd;

float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)
{
    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
    // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
    // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);
    // Project them back onto the Z=1 plane of the rendered images.
    float2 flattened = (transformed.xy / transformed.z);
    // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);
}

void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1,
          in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0,
          in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2,
          out float4 oPosition : SV_Position, out float2 oTexCoord0 : TEXCOORD0,
          out float2 oTexCoord1 : TEXCOORD1, out float2 oTexCoord2 : TEXCOORD2,
          out float oVignette : TEXCOORD3)
{
    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);
    oTexCoord0 = TimewarpTexCoord(TexCoord0, lerpedEyeRot);
    oTexCoord1 = TimewarpTexCoord(TexCoord1, lerpedEyeRot);
    oTexCoord2 = TimewarpTexCoord(TexCoord2, lerpedEyeRot);
    oPosition = float4(Position.xy, 0.5, 1.0);
    oVignette = Vignette; /* For vignette fade */
}


"OculusDistortionPixelShader.hlsl"

Texture2D Texture : register(t0);
SamplerState Linear : register(s0);

float4 main(in float4 oPosition : SV_Position, in float2 oTexCoord0 : TEXCOORD0,
            in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2,
            in float oVignette : TEXCOORD3) : SV_Target
{
    // 3 samples for fixing chromatic aberrations
    float R = Texture.Sample(Linear, oTexCoord0.xy).r;
    float G = Texture.Sample(Linear, oTexCoord1.xy).g;
    float B = Texture.Sample(Linear, oTexCoord2.xy).b;
    return (oVignette*float4(R,G,B,1));
}


And here is where they should be used:

GpuProgramParametersSharedPtr gpuProgramParametersSharedPtrLeft;
GpuProgramParametersSharedPtr gpuProgramParametersSharedPtrRight;

[...]

private void OculusDistortionMeshesInit()
{
    [...]

    //Create the left distortion mesh
    hmd.CreateDistortionMesh(eyeRenderDesc[0].Eye, eyeRenderDesc[0].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[0].Fov, recommendedTexLeftSize, viewports[0], out UVScaleOffset);
    gpuProgramParametersSharedPtrLeft = leftEyeMaterialPtr.GetTechnique(0).GetPass(0).GetVertexProgramParameters();

    //Pass the UV scale/offset to the vertex shader as raw float pairs
    unsafe
    {
        float[] UVScale = new float[2] { UVScaleOffset[0].X, UVScaleOffset[0].Y };
        float[] UVOffset = new float[2] { UVScaleOffset[1].X, UVScaleOffset[1].Y };
        fixed (float* ptrUVScale = UVScale)
        {
            fixed (float* ptrUVOffset = UVOffset)
            {
                gpuProgramParametersSharedPtrLeft.SetNamedConstant("EyeToSourceUVScale", ptrUVScale, 2);
                gpuProgramParametersSharedPtrLeft.SetNamedConstant("EyeToSourceUVOffset", ptrUVOffset, 2);
            }
        }
    }

    [...]

    //Create the right distortion mesh
    hmd.CreateDistortionMesh(eyeRenderDesc[1].Eye, eyeRenderDesc[1].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[1].Fov, recommendedTexRightSize, viewports[1], out UVScaleOffset);
    gpuProgramParametersSharedPtrRight = rightEyeMaterialPtr.GetTechnique(0).GetPass(0).GetVertexProgramParameters();

    unsafe
    {
        float[] UVScale = new float[2] { UVScaleOffset[0].X, UVScaleOffset[0].Y };
        float[] UVOffset = new float[2] { UVScaleOffset[1].X, UVScaleOffset[1].Y };
        fixed (float* ptrUVScale = UVScale)
        {
            fixed (float* ptrUVOffset = UVOffset)
            {
                gpuProgramParametersSharedPtrRight.SetNamedConstant("EyeToSourceUVScale", ptrUVScale, 2);
                gpuProgramParametersSharedPtrRight.SetNamedConstant("EyeToSourceUVOffset", ptrUVOffset, 2);
            }
        }
    }

[...]

private bool UpdateOculus(float dt)
{
    [...]
    v3OculusPosition = new Vector3(pose.Position.X, pose.Position.Y, pose.Position.Z);

    //Update the timewarp matrices for each eye's vertex shader
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        OVR.Matrix4f[] tWM = new OVR.Matrix4f[2];
        hmd.GetEyeTimewarpMatrices(eyeNum == 0 ? OVR.EyeType.Left : OVR.EyeType.Right, pose, out tWM);

        GpuProgramParametersSharedPtr gpuProgramParametersSharedPtr;
        gpuProgramParametersSharedPtr = eyeNum == 0 ? gpuProgramParametersSharedPtrLeft : gpuProgramParametersSharedPtrRight;

        gpuProgramParametersSharedPtr.SetNamedConstant("EyeRotationStart", new Matrix4(tWM[0].M11, tWM[0].M12, tWM[0].M13, tWM[0].M14,
                                                                                       tWM[0].M21, tWM[0].M22, tWM[0].M23, tWM[0].M24,
                                                                                       tWM[0].M31, tWM[0].M32, tWM[0].M33, tWM[0].M34,
                                                                                       tWM[0].M41, tWM[0].M42, tWM[0].M43, tWM[0].M44));
        gpuProgramParametersSharedPtr.SetNamedConstant("EyeRotationEnd", new Matrix4(tWM[1].M11, tWM[1].M12, tWM[1].M13, tWM[1].M14,
                                                                                     tWM[1].M21, tWM[1].M22, tWM[1].M23, tWM[1].M24,
                                                                                     tWM[1].M31, tWM[1].M32, tWM[1].M33, tWM[1].M34,
                                                                                     tWM[1].M41, tWM[1].M42, tWM[1].M43, tWM[1].M44));
    }

}



Maybe someone will find out how to use them correctly.

Beauty

30-05-2015 15:22:03

I finally got the Oculus Rift (Development Kit 2, SDK 0.4.4) working with Mogre
This is fine.
Thanks for sharing all the useful information.
If you like, we can create a page in the Ogre wiki. It's a better place than hidden in a forum post.

My problem is that, first, I have never used shaders, and second, the HLSL targets used don't seem to be supported by the Ogre version wrapped in Mogre 1.7.4
I'm sorry, but I have no knowledge about shader programming.

I don't know if it's possible to convert them.
If you want to port shader code to another shader language, you can create a post in the main Ogre forum. I'm sure there are people there with good shader knowledge.

In general you can modify the code of the wrapped Ogre library. With MogreBuilder it's easy to create the binaries.
The downside: you need to know what to change.

RMI

01-06-2015 13:53:21

Hi,
Thanks.
Yes, you can put it in the wiki if you want.
I posted it here first because I use a lot of plug-ins with Mogre, so I tried to strip the code of all uses of loaders/plug-ins/...
But I wasn't sure I hadn't forgotten anything while doing this.
So I thought that, if I post it here, maybe someone will give it a try and post whether it also works for them.

For the shaders I'll try the main Ogre forum, but first I need to check exactly which shader version the sample uses and which versions I can use with Mogre 1.7.4.
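
For that check, a minimal sketch, assuming Mogre wraps Ogre's GpuProgramManager::isSyntaxSupported like the C++ API:

//Ask Ogre which shader targets the current render system accepts.
//With the Direct3D 9 render system the D3D10/11 profiles (vs_4_0/ps_4_0) should report false.
bool hasPs40 = GpuProgramManager.Singleton.IsSyntaxSupported("ps_4_0");
bool hasPs30 = GpuProgramManager.Singleton.IsSyntaxSupported("ps_3_0");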

For editing the wrapped Ogre library: the Oculus team said that, for the "Direct access" method, they'll drop DirectX 9 support in the upcoming SDKs. So I think it would be a waste of time trying to get this method working for now (DirectX 9 is the only render system supported by Mogre 1.7.4).
The nice thing about the "Extended desktop" method is that it's independent of the render system plug-in used. With the other method you need a different way to initialise the Oculus for each render system.
Also, from what I read on the main forum, it seems that Ogre itself is missing entry points to some buffers needed for that method; they had to move to Ogre 2.0.

Gecc

18-04-2016 14:41:01

Hi,

I needed an Oculus DK2 that works with Mogre 1.8, and I converted the shaders to GLSL in order to use them with the OpenGL RenderSystem.

First the program script, very basic:


vertex_program oculus_vs glsl
{
    source oculus_vs.glsl
    syntax arbvp1
}

fragment_program oculus_fs glsl
{
    source oculus_fs.glsl
    syntax arbvp1
}


Then the material script:

material Oculus/LeftEye
{
    receive_shadows off
    technique
    {
        pass Oculus/LeftEye
        {
            vertex_program_ref oculus_vs
            {
                param_named EyeToSourceUVScale float2 1.0 1.0
                param_named EyeToSourceUVOffset float2 0.0 0.0
                param_named EyeRotationStart matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
                param_named EyeRotationEnd matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
            }
            fragment_program_ref oculus_fs
            {
                param_named tex0 int 0
            }

            lighting off
            texture_unit
            {
            }
        }
    }
}

material Oculus/RightEye
{
    receive_shadows off
    technique
    {
        pass Oculus/RightEye
        {
            vertex_program_ref oculus_vs
            {
                param_named EyeToSourceUVScale float2 1.0 1.0
                param_named EyeToSourceUVOffset float2 0.0 0.0
                param_named EyeRotationStart matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
                param_named EyeRotationEnd matrix4x4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
            }
            fragment_program_ref oculus_fs
            {
                param_named tex0 int 0
            }

            lighting off
            texture_unit
            {
            }
        }
    }
}


The vertex shader:

#version 150

uniform vec2 EyeToSourceUVScale;
uniform vec2 EyeToSourceUVOffset;
uniform mat4 EyeRotationStart;
uniform mat4 EyeRotationEnd;

vec2 TimewarpTexCoord(vec2 TexCoord, mat4 rotMat)
{
    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
    // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
    // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
    vec3 transformed = vec3(rotMat * vec4(TexCoord.xy, 1, 1)).xyz;
    // Project them back onto the Z=1 plane of the rendered images.
    vec2 flattened = (transformed.xy / transformed.z);
    // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
    return vec2(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);
}

in vec4 vertex;
in vec4 uv0;
in vec4 uv1;
in vec4 uv2;
in vec4 uv3;

out vec2 oTexCoord0;
out vec2 oTexCoord1;
out vec2 oTexCoord2;
out float oVignette;

void main()
{
    float VignetteFactor = uv3.x;
    float TimeWarpFactor = uv3.y;

    mat4 lerpedEyeRot = EyeRotationStart * (1.0 - TimeWarpFactor) + EyeRotationEnd * TimeWarpFactor;
    oTexCoord0 = TimewarpTexCoord(uv0.xy, lerpedEyeRot);
    oTexCoord1 = TimewarpTexCoord(uv1.xy, lerpedEyeRot);
    oTexCoord2 = TimewarpTexCoord(uv2.xy, lerpedEyeRot);
    oVignette = VignetteFactor; /* For vignette fade */
    //gl_Position = vec4(vertex.xy, -1.0, 1.0);
    gl_Position = vec4(-vertex.y, vertex.x, -1.0, 1.0); // rotation -90° because the Oculus displays a 90° rotated screen
}


The fragment shader:

#version 150

in vec2 oTexCoord0;
in vec2 oTexCoord1;
in vec2 oTexCoord2;
in float oVignette;

out vec4 Out_Color;

uniform sampler2D tex0;

void main()
{
    // 3 samples for fixing chromatic aberrations
    float R = texture(tex0, oTexCoord0.xy).r;
    float G = texture(tex0, oTexCoord1.xy).g;
    float B = texture(tex0, oTexCoord2.xy).b;
    Out_Color = oVignette * vec4(R,G,B,1);
}


And in the code, for each eye, the distortion mesh definition must now push four texture coordinate sets per vertex (matching the uv0..uv3 inputs of the vertex shader above):


for (uint i = 0; i < vertexDatas.Length; i++)
{
    OVR.DistortionVertex v = vertexDatas[i];
    manual.Position(v.ScreenPosNDC.X, v.ScreenPosNDC.Y, 0);
    manual.TextureCoord(v.TanEyeAnglesR.X, v.TanEyeAnglesR.Y); // uv0: red sample
    manual.TextureCoord(v.TanEyeAnglesG.X, v.TanEyeAnglesG.Y); // uv1: green sample
    manual.TextureCoord(v.TanEyeAnglesB.X, v.TanEyeAnglesB.Y); // uv2: blue sample
    manual.TextureCoord(v.VignetteFactor, v.TimeWarpFactor);   // uv3: vignette and timewarp factors
}