@paul.kemppi Here is an explanation of how to apply a shader to the world camera images and save the result.
Here are some more detailed instructions. I'll also try to put together a sample project and share it.
Using the following from the WorldCameraExample.cs from our samples:
/// <summary>
/// Fetches the latest world-camera frames and renders/annotates the one that
/// matches the currently selected camera + frame-type combination.
/// </summary>
private void GetCameraData()
{
    if (!worldCamera.GetLatestWorldCameraData(out MLWorldCamera.Frame[] frames).IsOk)
        return;

    foreach (var frame in frames)
    {
        // Tuple literal builds the same ValueTuple<CameraId, Frame.Type> as the
        // explicit constructor; renamed to avoid shadowing the UI text field.
        var current = (frame.CameraId, frame.FrameType);
        if (cameraAndFrameTypes[activeCombinationIndex] != current)
            continue;

        worldCamVisualizer.RenderFrame(frame);
        this.cameraAndFrameType.text = $"Camera: {current.Item1}\nType: {current.Item2}";
        frameInfo.text = $"CameraPose: {frame.CameraPose}, CameraIntrinsics: {frame.CameraIntrinsics}, FrameBuffer: {frame.FrameBuffer}";
        break;
    }
}
We make the following changes to WorldCameraVisualizer.cs
- add
private RenderTexture _undistortedTexture;
- add `private Material _undistortMaterial;`
Initialize both in the Start()
method:
// Look up the undistortion shader by name. Add the shader to the scene or to
// Project Settings > Always Included Shaders so that it does not get stripped during build.
Shader shader = Shader.Find("Custom/UndistortWorldCamera");
_undistortMaterial = new Material(shader);
// 1016x1016 matches the world-camera frame size used throughout this sample.
// NOTE(review): the trailing 0 appears to select the (colorFormat, depthStencilFormat)
// overload with no depth buffer (fine for Blit) — confirm on your Unity version.
_undistortedTexture = new RenderTexture(1016, 1016, GraphicsFormat.R8G8B8A8_UNorm, 0);
Add the following to the RenderFrame()
method to set the material params:
// Push the camera intrinsics into the undistortion material so the shader can
// map each output pixel back onto the distorted source image.
var intrinsics = frame.CameraIntrinsics;
var focal = intrinsics.FocalLength;
var principal = intrinsics.PrincipalPoint;

// Pack the four radial coefficients into a single vector.
var radial = new Vector4(
    (float)intrinsics.RadialDistortion[0],
    (float)intrinsics.RadialDistortion[1],
    (float)intrinsics.RadialDistortion[2],
    (float)intrinsics.RadialDistortion[3]);

// Tangential distortion has only two coefficients; zw are unused.
var tangential = new Vector4(
    (float)intrinsics.TangentialDistortion[0],
    (float)intrinsics.TangentialDistortion[1],
    0f,
    0f);

_undistortMaterial.SetFloat("_Fx", focal.x);
_undistortMaterial.SetFloat("_Fy", focal.y);
_undistortMaterial.SetFloat("_Cx", principal.x);
_undistortMaterial.SetFloat("_Cy", principal.y);
_undistortMaterial.SetVector("_D1", radial);
_undistortMaterial.SetVector("_D2", tangential);
Now edit UpdateTextureChannel()
to blit the camera texture through the undistort material into the Render Texture, after rawVideoTexture.Apply()
:
Graphics.Blit(rawVideoTexture, _undistortedTexture, _undistortMaterial);
To read the data back, you have 3 options:
- Use another empty texture with ReadPixels (to learn how this works, you can see it used to resize textures here: GPU Resizer Script)
/// <summary>
/// Copies the undistorted render texture into a CPU-side texture, encodes it to
/// PNG, and writes the file asynchronously.
/// </summary>
/// <param name="folderName">Destination directory.</param>
/// <param name="fileName">Destination file name (e.g. "frame.png").</param>
public void SaveRenderTexture(string folderName, string fileName)
{
    // Read the GPU texture back via the active render target.
    RenderTexture previous = RenderTexture.active;
    RenderTexture.active = _undistortedTexture;
    if (_emptyTexture == null)
    {
        // Texture2D defaults to RGBA32, matching R8G8B8A8_UNorm used below.
        _emptyTexture = new Texture2D(1016, 1016);
    }
    _emptyTexture.ReadPixels(new Rect(0, 0, 1016, 1016), 0, 0);
    _emptyTexture.Apply();
    RenderTexture.active = previous;

    string filePath = Path.Combine(folderName, fileName);

    // BUGFIX: read from _emptyTexture (the CPU copy we just filled) — the original
    // referenced an undefined _undistortedTextureData field.
    // Encode on the main thread: GetRawTextureData returns a NativeArray view into
    // the texture, which is not safe to hand to a background task; dispose the
    // encoded NativeArray once copied to managed memory.
    byte[] pngBytes;
    using (var png = ImageConversion.EncodeNativeArrayToPNG(
               _emptyTexture.GetRawTextureData<byte>(), GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016))
    {
        pngBytes = png.ToArray();
    }

    Task.Run(async () =>
    {
        try
        {
            await File.WriteAllBytesAsync(filePath, pngBytes);
        }
        catch (Exception e)
        {
            Debug.LogError($"Failed To Save Image: {e}");
        }
    }); //Please add cancellation token! >.<
}
- Read bytes using AsyncGPUReadback and force to complete:
/// <summary>
/// Reads the undistorted render texture back from the GPU (blocking until the
/// readback completes), encodes it to PNG, and writes the file asynchronously.
/// </summary>
/// <param name="folderName">Destination directory.</param>
/// <param name="fileName">Destination file name (e.g. "frame.png").</param>
public void SaveRenderTexture(string folderName, string fileName)
{
    // Request a GPU readback and force it to complete now.
    var request = AsyncGPUReadback.Request(_undistortedTexture, 0);
    request.WaitForCompletion();
    if (request.hasError)
    {
        Debug.LogError("Error during GPU readback");
        return;
    }

    string filePath = Path.Combine(folderName, fileName);

    // BUGFIX: encode on the main thread. The NativeArray returned by GetData is
    // only valid while the request is alive (at most until the next frame), so it
    // must not be captured by the background task; copy to managed memory first
    // and dispose the encoded NativeArray.
    byte[] pngBytes;
    using (var png = ImageConversion.EncodeNativeArrayToPNG(
               request.GetData<byte>(), GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016))
    {
        pngBytes = png.ToArray();
    }

    Task.Run(async () =>
    {
        try
        {
            await File.WriteAllBytesAsync(filePath, pngBytes);
        }
        catch (Exception e)
        {
            Debug.LogError($"Failed To Save Image: {e}");
        }
    }); //Please add cancellation token! >.<
}
- Read bytes using AsyncGPUReadback and read asynchronously:
/// <summary>
/// Saves the undistorted render texture without stalling the GPU: requests an
/// asynchronous readback, then encodes and writes the PNG once it completes.
/// </summary>
/// <param name="folderName">Destination directory.</param>
/// <param name="fileName">Destination file name (e.g. "frame.png").</param>
public void SaveRenderTextureV2(string folderName, string fileName)
{
    AsyncGPUReadback.Request(_undistortedTexture, 0, (request) =>
    {
        if (request.hasError)
        {
            Debug.LogError("Error during GPU readback");
            return;
        }

        string filePath = Path.Combine(folderName, fileName);

        // BUGFIX: encode inside the callback (main thread). The NativeArray
        // returned by GetData is invalidated when the request is disposed, so it
        // must not be captured by the background task; copy to managed memory
        // first and dispose the encoded NativeArray.
        byte[] pngBytes;
        using (var png = ImageConversion.EncodeNativeArrayToPNG(
                   request.GetData<byte>(), GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016))
        {
            pngBytes = png.ToArray();
        }

        Task.Run(async () =>
        {
            try
            {
                await File.WriteAllBytesAsync(filePath, pngBytes);
            }
            catch (Exception e)
            {
                Debug.LogError($"Failed To Save Image: {e}");
            }
        });
    }); //Please add cancellation token! >.<
}
World Undistortion Shader:
// Undistorts a Magic Leap world-camera image using a fisheye (theta-polynomial)
// radial model plus a tangential term, driven by the camera intrinsics set from
// WorldCameraVisualizer (_Fx/_Fy/_Cx/_Cy/_D1/_D2).
Shader "Custom/UndistortWorldCamera" {
    Properties {
        _MainTex ("Texture", 2D) = "white" {}
        _Width ("Width", Float) = 1016
        _Height ("Height", Float) = 1016
        _Fx ("Focal Length X", Float) = 583
        _Fy ("Focal Length Y", Float) = 583
        _Cx ("Principal Point X", Float) = 512
        _Cy ("Principal Point Y", Float) = 502
        _D1 ("Radial Distortion", Vector) = (0.107429, -0.0893991, -0.040145796, 0.02508350)
        _D2 ("Tangential Distortion", Vector) = (0.002956, 0.00091453, 0, 0)
        [Toggle(ROTATE180)] _Rotate180 ("Rotate 180", Float) = 1
        [Toggle(GRAYSCALE)] _Grayscale ("Render Grayscale", Float) = 1
    }
    SubShader {
        Pass {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"
            #pragma multi_compile_local _ ROTATE180
            #pragma multi_compile_local _ GRAYSCALE

            struct appdata {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float _Width;
            float _Height;
            float _Fx;
            float _Fy;
            float _Cx;
            float _Cy;
            float4 _D1; // radial distortion coefficients k1..k4
            float4 _D2; // tangential distortion coefficients p1, p2 (zw unused)

            v2f vert (appdata v) {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
#ifdef ROTATE180
                // NOTE(review): this flips only the vertical axis; a true 180-degree
                // rotation would also mirror u (1.0 - v.uv.x). Confirm the intended
                // orientation against the camera output.
                o.uv = float2(v.uv.x, 1.0 - v.uv.y);
#else
                o.uv = v.uv;
#endif
                return o;
            }

            // Maps an output (undistorted) UV to the corresponding UV in the
            // distorted source image.
            float2 undistort(float2 uv) {
                // Convert UV to normalized camera coordinates via the intrinsics.
                float x = (uv.x * _Width - _Cx) / _Fx;
                float y = (uv.y * _Height - _Cy) / _Fy;
                float r = sqrt(x*x + y*y);
                float theta = atan(r);
                // BUGFIX: guard the division by r. At the principal point r == 0
                // and the original expression produced NaN; the analytic limit of
                // the scale factor as r -> 0 is 1.
                float rd = (r > 1e-6)
                    ? (theta + _D1.x * pow(theta,3) + _D1.y * pow(theta,5) + _D1.z * pow(theta,7) + _D1.w * pow(theta,9)) / r
                    : 1.0;
                x *= rd;
                y *= rd;
                // Tangential distortion term.
                float r2 = x*x + y*y;
                x += 2.0 * _D2.x * x * y + _D2.y * (r2 + 2.0 * x*x);
                y += _D2.x * (r2 + 2.0 * y*y) + 2.0 * _D2.y * x * y;
                // Convert back to UV space.
                x = x * _Fx + _Cx;
                y = y * _Fy + _Cy;
                return float2(x / _Width, y / _Height);
            }

            half4 frag (v2f i) : SV_Target {
                float2 uv = undistort(i.uv);
                half4 color = tex2D(_MainTex, uv);
#ifdef GRAYSCALE
                // Source is a single-channel (R8) image: replicate R across RGB.
                float grayscale = tex2D(_MainTex, uv).r;
                color = float4(grayscale, grayscale, grayscale, 1.0);
                //return pow(color, 2.2); // Approximation for inverse sRGB gamma correction
#endif
                return color;
            }
            ENDCG
        }
    }
}