How to undistort world camera image

I would like to apply the distortion parameters to the world camera frame to get an undistorted image. How can I do this?

Hi @paul.kemppi,

I have reached out to our Unity team and I will report back as soon as I learn more.

Best,

El

Shader "Image/Undistort"
{
    Properties
    {
        _MainTex("Main Texture", 2D) = "white" {}
    }
        SubShader
        {
            Tags { "RenderType" = "Opaque" }
            LOD 100
            Pass
            {
                CGPROGRAM
                #pragma vertex vert
                #pragma fragment frag
                #include "UnityCG.cginc"
                struct appdata
                {
                    float4 vertex : POSITION;
                    float2 uv : TEXCOORD0;
                };
                struct v2f
                {
                    float4 vertex : SV_POSITION;
                    float2 uv : TEXCOORD0;
                };
                sampler2D _MainTex;
                float4 _MainTex_ST;
                float k1 = 0, k2 = 0, k3 = 0, p1 = 0, p2 = 0; // Distortion coefficients; set them from C# with material.SetFloat("k1", ...) etc.
                float2 undistort(float2 uv)
                {
                    float2 xy = uv - 0.5f;
                    float r2 = dot(xy, xy);
                    float r4 = r2 * r2;
                    float r6 = r4 * r2;
                    float2 xy_rd = xy * (1 + (k1 * r2) + (k2 * r4) + (k3 * r6));
                    float xtd = (2 * p1 * xy.x * xy.y) + (p2 * (r2 + (2 * xy.x * xy.x)));
                    float ytd = (2 * p2 * xy.x * xy.y) + (p1 * (r2 + (2 * xy.y * xy.y)));
                    float2 xy_td = float2(xtd, ytd);
                    return (xy_rd + xy_td) + 0.5f;
                }
                v2f vert(appdata v)
                {
                    v2f o;
                    o.vertex = UnityObjectToClipPos(v.vertex);
                    o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                    return o;
                }
                fixed4 frag(v2f i) : SV_Target
                {
                    return tex2D(_MainTex, undistort(i.uv));
                }
                ENDCG
            }
        }
}

You can use this shader to undistort images using radial and tangential distortion coefficients.
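For example, you could set the coefficients from a script and run the shader over a texture with Graphics.Blit. Here is a minimal sketch (the script name and coefficient values are placeholders; use your camera's actual calibration values):

using UnityEngine;

public class UndistortExample : MonoBehaviour
{
    // Distorted source image (assign in the Inspector); the result ends up in _result.
    public Texture2D distortedImage;

    private Material _material;
    private RenderTexture _result;

    void Start()
    {
        // Create a material from the shader above and pass the distortion coefficients to it.
        _material = new Material(Shader.Find("Image/Undistort"));
        _material.SetFloat("k1", 0.01f);   // placeholder values - use your camera's calibration
        _material.SetFloat("k2", 0.001f);
        _material.SetFloat("k3", 0.0f);
        _material.SetFloat("p1", 0.0001f);
        _material.SetFloat("p2", 0.0001f);

        // Run the shader on the GPU; the undistorted image is written into the RenderTexture.
        _result = new RenderTexture(distortedImage.width, distortedImage.height, 0);
        Graphics.Blit(distortedImage, _result, _material);
    }
}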

Let me know if you have further questions.

Best,

El

Thanks for the info. I was hoping to get code that I can use for already collected images. Moreover, I am not that familiar with shaders, e.g., how to pass the parameters (k1, ...) to that shader.

When collecting the world camera frames, I just convert them into byte arrays and save them together with the metadata to be processed on the server using python.

This is the code to convert the image to a byte array:

byte[] dataArray = new byte[mlWorldCameraFrame.FrameBuffer.DataSize];
Marshal.Copy(mlWorldCameraFrame.FrameBuffer.Data, dataArray, 0, mlWorldCameraFrame.FrameBuffer.DataSize);

How can I use the shader here to save the undistorted image instead of the original one? Do I create a texture, assign a material with the shader, copy the byte array to the texture, and then copy it back to a byte array?

Regards,
Paul

@paul.kemppi Here is an explanation of how to apply a shader to the world camera images and save the result.

Here are some more detailed instructions. I'll also try to put together a sample project and share it.

We use the following GetCameraData() method from WorldCameraExample.cs in our samples:

private void GetCameraData()
{
    var result = worldCamera.GetLatestWorldCameraData(out MLWorldCamera.Frame[] frames);
    if (!result.IsOk)
        return;

    foreach (var frame in frames)
    {
        var cameraAndFrameType = new ValueTuple<MLWorldCamera.CameraId, MLWorldCamera.Frame.Type>(frame.CameraId, frame.FrameType);
        if (cameraAndFrameTypes[activeCombinationIndex] == cameraAndFrameType)
        {
            worldCamVisualizer.RenderFrame(frame);
            this.cameraAndFrameType.text = $"Camera: {cameraAndFrameType.Item1}\nType: {cameraAndFrameType.Item2}";
            frameInfo.text = $"CameraPose: {frame.CameraPose}, CameraIntrinsics: {frame.CameraIntrinsics}, FrameBuffer: {frame.FrameBuffer}";
            break;
        }
    }
}

We make the following changes to WorldCameraVisualizer.cs:

  • add `private RenderTexture _undistortedTexture;`
  • add `private Material _undistortMaterial;`

Initialize both in the Start() method:

Shader shader = Shader.Find("Custom/UndistortWorldCamera"); // Add the shader to the scene (e.g., reference it from a material) or to the Always Included Shaders list in Project Settings so it does not get stripped during the build.
_undistortMaterial = new Material(shader);
_undistortedTexture = new RenderTexture(1016, 1016, 0, GraphicsFormat.R8G8B8A8_UNorm); // depth buffer 0; GraphicsFormat lives in UnityEngine.Experimental.Rendering

Add the following to the RenderFrame() method to set the material params:

var focalLength = frame.CameraIntrinsics.FocalLength;
var principalPoint = frame.CameraIntrinsics.PrincipalPoint;
var radialDistortion = new Vector4((float)frame.CameraIntrinsics.RadialDistortion[0],
  (float)frame.CameraIntrinsics.RadialDistortion[1], 
  (float)frame.CameraIntrinsics.RadialDistortion[2],
  (float)frame.CameraIntrinsics.RadialDistortion[3]);
var tangentialDistortion = new Vector4((float)frame.CameraIntrinsics.TangentialDistortion[0],(float)frame.CameraIntrinsics.TangentialDistortion[1],0,0);

_undistortMaterial.SetFloat("_Fx",focalLength.x);
_undistortMaterial.SetFloat("_Fy",focalLength.y);
_undistortMaterial.SetFloat("_Cx",principalPoint.x);
_undistortMaterial.SetFloat("_Cy",principalPoint.y);
_undistortMaterial.SetVector("_D1",radialDistortion);
_undistortMaterial.SetVector("_D2",tangentialDistortion);

Now edit UpdateTextureChannel() to blit the raw texture through the undistort material into the RenderTexture, right after rawVideoTexture.Apply():

Graphics.Blit(rawVideoTexture, _undistortedTexture, _undistortMaterial);

To read the data back to the CPU, you have three options:

  1. Use another empty texture and ReadPixels (to learn how this works, you can see it used to resize textures here: GPU Resizer Script):
// Requires an extra field: private Texture2D _emptyTexture;
public void SaveRenderTexture(string folderName, string fileName)
{
    RenderTexture previous = RenderTexture.active;
    RenderTexture.active = _undistortedTexture;
    if (_emptyTexture == null)
    {
        _emptyTexture = new Texture2D(1016, 1016, TextureFormat.RGBA32, false);
    }

    // Copy the active RenderTexture into the CPU-readable Texture2D.
    _emptyTexture.ReadPixels(new Rect(0, 0, 1016, 1016), 0, 0);
    _emptyTexture.Apply();

    RenderTexture.active = previous;
    string filePath = Path.Combine(folderName, fileName);
    var textureData = _emptyTexture.GetRawTextureData<byte>();
    Task.Run(async () =>
    {
        try
        {
            var data = ImageConversion.EncodeNativeArrayToPNG(textureData, GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016);
            await File.WriteAllBytesAsync(filePath, data.ToArray());
        }
        catch (Exception e)
        {
            Debug.LogError($"Failed To Save Image: {e}");
        }
    }); // Please add a cancellation token! >.<
}

  2. Read bytes using AsyncGPUReadback and force it to complete:
public void SaveRenderTexture(string folderName, string fileName)
{
    var request = AsyncGPUReadback.Request(_undistortedTexture, 0);
    request.WaitForCompletion();
    if (request.hasError)
    {
        Debug.LogError("Error during GPU readback");
        return;
    }

    var dataBytes = request.GetData<byte>();
    string filePath = Path.Combine(folderName, fileName);
    Task.Run(async () =>
    {
        try
        {
            var data = ImageConversion.EncodeNativeArrayToPNG(dataBytes, GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016);
            await File.WriteAllBytesAsync(filePath, data.ToArray());
        }
        catch (Exception e)
        {
            Debug.LogError($"Failed To Save Image: {e}");
        }
    }); // Please add a cancellation token! >.<
}

  3. Read bytes using AsyncGPUReadback and read asynchronously:
public void SaveRenderTextureV2(string folderName, string fileName)
{
    AsyncGPUReadback.Request(_undistortedTexture, 0, (request) =>
    {
        if (request.hasError)
        {
            Debug.LogError("Error during GPU readback");
            return;
        }

        var dataBytes = request.GetData<byte>();
        string filePath = Path.Combine(folderName, fileName);
        Task.Run(async () =>
        {
            try
            {
                var data = ImageConversion.EncodeNativeArrayToPNG(dataBytes, GraphicsFormat.R8G8B8A8_UNorm, 1016, 1016);
                await File.WriteAllBytesAsync(filePath, data.ToArray());
            }
            catch (Exception e)
            {
                Debug.LogError($"Failed To Save Image: {e}");
            }
        });
    }); // Please add a cancellation token! >.<
}
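As a rough usage example, you could call one of these from GetCameraData() after RenderFrame(); the folder and file name below are just placeholders:

// Hypothetical call site inside GetCameraData(), after worldCamVisualizer.RenderFrame(frame):
worldCamVisualizer.SaveRenderTexture(Application.persistentDataPath,
    $"undistorted_{frame.CameraId}_{Time.frameCount}.png");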

World Undistortion Shader:

Shader "Custom/UndistortWorldCamera" {
    Properties {
        _MainTex ("Texture", 2D) = "white" {}
        _Width ("Width", Float) = 1016
        _Height ("Height", Float) = 1016
        _Fx ("Focal Length X", Float) = 583
        _Fy ("Focal Length Y", Float) = 583
        _Cx ("Principal Point X", Float) = 512
        _Cy ("Principal Point Y", Float) = 502
        _D1 ("Radial Distortion", Vector) = (0.107429, -0.0893991, -0.040145796, 0.02508350)
        _D2 ("Tangential Distortion", Vector) = (0.002956, 0.00091453, 0, 0)
        [Toggle(ROTATE180)] _Rotate180 ("Rotate 180", Float) = 1
        [Toggle(GRAYSCALE)] _Grayscale ("Render Grayscale", Float) = 1
    }

    SubShader {
        Pass {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            #pragma multi_compile_local _ ROTATE180
            #pragma multi_compile_local _ GRAYSCALE
            struct appdata {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float _Width;
            float _Height;
            float _Fx;
            float _Fy;
            float _Cx;
            float _Cy;
            float4 _D1;
            float4 _D2;

            v2f vert (appdata v) {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                #ifdef ROTATE180
                o.uv = float2(v.uv.x, 1.0 - v.uv.y); // Flip vertically
                #else
                o.uv = v.uv;
                #endif
                return o;
            }

            float2 undistort(float2 uv) {
                // Convert UV to normalized camera coordinates
                float x = (uv.x * _Width - _Cx) / _Fx;
                float y = (uv.y * _Height - _Cy) / _Fy;
                float r = sqrt(x*x + y*y);
                // Guard against division by zero at the principal point
                if (r > 1e-6) {
                    float theta = atan(r);
                    float rd = (theta + _D1.x * pow(theta,3) + _D1.y * pow(theta,5) + _D1.z * pow(theta,7) + _D1.w * pow(theta,9)) / r;
                    x *= rd;
                    y *= rd;
                }
                // Tangential distortion (use the original x and y in both terms)
                float r2 = x*x + y*y;
                float xd = x + 2.0 * _D2.x * x * y + _D2.y * (r2 + 2.0 * x*x);
                float yd = y + _D2.x * (r2 + 2.0 * y*y) + 2.0 * _D2.y * x * y;

                // Convert back to UV space
                xd = xd * _Fx + _Cx;
                yd = yd * _Fy + _Cy;
                return float2(xd / _Width, yd / _Height);
            }

        
            half4 frag (v2f i) : SV_Target {
                float2 uv = undistort(i.uv);
                half4 color = tex2D(_MainTex, uv);
                #ifdef GRAYSCALE
                // The world camera frames are single-channel (R8), so replicate
                // the red channel across RGB to display them as grayscale.
                float grayscale = color.r;
                color = float4(grayscale, grayscale, grayscale, 1.0);
                //return pow(color, 2.2); // Approximation for inverse sRGB gamma correction
                #endif
                return color;
            }

            
            ENDCG
        }
    }
}
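One note on the [Toggle] keywords: because the material is created from script at runtime rather than configured in the Inspector, ROTATE180 and GRAYSCALE will not be enabled automatically. If you want them, enable the keywords on the material yourself, for example on the _undistortMaterial created in Start():

// Keywords declared with multi_compile_local must be enabled explicitly on a runtime-created material.
_undistortMaterial.EnableKeyword("ROTATE180");  // flip the image vertically
_undistortMaterial.EnableKeyword("GRAYSCALE");  // replicate the R8 channel across RGB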

Thanks for the answers and examples. Got this working as well in my demo.
