Vuforia did return something likewise, check the figure I attached from vuforia:
I don't know if this looks correct, but I have
FieldOfViewInDeg
as
Field of View in Deg: x=64.7041 y=39.22261
I'm now testing with MLCamera but the app seems to get crashed after a few seconds. Trying to debug right now... I have a log like this:
logcat.zip (144.3 KB)
My code:
using System;
using System.Collections;
using System.Runtime.CompilerServices;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using Vuforia;
public class SimpleCamera : MonoBehaviour
{
[SerializeField, Tooltip("Desired width for the camera capture")]
private int captureWidth = 1920;
[SerializeField, Tooltip("Desired height for the camera capture")]
private int captureHeight = 1080;
[SerializeField, Tooltip("The renderer to show the camera capture on RGB format")]
private Renderer _screenRendererRGB = null;
//The identifier can either target the Main or CV cameras.
private MLCamera.Identifier _identifier = MLCamera.Identifier.Main;
private MLCamera _camera;
//Is true if the camera is ready to be connected.
private bool _cameraDeviceAvailable;
private MLCamera.CaptureConfig _captureConfig;
private Texture2D _videoTextureRgb;
//The camera capture state
bool _isCapturing;
void OnEnable()
{
//This script assumes that camera permissions were already granted.
StartCoroutine(EnableMLCamera());
}
void OnDisable()
{
StopCapture();
}
//Waits for the camera to be ready and then connects to it.
private IEnumerator EnableMLCamera()
{
//Checks the main camera's availability.
while (!_cameraDeviceAvailable)
{
MLResult result = MLCamera.GetDeviceAvailabilityStatus(_identifier, out _cameraDeviceAvailable);
if (result.IsOk == false || _cameraDeviceAvailable == false)
{
// Wait until camera device is available
yield return new WaitForSeconds(1.0f);
}
}
ConnectCamera();
}
private void ConnectCamera()
{
//Once the camera is available, we can connect to it.
if (_cameraDeviceAvailable)
{
MLCamera.ConnectContext connectContext = MLCamera.ConnectContext.Create();
connectContext.CamId = _identifier;
//MLCamera.Identifier.Main is the only camera that can access the virtual and mixed reality flags
connectContext.Flags = MLCamera.ConnectFlag.CamOnly;
connectContext.EnableVideoStabilization = true;
_camera = MLCamera.CreateAndConnect(connectContext);
if (_camera != null)
{
Debug.Log("Camera device connected");
ConfigureCameraInput();
SetCameraCallbacks();
}
}
}
private void ConfigureCameraInput()
{
//Gets the stream capabilities the selected camera. (Supported capture types, formats and resolutions)
MLCamera.StreamCapability[] streamCapabilities = MLCamera.GetImageStreamCapabilitiesForCamera(_camera, MLCamera.CaptureType.Video);
if (streamCapabilities.Length == 0)
return;
//Set the default capability stream
MLCamera.StreamCapability defaultCapability = streamCapabilities[0];
//Try to get the stream that most closely matches the target width and height
if (MLCamera.TryGetBestFitStreamCapabilityFromCollection(streamCapabilities, captureWidth, captureHeight,
MLCamera.CaptureType.Video, out MLCamera.StreamCapability selectedCapability))
{
defaultCapability = selectedCapability;
}
//Initialize a new capture config.
_captureConfig = new MLCamera.CaptureConfig();
//Set RGBA video as the output
MLCamera.OutputFormat outputFormat = MLCamera.OutputFormat.YUV_420_888;
//Set the Frame Rate to 30fps
_captureConfig.CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS;
//Initialize a camera stream config.
//The Main Camera can support up to two stream configurations
_captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
_captureConfig.StreamConfigs[0] = MLCamera.CaptureStreamConfig.Create(
defaultCapability, outputFormat
);
StartVideoCapture();
}
private void StartVideoCapture()
{
MLResult result = _camera.PrepareCapture(_captureConfig, out MLCamera.Metadata metaData);
if (result.IsOk)
{
//Trigger auto exposure and auto white balance
_camera.PreCaptureAEAWB();
//Starts video capture. This call can also be called asynchronously
//Images capture uses the CaptureImage function instead.
result = _camera.CaptureVideoStart();
_isCapturing = MLResult.DidNativeCallSucceed(result.Result, nameof(_camera.CaptureVideoStart));
if (_isCapturing)
{
Debug.Log("Video capture started!");
}
else
{
Debug.LogError($"Could not start camera capture. Result : {result}");
}
}
}
private void StopCapture()
{
if (_isCapturing)
{
_camera.CaptureVideoStop();
}
_camera.Disconnect();
_camera.OnRawVideoFrameAvailable -= RawVideoFrameAvailable;
_isCapturing = false;
}
//Assumes that the capture configure was created with a Video CaptureType
private void SetCameraCallbacks()
{
//Provides frames in either YUV/RGBA format depending on the stream configuration
_camera.OnRawVideoFrameAvailable += RawVideoFrameAvailable;
}
void RawVideoFrameAvailable(MLCamera.CameraOutput output, MLCamera.ResultExtras extras, MLCameraBase.Metadata metadataHandle)
{
var imgTarget = AllVariables.Instance.mImageTarget;
if (imgTarget == null || imgTarget.TargetStatus.Status != Status.TRACKED)
{
return;
}
if (output.Format == MLCamera.OutputFormat.YUV_420_888)
{
var imagePlane = output.Planes[0];
int actualWidth = (int)(imagePlane.Width * imagePlane.PixelStride);
// obtain corners of the puzzle in world space
AllVariables.Instance.ComputeCornerCoordinates();
var corners = AllVariables.Instance.corners;
Debug.Log("C2: topLeft" + corners[0]);
Debug.Log("C2: topRight" + corners[1]);
Debug.Log("C2: bottomLeft" + corners[2]);
Debug.Log("C2: bottomRight" + corners[3]);
if (MLCVCamera.GetFramePose(extras.VCamTimestamp, out Matrix4x4 cameraTransform).IsOk)
{
Debug.Log("Actual width is " + actualWidth);
Debug.Log("imagePlane.Stride is " + imagePlane.PixelStride);
Debug.Log("imagePlane.Width is " + imagePlane.Width);
Debug.Log("imagePlane.Height is " + imagePlane.Height);
var topLeftScreen = WorldPointToPixel(corners[0], (int)actualWidth, (int)imagePlane.Height, extras.Intrinsics.Value, cameraTransform);
var topRightScreen = WorldPointToPixel(corners[1], (int)actualWidth, (int)imagePlane.Height, extras.Intrinsics.Value, cameraTransform);
var bottomLeftScreen = WorldPointToPixel(corners[2], (int)actualWidth, (int)imagePlane.Height, extras.Intrinsics.Value, cameraTransform);
var bottomRightScreen = WorldPointToPixel(corners[3], (int)actualWidth, (int)imagePlane.Height, extras.Intrinsics.Value, cameraTransform);
Debug.Log("C5: topLeft" + topLeftScreen);
Debug.Log("C5: topRight" + topRightScreen);
Debug.Log("C5: bottomLeft" + bottomLeftScreen);
Debug.Log("C5: bottomRight" + bottomRightScreen);
var minX = Mathf.Max(Mathf.FloorToInt(Mathf.Min(topLeftScreen.x, bottomLeftScreen.x)) - 10, 0);
var maxX = Mathf.Min(Mathf.CeilToInt(Mathf.Max(topRightScreen.x, bottomRightScreen.x)) + 10, actualWidth);
var minY = Mathf.Max(Mathf.FloorToInt(Mathf.Min(bottomLeftScreen.y, bottomRightScreen.y)) - 10, 0);
var maxY = Mathf.Min(Mathf.CeilToInt(Mathf.Max(topLeftScreen.y, topRightScreen.y)) + 10, imagePlane.Height);
Debug.Log("C3: minX: " + minX);
Debug.Log("C3: maxX: " + maxX);
Debug.Log("C3: minY: " + minY);
Debug.Log("C3: maxY: " + maxY);
AllVariables.Instance.puzzleHeight = (int)(maxY - minY);
AllVariables.Instance.puzzleWidth = maxX - minX;
if (imagePlane.Stride != actualWidth)
{
byte[] yChannel = new byte[(int)((maxX - minX) * (maxY - minY))];
for (int i = minY; i < maxY; i++)
{
Buffer.BlockCopy(imagePlane.Data, (int)(i * imagePlane.Stride + minX), yChannel,
i * (maxX - minX), maxX - minX);
}
Debug.Log("Hello we have a channel Y data in if with shape: " + yChannel.Length);
AllVariables.Instance.puzzleImage = yChannel;
Debug.Log("Ending one update");
}
else
{
byte[] yChannel = imagePlane.Data;
Debug.Log("Hello we have a channel Y data in else with shape: " + yChannel.Length);
}
}
}
}
public Vector2 WorldPointToPixel(Vector3 worldPoint, int width, int height, MLCameraBase.IntrinsicCalibrationParameters parameters, Matrix4x4 cameraTransformationMatrix)
{
// Step 1: Convert the world space point to camera space
Vector3 cameraSpacePoint = cameraTransformationMatrix.inverse.MultiplyPoint(worldPoint);
// Step 2: Project the camera space point onto the normalized image plane
Vector2 normalizedImagePoint = new Vector2(cameraSpacePoint.x / cameraSpacePoint.z, cameraSpacePoint.y / cameraSpacePoint.z);
// Step 3: Adjust for FOV
float verticalFOVRad = parameters.FOV * Mathf.Deg2Rad;
float aspectRatio = width / (float)height;
float horizontalFOVRad = 2 * Mathf.Atan(Mathf.Tan(verticalFOVRad / 2) * aspectRatio);
normalizedImagePoint.x /= Mathf.Tan(horizontalFOVRad / 2);
normalizedImagePoint.y /= Mathf.Tan(verticalFOVRad / 2);
// Step 4: Convert normalized image coordinates to pixel coordinates
Vector2 pixelPosition = new Vector2(
normalizedImagePoint.x * width + parameters.PrincipalPoint.x,
normalizedImagePoint.y * height + parameters.PrincipalPoint.y
);
return pixelPosition;
}
}
Update: It still crashes sometimes but I was able to get some results. Basically use the conversion function I have, I got:
08-14 20:12:53.206271 25938 25959 I Unity : C2: topLeft(-0.16, 0.03, 0.36)
08-14 20:12:53.206321 25938 25959 I Unity : C2: topRight(-0.05, 0.03, 0.37)
08-14 20:12:53.206343 25938 25959 I Unity : C2: bottomLeft(-0.16, -0.08, 0.37)
08-14 20:12:53.206367 25938 25959 I Unity : C2: bottomRight(-0.05, -0.08, 0.38)
08-14 20:12:53.206477 25938 25959 I Unity : Actual width is 1920
08-14 20:12:53.206500 25938 25959 I Unity : imagePlane.Stride is 1
08-14 20:12:53.206513 25938 25959 I Unity : imagePlane.Width is 1920
08-14 20:12:53.206529 25938 25959 I Unity : imagePlane.Height is 1080
08-14 20:12:53.206561 25938 25959 I Unity : C5: topLeft(802.76, 254.34)
08-14 20:12:53.206580 25938 25959 I Unity : C5: topRight(1123.74, 201.58)
08-14 20:12:53.206599 25938 25959 I Unity : C5: bottomLeft(750.44, -65.84)
08-14 20:12:53.206617 25938 25959 I Unity : C5: bottomRight(1071.02, -118.72)
08-14 20:12:53.206632 25938 25959 I Unity : C3: minX: 740
08-14 20:12:53.206646 25938 25959 I Unity : C3: maxX: 1134
08-14 20:12:53.206661 25938 25959 I Unity : C3: minY: 0
08-14 20:12:53.206681 25938 25959 I Unity : C3: maxY: 265
And the image I get is higher up w.r.t the actual square I have. I assume the y axis is also flipped for MLCamera output, but why am I getting negative values here?