Camera Access while Streaming

Here is a quick sample script that connects to the CV camera, streams video frames, and copies each frame into a Texture2D:

using System;
using System.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;

public class MagicLeapCamera : MonoBehaviour
{
    [SerializeField] private int width = 1280;
    [SerializeField] private int height = 720;
    [SerializeField] private MLCamera.CaptureFrameRate frameRate = MLCamera.CaptureFrameRate._15FPS;

    private MLCamera _cvCamera;
    private MLCamera.StreamCapability[] _streamCapabilities;
    private MLCamera.StreamCapability _currentStreamCapability;

    private bool _cameraDeviceAvailable = false;
    private readonly MLPermissions.Callbacks _permissionCallbacks = new MLPermissions.Callbacks();

    private Texture2D _rawVideoTexture;

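    // Subscribe to the permission callbacks before requesting the camera permission in Start().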
    private void Awake()
    {
        _permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
        _permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
        _permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDenied;
    }

    private void OnDestroy()
    {
        _permissionCallbacks.OnPermissionGranted -= OnPermissionGranted;
        _permissionCallbacks.OnPermissionDenied -= OnPermissionDenied;
        _permissionCallbacks.OnPermissionDeniedAndDontAskAgain -= OnPermissionDenied;

        if (_cvCamera != null)
        {
            _cvCamera.OnRawVideoFrameAvailable -= RawVideoFrameAvailable;
            // Stop the stream and release the camera device on teardown.
            _cvCamera.CaptureVideoStop();
            _cvCamera.Disconnect();
        }
    }

    // Start is called before the first frame update
    void Start()
    {
        MLPermissions.RequestPermission(MLPermission.Camera, _permissionCallbacks);
    }

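    // Called once the permission request completes; starts camera setup on success.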
    void HandlePermissionsDone(MLResult result)
    {
        if (!result.IsOk)
        {
            Debug.LogError(
                $"{nameof(MagicLeapCamera)} failed to get the requested permissions, disabling script. Reason: {result}");
            enabled = false;
            return;
        }

        Debug.Log("Succeeded in requesting all permissions");
        StartCoroutine(SetUpCamera());
    }

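    // Polls until the CV camera device reports that it is available, then connects to it.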
    private IEnumerator SetUpCamera()
    {
        while (!_cameraDeviceAvailable)
        {
            MLResult result = MLResult.Create(MLResult.Code.NotImplemented);
            Debug.Log("Get Camera Status!");
            try
            {
                result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.CV, out _cameraDeviceAvailable);
            }
            catch (Exception e)
            {
                Debug.Log(e);
            }

            if (!(result.IsOk && _cameraDeviceAvailable))
            {
                // Wait until camera device is available
                yield return new WaitForSeconds(1.0f);
            }
        }

        Debug.Log("Camera available!");
        yield return new WaitForSeconds(1.0f);
        ConnectCamera();
    }

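    // Connects to the CV camera, selects the best-fit stream capability, and starts video capture.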
    private void ConnectCamera()
    {
        MLCamera.ConnectContext connectContext = MLCamera.ConnectContext.Create();
        connectContext.CamId = MLCamera.Identifier.CV;
        connectContext.Flags = MLCamera.ConnectFlag.CamOnly;
        connectContext.EnableVideoStabilization = false;

        _cvCamera = MLCamera.CreateAndConnect(connectContext);

        if (_cvCamera != null)
        {
            Debug.Log("Camera device connected");

            _streamCapabilities = MLCamera.GetImageStreamCapabilitiesForCamera(_cvCamera, MLCamera.CaptureType.Video);
            if (_streamCapabilities == null || _streamCapabilities.Length == 0)
            {
                Debug.LogError("No stream caps received");
                return;
            }

            MLCamera.StreamCapability selectedCapability =
                MLCamera.GetBestFitStreamCapabilityFromCollection(_streamCapabilities, width, height,
                    MLCamera.CaptureType.Video);

            Debug.Log("Streaming in " + selectedCapability.Width + "x" + selectedCapability.Height);

            _currentStreamCapability = selectedCapability;

            Debug.Log("Camera device received stream caps");
            _cvCamera.OnRawVideoFrameAvailable += RawVideoFrameAvailable;

            MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig();
            captureConfig.CaptureFrameRate = frameRate;
            captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
            captureConfig.StreamConfigs[0] =
                MLCamera.CaptureStreamConfig.Create(selectedCapability, MLCamera.OutputFormat.RGBA_8888);
            MLResult result = _cvCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

            if (!result.IsOk)
            {
                Debug.LogError($"Failed to prepare capture. Reason: {result}");
                return;
            }

            Debug.Log("Capture config prepared");
            _cvCamera.PreCaptureAEAWB();
            result = _cvCamera.CaptureVideoStart();

            if (!result.IsOk)
            {
                Debug.LogError($"Image capture failed. Reason: {result}");
            }
        }
        else
        {
            Debug.Log("Unable to properly Connect MLCamera");
        }
    }

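    // Invoked for every captured video frame; updates the preview texture and logs the frame's intrinsics and pose.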
    void RawVideoFrameAvailable(MLCamera.CameraOutput output, MLCamera.ResultExtras extras)
    {
        UpdateRGBTexture(ref _rawVideoTexture, output.Planes[0]);
        LogIntrinsics(extras.Intrinsics);
        LogExtrinsicsMatrix(extras.VCamTimestamp);
    }

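    // Copies the RGBA frame data into the Texture2D, recreating it if the stream resolution changes.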
    private void UpdateRGBTexture(ref Texture2D rawVideoTexture, MLCamera.PlaneInfo imagePlane)
    {
        int imageWidth = (int) (imagePlane.Stride / imagePlane.BytesPerPixel);

        if (rawVideoTexture != null &&
            (rawVideoTexture.width != imageWidth || rawVideoTexture.height != imagePlane.Height))
        {
            Destroy(rawVideoTexture);
            rawVideoTexture = null;
        }

        if (rawVideoTexture == null)
        {
            rawVideoTexture = new Texture2D(imageWidth, (int) imagePlane.Height, TextureFormat.RGBA32, false);
            rawVideoTexture.filterMode = FilterMode.Bilinear;
        }

        rawVideoTexture.LoadRawTextureData(imagePlane.Data);
        rawVideoTexture.Apply();
    }


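    // Looks up the camera pose in world space for the frame's timestamp and logs it.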
    void LogExtrinsicsMatrix(MLTime vcamTimestampUs)
    {
#if UNITY_ANDROID
        Matrix4x4 outMatrix;
        MLResult result = MLCVCamera.GetFramePose(vcamTimestampUs, out outMatrix);
        if (result.IsOk)
        {
            Debug.Log("Rotation: " + outMatrix.rotation + " Position: " + outMatrix.GetPosition());
        }
#endif
    }

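    // Logs the intrinsic calibration parameters (focal length and principal point) reported with the frame.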
    void LogIntrinsics(MLCamera.IntrinsicCalibrationParameters? cameraIntrinsicParameters)
    {
        if (cameraIntrinsicParameters == null)
        {
            return;
        }

        var cameraParameters = cameraIntrinsicParameters.Value;
        Debug.Log("IntrinsicData " +
                  "\n Width: " + _currentStreamCapability.Width +
                  "\n Height:" + _currentStreamCapability.Height +
                  "\n FocalLength.x:" + cameraParameters.FocalLength.x +
                  "\n FocalLength.y:" + cameraParameters.FocalLength.y +
                  "\n PrincipalPoint.x:" + cameraParameters.PrincipalPoint.x +
                  "\n PrincipalPoint.y:" + cameraParameters.PrincipalPoint.y);
    }

    private void OnPermissionDenied(string permission)
    {
        HandlePermissionsDone(MLResult.Create(MLResult.Code.PermissionDenied));
    }

    private void OnPermissionGranted(string permission)
    {
        HandlePermissionsDone(MLResult.Create(MLResult.Code.Ok));
    }
}
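
To display the stream, the Texture2D filled in UpdateRGBTexture can be assigned to a UI RawImage (or a material) each frame. Below is a minimal sketch, assuming the script above is extended with a hypothetical public VideoTexture property that exposes _rawVideoTexture; it is not part of the sample by default.

using UnityEngine;
using UnityEngine.UI;

// Hypothetical display helper: pushes the latest CV camera frame onto a RawImage.
// Assumes MagicLeapCamera has been extended with a public property such as
// `public Texture2D VideoTexture => _rawVideoTexture;`, which the sample above
// does not include by default.
public class CameraFeedDisplay : MonoBehaviour
{
    [SerializeField] private MagicLeapCamera cameraScript;
    [SerializeField] private RawImage targetImage;

    private void Update()
    {
        // The texture is recreated when the stream resolution changes,
        // so re-assign the reference every frame.
        if (cameraScript != null && targetImage != null && cameraScript.VideoTexture != null)
        {
            targetImage.texture = cameraScript.VideoTexture;
        }
    }
}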