Camera capture in Unity

I'm using my code below to poll the ML2 RGB camera in a project under Unity Ed. 2022.2.19f1, Win10, MLSDK v1.4.0-dev2 ML Unity v1.11.0.

I'm able to a) get capture permissions, b) enable and c) connect to the camera, but am failing to set up the still image stream to prepare capture here:

// Initialize CaptureConfig
MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig();
captureConfig.CaptureFrameRate = MLCamera.CaptureFrameRate._15FPS;

captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
captureConfig.StreamConfigs[0] = new MLCamera.CaptureStreamConfig
{
CaptureType = MLCamera.CaptureType.Image,
OutputFormat = MLCamera.OutputFormat.JPEG
};

// Prepare for capture
MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

Logcat shows:

Error ml_camera_client validateAndSetStreamConfig Stream Config Invalid Image
ml_camera_client prepareCapture device state = Idle numStreamConfigs = 0
ml_camera_client Stream 0 type = resolution = state = Invalid
ml_camera_client Stream 1 type = resiolution = state = Invalid

I'm not sure how to proceed. A couple questions:

  • In the case of still image capture, I believe we still proceed with 'stream' setup -- is that correct?
  • I have not found the supported methods to feed or detect image size, et cetera, in a 'stream' context. I'm using the Unity camera examples as a basis.

Thanks for any reference or corrections. My full code follows:

using UnityEngine;
using UnityEngine.XR.MagicLeap;
using System.Collections;
using UnityEngine.InputSystem;
using System;
using static UnityEngine.XR.MagicLeap.MLCameraBase;
using System.Collections.Generic;

public class SimpleCameraCapture : MonoBehaviour
{
[SerializeField] private int width = 1280;
[SerializeField] private int height = 720;
[SerializeField] private MLCamera.CaptureFrameRate frameRate = MLCamera.CaptureFrameRate._15FPS;

private MLCamera captureCamera;
private MLCamera.CaptureStreamConfig captureStreamConfig;
private bool cameraDeviceAvailable = false;

private void Start()
{
    MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();
    permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
    permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
    permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDeniedAndDontAskAgain;

    MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
}

private void OnPermissionGranted(string permission)
{
    StartCoroutine(EnableMLCamera());
}

private void OnPermissionDenied(string permission) { }

private void OnPermissionDeniedAndDontAskAgain(string permission) { }

private IEnumerator EnableMLCamera()
{
    while (!cameraDeviceAvailable)
    {
        MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
        if (!(result.IsOk && cameraDeviceAvailable))
        {
            yield return new WaitForSeconds(1.0f);
        }
    }

    ConnectCamera();
}

private void ConnectCamera()
{
    MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
    context.Flags = MLCamera.ConnectFlag.CamOnly;
    context.EnableVideoStabilization = false;

    captureCamera = MLCamera.CreateAndConnect(context);

    if (captureCamera != null)
    {
        captureStreamConfig = new MLCamera.CaptureStreamConfig
        {
            CaptureType = MLCamera.CaptureType.Image,
            OutputFormat = MLCamera.OutputFormat.JPEG
        };

        captureCamera.OnRawImageAvailable += OnCaptureRawImageComplete;
    }
}

private void OnCaptureRawImageComplete(MLCamera.CameraOutput output, MLCamera.ResultExtras resultExtras, MLCamera.Metadata metadataHandle) { }

void Update()
{
    if (Keyboard.current.spaceKey.wasPressedThisFrame && captureCamera != null)
    {
        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig();
        captureConfig.CaptureFrameRate = MLCamera.CaptureFrameRate._15FPS;

        captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
        captureConfig.StreamConfigs[0] = new MLCamera.CaptureStreamConfig
        {
            CaptureType = MLCamera.CaptureType.Image,
            OutputFormat = MLCamera.OutputFormat.JPEG
        };

        MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

        if (!result.IsOk)
        {
            return;
        }

        captureCamera.PreCaptureAEAWB();

        result = captureCamera.CaptureImage(1);
        if (!result.IsOk)
        {
            return;
        }
    }
}

void OnDestroy()
{
    if (captureCamera != null)
    {
        captureCamera.OnRawImageAvailable -= OnCaptureRawImageComplete;
        captureCamera.Disconnect();
    }
}

}

1 Like

The width and height can be specified in the CaptureStreamConfig

     new MLCamera.CaptureStreamConfig()
                            {
                                OutputFormat = MLCamera.OutputFormat.JPEG,
                                CaptureType = MLCamera.CaptureType.Image,
                                Width = 1920,
                                Height = 1080
                            }

However, I recommend taking a look at the Camera Capture Example script (~line 439) inside the Magic Leap Example Project. This script shows how to capture deferent type of streams using the MLCamera API.

I also created a feedback ticket to create an Image Capture guide on our developer portal. Currently, we only provide a sample that demonstrates how to capture video : Simple Camera Example | MagicLeap Developer Documentation

Thanks @kbabilinski, I appreciate your notes: I've been using the Camera Capture Example, though its methods are entangled with the UI handling and it doesn't help disambiguate video from image capture since it supports both without commenting on the ideas behind the expected stream setup.

From the logcat output below it seems the system is detecting more than one stream, and since I am only supplying attributes for the one image stream I want, the other stream is causing these errors:

Error Camera-Device endConfigure fail Status(-8, EX_SERVICE_SPECIFIC): '3: endConfigure:739: Camera 0: Unsupported set of inputs/outputs provided'
Info ml_camera_client  Capture device state = Idle numStreamConfigs = 1 
Info ml_camera_client  Stream 0 type = Image resolution = 1080P state = Configured 
Info ml_camera_client Stream 1 type = Image resiolution =  state = Invalid

I am here attempting to set a single image stream as follows:

// Initialize CaptureConfig
        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig
        {
            CaptureFrameRate = MLCamera.CaptureFrameRate._15FPS, // You can adjust this according to your needs
            StreamConfigs = new MLCamera.CaptureStreamConfig[]
            {
            new MLCamera.CaptureStreamConfig()
            {
                OutputFormat = MLCamera.OutputFormat.JPEG,
                CaptureType = MLCamera.CaptureType.Image,
                Width = 1920,
                Height = 1080
            }
            }
        };

        // Prepare for capture
        MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

My full revised capture code follows:

using UnityEngine;
using UnityEngine.XR.MagicLeap;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.InputSystem;
using System;
using static UnityEngine.XR.MagicLeap.MLCameraBase;
using System.Collections.Generic;

public class SimpleCameraCapture : MonoBehaviour
{
[SerializeField] private int width = 1280;
[SerializeField] private int height = 720;
[SerializeField] private MLCamera.CaptureFrameRate frameRate = MLCamera.CaptureFrameRate._15FPS;

private MLCamera captureCamera;
private MLCamera.CaptureStreamConfig captureStreamConfig; // Declare as member variable
private bool cameraDeviceAvailable = false;
private List<MLCamera.StreamCapability> streamCapabilities;

private void Start()
{
    MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();
    permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
    permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
    permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDeniedAndDontAskAgain;

    MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
}

private void OnPermissionGranted(string permission)
{
    StartCoroutine(EnableMLCamera());
}

private void OnPermissionDenied(string permission)
{
    Debug.Log("Permission denied: " + permission);
}

private void OnPermissionDeniedAndDontAskAgain(string permission)
{
    Debug.Log("Permission denied and don't ask again: " + permission);
}

private IEnumerator EnableMLCamera()
{
    while (!cameraDeviceAvailable)
    {
        MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
        if (!(result.IsOk && cameraDeviceAvailable))
        {
            yield return new WaitForSeconds(1.0f);
        }
    }

    Debug.Log("Camera device available");
    ConnectCamera();
}

private void ConnectCamera()
{
    MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
    context.Flags = MLCamera.ConnectFlag.CamOnly;
    context.EnableVideoStabilization = false;

    captureCamera = MLCamera.CreateAndConnect(context);

    if (captureCamera != null)
    {
        Debug.Log("ML2 RGB Camera connected!");

        // Initialize member variable captureStreamConfig here
        captureStreamConfig = new MLCamera.CaptureStreamConfig
        {
            CaptureType = MLCamera.CaptureType.Image,
            OutputFormat = MLCamera.OutputFormat.JPEG
        };

        // Register callback
        captureCamera.OnRawImageAvailable += OnCaptureRawImageComplete;

        // Initialize stream capabilities
        if (!GetImageStreamCapabilities())
        {
            Debug.LogError("Failed to get image stream capabilities.");
            // Handle this error appropriately
        }
    }
}

private void OnCaptureRawImageComplete(MLCamera.CameraOutput output, MLCamera.ResultExtras resultExtras, MLCamera.Metadata metadataHandle)
{
    Debug.Log("RGB image callback!");
}

/// Gets the Image stream capabilities
/// True if MLCamera returned at least one stream capability
private bool GetImageStreamCapabilities()
{
    var result =
        captureCamera.GetStreamCapabilities(out MLCamera.StreamCapabilitiesInfo[] streamCapabilitiesInfo);

    if (!result.IsOk)
    {
        Debug.Log("Could not get Stream capabilities Info.");
        return false;
    }

    streamCapabilities = new List<MLCamera.StreamCapability>();

    for (int i = 0; i < streamCapabilitiesInfo.Length; i++)
    {
        foreach (var streamCap in streamCapabilitiesInfo[i].StreamCapabilities)
        {
            streamCapabilities.Add(streamCap);
        }
    }

    return streamCapabilities.Count > 0;
}

/// Gets currently selected StreamCapability
private MLCamera.StreamCapability GetStreamCapability()
{
    foreach (var streamCapability in streamCapabilities)
    {
        return streamCapability;
    }

    return streamCapabilities[0];
}

void Update()
{
    if (Keyboard.current.spaceKey.wasPressedThisFrame && captureCamera != null)
    {
        // Check for initialized stream capabilities
        if (streamCapabilities == null || !streamCapabilities.Any())
        {
            // Stream capabilities not initialized
            return;
        }

        // Initialize CaptureConfig
        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig
        {
            CaptureFrameRate = MLCamera.CaptureFrameRate._15FPS, // You can adjust this according to your needs
            StreamConfigs = new MLCamera.CaptureStreamConfig[]
            {
            new MLCamera.CaptureStreamConfig()
            {
                OutputFormat = MLCamera.OutputFormat.JPEG,
                CaptureType = MLCamera.CaptureType.Image,
                Width = 1920,
                Height = 1080
            }
            }
        };

        // Prepare for capture
        MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

        if (!result.IsOk)
        {
            // Failed to prepare the camera
            return;
        }

        // Trigger auto-exposure and auto-white-balance convergence
        captureCamera.PreCaptureAEAWB();

        // Capture the image
        result = captureCamera.CaptureImage(1);
        if (!result.IsOk)
        {
            // Image capture failed
            return;
        }
    }
}

void OnDestroy()
{
    if (captureCamera != null)
    {
        captureCamera.OnRawImageAvailable -= OnCaptureRawImageComplete;
        captureCamera.Disconnect();
    }
}

}

1 Like

I've added more checking and more explicit stream configuration, below. Logcat is showing two streams as before:

Info ml_camera_client  Capture device state = Idle numStreamConfigs = 1 
Info ml_camera_client  Stream 0 type = Image resolution = 1080P state = Configured 
Info ml_camera_client Stream 1 type = Image resiolution =  state = Invalid

It appears from the above the stream 0 is configured for still images; I am trying to specify the stream via [0] or [1] here:
MLCamera.StreamCapability selected = streamCapabilities[0];
and
selectedStreamCapability = streamCapabilityOptions[0];

In either case, captureCamera.CaptureImage() fails.

Full code follows:

using UnityEngine;
using UnityEngine.XR.MagicLeap;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.InputSystem;
using System;
using static UnityEngine.XR.MagicLeap.MLCameraBase;
using System.Collections.Generic;
using UnityEngine.Playables;

public class SimpleCameraCapture : MonoBehaviour
{
[SerializeField] private int width = 640;
[SerializeField] private int height = 480;
[SerializeField] private MLCamera.CaptureFrameRate frameRate = MLCamera.CaptureFrameRate._15FPS;
[SerializeField] private List streamCapabilityOptions; // To hold the dropdown options
[SerializeField] private string selectedStreamCapability; // To hold the selected dropdown option

private MLCamera captureCamera;
private MLCamera.CaptureStreamConfig captureStreamConfig; // Declare as member variable
private bool cameraDeviceAvailable = false;
private List<MLCamera.StreamCapability> streamCapabilities;

private void Start()
{
    MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();
    permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
    permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
    permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDeniedAndDontAskAgain;

    MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
}

private void OnPermissionGranted(string permission)
{
    StartCoroutine(EnableMLCamera());
}

private void OnPermissionDenied(string permission)
{
    Debug.Log("Permission denied: " + permission);
}

private void OnPermissionDeniedAndDontAskAgain(string permission)
{
    Debug.Log("Permission denied and don't ask again: " + permission);
}

private IEnumerator EnableMLCamera()
{
    while (!cameraDeviceAvailable)
    {
        MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
        if (!(result.IsOk && cameraDeviceAvailable))
        {
            yield return new WaitForSeconds(1.0f);
        }
    }

    Debug.Log("Camera device available");
    ConnectCamera();
}

/// Update the streamCapabilityOptions based on the available streamCapabilities
private void UpdateStreamCapabilityOptions()
{
    streamCapabilityOptions = new List<string>();
    foreach (var cap in streamCapabilities)
    {
        streamCapabilityOptions.Add($"{cap.Width}x{cap.Height}");
    }
}

private void ConnectCamera()
{
    MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
    context.Flags = MLCamera.ConnectFlag.CamOnly;
    context.EnableVideoStabilization = false;

    captureCamera = MLCamera.CreateAndConnect(context);

    if (captureCamera != null)
    {
        Debug.Log("ML2 RGB Camera connected!");

        // Initialize stream capabilities and dropdown options
        if (!GetImageStreamCapabilities())
        {
            Debug.LogError("Failed to get image stream capabilities.");
            // Handle this error appropriately
        }
        else
        {
            if (streamCapabilityOptions.Count > 0)
            {
                // Update the selectedStreamCapability based on the available options
                // 0 or 1
                selectedStreamCapability = streamCapabilityOptions[0];
            }
        }
    }
}

private void OnCaptureRawImageComplete(MLCamera.CameraOutput output, MLCamera.ResultExtras resultExtras, MLCamera.Metadata metadataHandle)
{
    Debug.Log("RGB image callback!");
}

/// Gets the Image stream capabilities
/// True if MLCamera returned at least one stream capability
private bool GetImageStreamCapabilities()
{
    var result =
        captureCamera.GetStreamCapabilities(out MLCamera.StreamCapabilitiesInfo[] streamCapabilitiesInfo);

    if (!result.IsOk)
    {
        Debug.Log("Could not get Stream capabilities Info.");
        return false;
    }

    streamCapabilities = new List<MLCamera.StreamCapability>();
    streamCapabilityOptions = new List<string>(); // Initialize the list

    for (int i = 0; i < streamCapabilitiesInfo.Length; i++)
    {
        foreach (var streamCap in streamCapabilitiesInfo[i].StreamCapabilities)
        {
            streamCapabilities.Add(streamCap);
            streamCapabilityOptions.Add($"{streamCap.Width}x{streamCap.Height}"); // Populate the options
        }
    }

    // Set a default value for the selected capability if possible
    if (streamCapabilityOptions.Count > 0)
    {
        Debug.Log("Selected stream capability set to [1] -------------------------------");
        selectedStreamCapability = streamCapabilityOptions[1];
    }

    return streamCapabilities.Count > 0;
}

private MLCamera.StreamCapability GetStreamCapability()
{
    // Check if streamCapabilities has at least two elements
    // 1 or 2
    if (streamCapabilities.Count < 2)
    {
        Debug.LogError("Insufficient stream capabilities.");
    }

    //  // Default to Stream 0 or 1
    MLCamera.StreamCapability selected = streamCapabilities[1];
    foreach (var streamCapability in streamCapabilities)
    {
        if ($"{streamCapability.Width}x{streamCapability.Height}" == selectedStreamCapability)
        {
            selected = streamCapability;
            break;
        }
    }

    Debug.Log($"Returning stream capability for still images: Width = {selected.Width}, Height = {selected.Height}");

    return selected;
}

void Update()
{
    if (Keyboard.current.spaceKey.wasPressedThisFrame && captureCamera != null)
    {
        // Check for initialized stream capabilities
        if (streamCapabilities == null || !streamCapabilities.Any())
        {
            // Stream capabilities not initialized
            Debug.Log("Stream capabilities not initialized ---------------------------------");
            return;
        }

        // Initialize CaptureConfig
        Debug.Log("Stream capabilities initialized ---------------------------------");

        // Get the stream capability for Stream 1 (still images)
        MLCamera.StreamCapability selectedCapability = GetStreamCapability();

        int captureWidth = (selectedCapability.Width > 0) ? selectedCapability.Width : width;
        int captureHeight = (selectedCapability.Height > 0) ? selectedCapability.Height : height;

        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig
        {
            CaptureFrameRate = frameRate,
            StreamConfigs = new MLCamera.CaptureStreamConfig[]
            {
        new MLCamera.CaptureStreamConfig()
        {
            OutputFormat = MLCamera.OutputFormat.JPEG,
            CaptureType = MLCamera.CaptureType.Image,
            Width = captureWidth,  // Use either the stream's width or a default
            Height = captureHeight  // Use either the stream's height or a default
        }
            }
        };

        // Prepare for capture
        Debug.Log("Prepare Capture ---------------------------------");
        MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

        if (MLResult.DidNativeCallSucceed(result.Result, nameof(captureCamera.PrepareCapture)))
        {
            // Trigger auto-exposure and auto-white-balance convergence
            Debug.Log("Trigger auto-exposure and auto-white-balance convergence ---------------------------------");
            captureCamera.PreCaptureAEAWB();
        }
        else
        {
            Debug.Log("pre capture failed ---------------------------------");
            // Failed to prepare the camera
            return;
        }

        // Capture the image
        Debug.Log("Capture RGB ---------------------------------");
        result = captureCamera.CaptureImage(1);
        if (!result.IsOk)
        {
            // Image capture failed
            return;
        }
    }
}

void OnDestroy()
{
    if (captureCamera != null)
    {
        captureCamera.OnRawImageAvailable -= OnCaptureRawImageComplete;
        captureCamera.Disconnect();
    }
}

}

1 Like

@kevincain It looks like you found a bug or a gap in our documentation. I reported this internally so we can get it fixed ASAP.

  1. It appears that if frame rate is set to 15fps the camera capture will not work. Worse, I noticed that subsequent runs of the application would not run (even when the fps was set to 30. Until we resolve this please set the capture frame rate to 30FPS. CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS
  2. I also noticed that the JPEG images does not update completely, leaving a ghost image. This might be related to my test environment, but if you experience the same issue please let me know. As a workaround I recommend taking 3-4 images.

Here is a script that I used on my device and was able to capture JPEG images from the main camera

using System.Collections;
using MagicLeap.Examples;
using UnityEngine;
using UnityEngine.XR.MagicLeap;

/// <summary>
/// This script provides an example of capturing images using the Magic Leap 2's Main Camera stream and Magic Leap 2  Camera APIs
/// It handles permissions, connects to the camera, captures images at regular intervals, and sends the result data to the Camera Capture visualizer.
/// </summary>
public class ImageCaptureExample : MonoBehaviour
{
    [SerializeField, Tooltip("Refrence to the Visualizer gameobject that will display the image stream.")]
    private CameraCaptureVisualizer imageVisualizer = null;

    // Indicates if the camera is connected
    private bool isCameraConnected;
    // Reference to the MLCamera object that will access the device's camera
    private MLCamera colorCamera;
    // Indicates if the camera device is available
    private bool cameraDeviceAvailable;

    // Reference to the MLPermissions.Callbacks object that will handle the permission requests and responses
    private readonly MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();

    // Register the permission callbacks in the Awake method
    private void Awake()
    {
        permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
        permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
        permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDenied;
    }

    // Request the camera permission in the Start method
    void Start()
    {
        MLResult result = MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
        if (!result.IsOk)
        {
            Debug.LogErrorFormat( "Error: ImageCaptureExample failed to get requested permissions, disabling script. Reason: {0}", result);
            enabled = false;
        }

        // Hide the image visualizer until an image is captured
        imageVisualizer.HideRenderer();
    }

    /// <summary>
    /// Stop the camera, unregister callbacks, and stop input and permissions APIs.
    /// </summary>
    void OnDisable()
    {
        permissionCallbacks.OnPermissionGranted -= OnPermissionGranted;
        permissionCallbacks.OnPermissionDenied -= OnPermissionDenied;
        permissionCallbacks.OnPermissionDeniedAndDontAskAgain -= OnPermissionDenied;

        if (colorCamera != null && isCameraConnected)
        {
            DisableMLCamera();
        }
    }

    // Handle the permission denied event by logging an error message
    private void OnPermissionDenied(string permission)
    {
        MLPluginLog.Error($"{permission} denied, test won't function.");
    }

    // Handle the permission granted event by starting two coroutines:
    // one to enable the camera and one to capture images in a loop
    private void OnPermissionGranted(string permission)
    {
        StartCoroutine(EnableMLCamera());
        StartCoroutine(CaptureImagesLoop());
    }

    // Define a coroutine that will enable the camera by checking its availability,
    // creating and connecting it, and preparing it for capture
    private IEnumerator EnableMLCamera()
    { 
        // Loop until the camera device is available
        while (!cameraDeviceAvailable)
        {
            MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
            if (!(result.IsOk && cameraDeviceAvailable))
            {
                // Wait until camera device is available
                yield return new WaitForSeconds(1.0f);
            }
        }

        Debug.Log("Camera device available.");

        // Create and connect the camera with a context that enables video stabilization and camera only capture
        ConnectCamera();

        // Wait until the camera is connected since this script uses the async "CreateAndConnectAsync" Method to connect to the camera.
        while (!isCameraConnected)
        {
                yield return null;
        }
        
        Debug.Log("Camera device connected.");

        // Prepare the camera for capture with a configuration that specifies JPEG output format, frame rate, and resolution
        ConfigureAndPrepareCapture();
    }

    // Define a coroutine that will capture images every 3 seconds if the camera is connected and supports image capture type. 
    // The image is then captured async
    private IEnumerator CaptureImagesLoop()
    {
        while (true)
        {
            if (isCameraConnected)
            {
                if (MLCamera.IsCaptureTypeSupported(colorCamera, MLCamera.CaptureType.Image))
                {
                    CaptureImage();
                }
            }
            yield return new WaitForSeconds(3.0f);
        }
    }

    // Define an async method that will create and connect the camera with a context that enables video stabilization and Video only capture
    private async void ConnectCamera()
    {
        MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
        context.EnableVideoStabilization = false;
        context.Flags = MLCameraBase.ConnectFlag.CamOnly;

        // Use the CreateAndConnectAsync method to create and connect the camera asynchronously
        colorCamera = await MLCamera.CreateAndConnectAsync(context);

        if (colorCamera != null)
        {
            // Register a callback for when a raw image is available after capture
            colorCamera.OnRawImageAvailable += OnCaptureRawImageComplete;
            isCameraConnected = true;
        }
    }

    // Define an async method that will prepare the camera for capture with a configuration that specifies
    // JPEG output format, frame rate, and resolution
    private async void ConfigureAndPrepareCapture()
    {
        MLCamera.CaptureStreamConfig[] imageConfig = new MLCamera.CaptureStreamConfig[1]
        {
            new MLCamera.CaptureStreamConfig()
            {
                OutputFormat = MLCamera.OutputFormat.JPEG,
                CaptureType = MLCamera.CaptureType.Image,
                Width = 1920,
                Height = 1080
            }
        };

        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig()
        {
            StreamConfigs = imageConfig,
            CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS
        };

        // Use the PrepareCapture method to set the capture configuration and get the metadata handle
        MLResult prepareCaptureResult = colorCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);
        
        if (!prepareCaptureResult.IsOk)
        {
            return;
        }

        // Use the PreCaptureAEAWBAsync method to perform auto exposure and auto white balance asynchronously before capture
        await colorCamera.PreCaptureAEAWBAsync();

    }

    /// <summary>
    /// Disconnects the MLCamera if it was ever created or connected.
    /// </summary>
    private void DisableMLCamera()
    {
        if (colorCamera != null)
        {
            colorCamera.Disconnect();

            // Explicitly set to false here as the disconnect was attempted.
            isCameraConnected = false;
        }
    }


    /// <summary>
    /// Takes a picture async with the device's camera using the camera's CaptureImageAsync method.
    /// </summary>
    private async void CaptureImage()
    {
        var result = await colorCamera.CaptureImageAsync();

        if (!result.IsOk)
        {
            Debug.LogError("Image capture failed!");
        }
        else
        {
            // show image on top
            imageVisualizer.DisplayCapture(MLCamera.OutputFormat.JPEG, false);
        }
    }

    /// <summary>
    /// Handles the event of a new image getting captured and visualizes it with the Image Visualizer
    /// </summary>
    /// <param name="capturedImage">Captured frame.</param>
    /// <param name="resultExtras">Results Extras.</param>
    private void OnCaptureRawImageComplete(MLCamera.CameraOutput capturedImage, MLCamera.ResultExtras resultExtras, MLCamera.Metadata metadataHandle)
    {
        Debug.Log("DisplayImage");
        imageVisualizer.OnCaptureDataReceived(resultExtras, capturedImage);
    }

}
1 Like

Thanks @kbabilinski, it's great to hear that you can duplicate some of our troubles. Like you, I have seen that the RGB camera becomes unresponsive after an error of the kind I describe in this thread. I've been rebooting the ML2 each time to clear it, and testing with the Unity Examples to ensure the camera is unblocked before testing.

I'm happy to say that I can duplicate your results in the code you provided. I haven't seen the JPEG ghosting you mentioned, but have just run a few tests so far.

I'll leave this thread open so you or another Leaper can post once the bug checking has some results.

To conclude, did you get one of our variants (e.g., methods with no Vizualizer) to produce an image? If so, could you post that code here? I tried both of the full listings I provided above with your suggested 30FPS setting: MLCamera.CaptureFrameRate._30FPS.

At runtime, after the captureCamera.PreCaptureAEAWB() step I see the error:

Error Camera-Device endConfigure fail Status(-8, EX_SERVICE_SPECIFIC): '3: endConfigure:739: Camera 0: Unsupported set of inputs/outputs provided'

After this the camera stumbles into its unresponsive state, first showing the following and then other errors:

Warn ml_camera_client Image capture timed out this should never happen

Thanks again for your help!

1 Like

Unfortunately, I will not be able to debug your code directly, but I think the issue is how you are obtaining the stream capabilities and setting the stream config. I recommend looking at the MLCamera sample on the developer portal and changing the configuration from Video to Image. Then replacing the StartVideoCapture to the CaptureImage function.

Thanks @kbabilinski, I appreciate that and wasn't asking you to debug what I wrote as I know that is separate from the general issue. But, again, thanks and I'm all ears when there is action on fixing the bug.

I haven't been able to duplicate the artifacts you saw with JPEG capture but will respond here if I do so that can also be addressed more fully.

2 Likes

Thanks again @kbabilinski,

I can confirm the ghosting you reported from your testing, as shown in the following images. Note that if you display the color images sequentially, you'll see that the 'ghost' artifact is in fact a composite of the last RGB cam image taken with the current, each having roughly 50% opacity. That suggests that the frame buffer isn't being cleared between captures, and you may want to pass that along to the developers having a look at this.

Also, I see the JPEG images come in inverted as shown below -- did you see that in your tests?

Also included is a synchronous world camera capture (taken at the same time as the first RGB cam image), for reference.




Thank you for confirming. You can flip the image using the MLCamera.FlipFrameVertically(); to flip the image before displaying it.

I have also reported the bug to our SDK team.

Thanks @kbabilinski,

Since JPEG output has the confirmed ghosting bug(s) noted above using your provided code above, I've changed: OutputFormat = MLCamera.OutputFormat.JPEG to:
OutputFormat = MLCamera.OutputFormat.YUV_420_888
and also tried: OutputFormat = MLCamera.OutputFormat.RGBA_8888

Neither YUV or RGBA capture works, yielding the following:

Error Camera-Device endConfigure fail Status(-8, EX_SERVICE_SPECIFIC): '3: endConfigure:739: Camera 0: Unsupported set of inputs/outputs provided'

Do we need to make other alterations to capture YUV/RGB, and are both supported?

  • I've accommodated the format changes in file output, but I see the failing is on polling the camera not my write methods.
  • I've tried other XY resolutions in the capture setup, with the same result
  • The RGB cam is answering the callback but the image is null

Your edited code follows for reference:

private async void ConfigureAndPrepareCapture()
{
    MLCamera.CaptureStreamConfig[] imageConfig = new MLCamera.CaptureStreamConfig[1]
    {
        new MLCamera.CaptureStreamConfig()
        {
            //OutputFormat = MLCamera.OutputFormat.JPEG,
            OutputFormat = MLCamera.OutputFormat.YUV_420_888,
            CaptureType = MLCamera.CaptureType.Image,
            Width = 1920,
            Height = 1080
        }
    };

    MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig()
    {
        StreamConfigs = imageConfig,
        CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS
    };

    // Use the PrepareCapture method to set the capture configuration and get the metadata handle
    MLResult prepareCaptureResult = colorCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

    if (!prepareCaptureResult.IsOk)
    {
        return;
    }

    // Use the PreCaptureAEAWBAsync method to perform auto exposure and auto white balance asynchronously before capture
    await colorCamera.PreCaptureAEAWBAsync();

}

I still see the errors posted above for RGB output, but have been able to get data back from the RGB Cam in YUV mode.

However, importantly my tests show the same ghosting artifact discussed above, where the prior image is composited with the current frame output. To illustrate, here is the Y (lum.) plane from the YUV output: note the ghosting:

I also note that running the same YUV capture code works mostly, but sometimes results in system crashes:

Sorry, I missed the last question. To answer your question you cannot capture two streams of the same type at the same time. However, I wanted to mention that we do not do any additional processing between image and video frames. As a workaround I recommend using the Image frame captured from a video stream.

With the exception of the issue you experienced when running the camera at 15 fps, the compatible configurations are listed here :

I wanted for follow up on this. The bug is part of the Image Capture Example script. To fix the ghosting issue, call PreCaptureAEAWB() before CaptureImage() . I have linked the updated script below incase anyone references this thread in the future

Async Camera Capture Script


using System.Collections;
using MagicLeap.Examples;
using UnityEngine;
using UnityEngine.XR.MagicLeap;

/// <summary>
/// This script provides an example of capturing images using the Magic Leap 2's Main Camera stream and Magic Leap 2  Camera APIs
/// It handles permissions, connects to the camera, captures images at regular intervals, and sends the result data to the Camera Capture visualizer.
/// </summary>
public class ImageCaptureExample : MonoBehaviour
{
    [SerializeField, Tooltip("Refrence to the Visualizer gameobject that will display the image stream.")]
    private CameraCaptureVisualizer imageVisualizer = null;

    // Indicates if the camera is connected
    private bool isCameraConnected;
    // Reference to the MLCamera object that will access the device's camera
    private MLCamera colorCamera;
    // Indicates if the camera device is available
    private bool cameraDeviceAvailable;

    // Reference to the MLPermissions.Callbacks object that will handle the permission requests and responses
    private readonly MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();

    // Register the permission callbacks in the Awake method
    private void Awake()
    {
        permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
        permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
        permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDenied;
    }

    // Request the camera permission in the Start method
    void Start()
    {
        MLResult result = MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
        if (!result.IsOk)
        {
            Debug.LogErrorFormat( "Error: ImageCaptureExample failed to get requested permissions, disabling script. Reason: {0}", result);
            enabled = false;
        }

        // Hide the image visualizer until an image is captured
        imageVisualizer.HideRenderer();
    }

    /// <summary>
    /// Stop the camera, unregister callbacks, and stop input and permissions APIs.
    /// </summary>
    void OnDisable()
    {
        permissionCallbacks.OnPermissionGranted -= OnPermissionGranted;
        permissionCallbacks.OnPermissionDenied -= OnPermissionDenied;
        permissionCallbacks.OnPermissionDeniedAndDontAskAgain -= OnPermissionDenied;

        if (colorCamera != null && isCameraConnected)
        {
            DisableMLCamera();
        }
    }

    // Handle the permission denied event by logging an error message
    private void OnPermissionDenied(string permission)
    {
        MLPluginLog.Error($"{permission} denied, test won't function.");
    }

    // Handle the permission granted event by starting two coroutines:
    // one to enable the camera and one to capture images in a loop
    private void OnPermissionGranted(string permission)
    {
        StartCoroutine(EnableMLCamera());
        StartCoroutine(CaptureImagesLoop());
    }

    // Define a coroutine that will enable the camera by checking its availability,
    // creating and connecting it, and preparing it for capture
    private IEnumerator EnableMLCamera()
    { 
        // Loop until the camera device is available
        while (!cameraDeviceAvailable)
        {
            MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
            if (!(result.IsOk && cameraDeviceAvailable))
            {
                // Wait until camera device is available
                yield return new WaitForSeconds(1.0f);
            }
        }

        Debug.Log("Camera device available.");

        // Create and connect the camera with a context that enables video stabilization and camera only capture
        ConnectCamera();

        // Wait until the camera is connected since this script uses the async "CreateAndConnectAsync" Method to connect to the camera.
        while (!isCameraConnected)
        {
                yield return null;
        }
        
        Debug.Log("Camera device connected.");

        // Prepare the camera for capture with a configuration that specifies JPEG output format, frame rate, and resolution
        ConfigureAndPrepareCapture();
    }

    // Define a coroutine that will capture images every 3 seconds if the camera is connected and supports image capture type. 
    // The image is then captured async
    private IEnumerator CaptureImagesLoop()
    {
        while (true)
        {
            if (isCameraConnected)
            {
                if (MLCamera.IsCaptureTypeSupported(colorCamera, MLCamera.CaptureType.Image))
                {
                    CaptureImage();
                }
            }
            yield return new WaitForSeconds(3.0f);
        }
    }

    // Define an async method that will create and connect the camera with a context that enables video stabilization and Video only capture
    private async void ConnectCamera()
    {
        MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
        context.EnableVideoStabilization = false;
        context.Flags = MLCameraBase.ConnectFlag.CamOnly;

        // Use the CreateAndConnectAsync method to create and connect the camera asynchronously
        colorCamera = await MLCamera.CreateAndConnectAsync(context);

        if (colorCamera != null)
        {
            // Register a callback for when a raw image is available after capture
            colorCamera.OnRawImageAvailable += OnCaptureRawImageComplete;
            isCameraConnected = true;
        }
    }

    // Define an async method that will prepare the camera for capture with a configuration that specifies
    // JPEG output format, frame rate, and resolution
    private async void ConfigureAndPrepareCapture()
    {
        MLCamera.CaptureStreamConfig[] imageConfig = new MLCamera.CaptureStreamConfig[1]
        {
            new MLCamera.CaptureStreamConfig()
            {
                OutputFormat = MLCamera.OutputFormat.JPEG,
                CaptureType = MLCamera.CaptureType.Image,
                Width = 1920,
                Height = 1080
            }
        };

        MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig()
        {
            StreamConfigs = imageConfig,
            CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS
        };

        // Use the PrepareCapture method to set the capture configuration and get the metadata handle
        MLResult prepareCaptureResult = colorCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);
        
        if (!prepareCaptureResult.IsOk)
        {
            return;
        }

    }

    /// <summary>
    /// Disconnects the MLCamera if it was ever created or connected.
    /// </summary>
    private void DisableMLCamera()
    {
        if (colorCamera != null)
        {
            colorCamera.Disconnect();

            // Explicitly set to false here as the disconnect was attempted.
            isCameraConnected = false;
        }
    }

    /// <summary>
    /// Takes a picture async with the device's camera using the camera's CaptureImageAsync method.
    /// </summary>
    private async void CaptureImage()
    {
        // Use the PreCaptureAEAWBAsync method to perform auto exposure and auto white balance asynchronously before capture
        var aeawbResult = await colorCamera.PreCaptureAEAWBAsync();
         if (!aeawbResult.IsOk)
        {
            Debug.LogError("Image capture failed!");
        }
        else
        {
            var result = await colorCamera.CaptureImageAsync();

            if (!result.IsOk)
            {
                Debug.LogError("Image capture failed!");
            }
            else
            {
                // show image on top
                imageVisualizer.DisplayCapture(MLCamera.OutputFormat.JPEG, false);
            }
        }
    }

    /// <summary>
    /// Handles the event of a new image getting captured and visualizes it with the Image Visualizer
    /// </summary>
    /// <param name="capturedImage">Captured frame.</param>
    /// <param name="resultExtras">Results Extras.</param>
    private void OnCaptureRawImageComplete(MLCamera.CameraOutput capturedImage, MLCamera.ResultExtras resultExtras, MLCamera.Metadata metadataHandle)
    {
        Debug.Log("DisplayImage");
        imageVisualizer.OnCaptureDataReceived(resultExtras, capturedImage);
    }

}

And here is the simpler non async version based on the script you shared earlier

using System;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using MagicLeap.Examples;
using static UnityEngine.XR.MagicLeap.MLCameraBase;

public class SimpleCameraCapture : MonoBehaviour
{
    [SerializeField] private int width = 640;
    [SerializeField] private int height = 480;
    [SerializeField] private MLCamera.CaptureFrameRate frameRate = MLCamera.CaptureFrameRate._30FPS;

    private MLCameraBase.StreamCapability targetStreamCapability;

    private MLCamera captureCamera;
    private MLCamera.CaptureStreamConfig captureStreamConfig;
    private bool cameraDeviceAvailable = false;
    private List<MLCamera.StreamCapability> streamCapabilities;

    // This is was autogenerated and allows developers to create a dynamic
    // instance of an InputActionAsset which includes predefined action maps
    // that correspond to all of the Magic Leap 2's input.
    private MagicLeapInputs _magicLeapInputs;

    // This class is an Action Map and was autogenerated by the Unity Input
    // System and includes predefined bindings for the Magic Leap 2 Controller
    // Input Events.
    private MagicLeapInputs.ControllerActions _controllerActions;

    public CameraCaptureVisualizer Visualizer;

    private void Start()
    {

        _magicLeapInputs = new MagicLeapInputs();
        _magicLeapInputs.Enable();

        //Initialize the ControllerActions using the Magic Leap Input
        _controllerActions = new MagicLeapInputs.ControllerActions(_magicLeapInputs);

        MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();
        permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
        permissionCallbacks.OnPermissionDenied += OnPermissionDenied;
        permissionCallbacks.OnPermissionDeniedAndDontAskAgain += OnPermissionDeniedAndDontAskAgain;

        MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
    }

    private void OnPermissionGranted(string permission)
    {
        StartCoroutine(EnableMLCamera());
    }

    private void OnPermissionDenied(string permission)
    {
        Debug.Log("Permission denied: " + permission);
    }

    private void OnPermissionDeniedAndDontAskAgain(string permission)
    {
        Debug.Log("Permission denied and don't ask again: " + permission);
    }

    private IEnumerator EnableMLCamera()
    {
        while (!cameraDeviceAvailable)
        {
            MLResult result = MLCamera.GetDeviceAvailabilityStatus(MLCamera.Identifier.Main, out cameraDeviceAvailable);
            if (!(result.IsOk && cameraDeviceAvailable))
            {
                yield return new WaitForSeconds(1.0f);
            }
        }

        Debug.Log("Camera device available");
        ConnectCamera();
    }

    private void ConnectCamera()
    {
        MLCamera.ConnectContext context = MLCamera.ConnectContext.Create();
        context.Flags = MLCamera.ConnectFlag.CamOnly;
        context.EnableVideoStabilization = false;

        captureCamera = MLCamera.CreateAndConnect(context);

        if (captureCamera != null)
        {
            Debug.Log("ML2 RGB Camera connected!");
            if (GetStreamCapabilityWBestFit(out MLCameraBase.StreamCapability streamCapability))
            {
                targetStreamCapability = streamCapability;
                captureCamera.OnRawImageAvailable += OnCaptureRawImageComplete;
            }
            else
            {
                Debug.LogError("Failed to get image stream capabilities.");
            }
        }
    }

    private void OnCaptureRawImageComplete(MLCamera.CameraOutput output, MLCamera.ResultExtras resultExtras,
        MLCamera.Metadata metadataHandle)
    {
        Debug.Log("RGB image callback!");
        Visualizer.OnCaptureDataReceived(resultExtras, output);
        Visualizer.DisplayCapture(MLCamera.OutputFormat.JPEG,false);
        if (output.Format != MLCamera.OutputFormat.YUV_420_888)
        {
            string fileName = DateTime.Now.ToString("MM_dd_yyyy__HH_mm_ss") + ".jpg";
            var recordedFilePath = System.IO.Path.Combine(Application.persistentDataPath, fileName);
            try
            {
                File.WriteAllBytes(recordedFilePath, output.Planes[0].Data);
                Debug.Log($"\nSaved to {recordedFilePath}");
            }
            catch (Exception e)
            {
                Debug.LogError(e.Message);
            }
        }
    }

    /// Gets the Image stream capabilities
    /// True if MLCamera returned at least one stream capability
    private bool GetStreamCapabilityWBestFit(out MLCameraBase.StreamCapability streamCapability)
    {
        streamCapability = default;

        if (captureCamera == null)
        {
            Debug.Log("Could not get Stream capabilities Info. No Camera Connected");
            return false;
        }

        MLCameraBase.StreamCapability[] streamCapabilities =
            MLCameraBase.GetImageStreamCapabilitiesForCamera(captureCamera, MLCameraBase.CaptureType.Image);

        if (streamCapabilities.Length <= 0)
            return false;

        if (MLCameraBase.TryGetBestFitStreamCapabilityFromCollection(streamCapabilities, width,
                height, MLCameraBase.CaptureType.Image,
                out streamCapability))
        {
            Debug.Log($"Stream: {streamCapability} selected with best fit.");
            return true;
        }

        Debug.Log($"No best fit found. Stream: {streamCapabilities[0]} selected by default.");
        streamCapability = streamCapabilities[0];
        return true;
    }

    void Update()
    {
       
        if (_controllerActions.Bumper.WasPressedThisFrame() && captureCamera != null)
        {
            // Initialize CaptureConfig
            Debug.Log("Stream capabilities initialized ---------------------------------");

            MLCamera.StreamCapability selectedCapability = targetStreamCapability;

            MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig
            {
                CaptureFrameRate = frameRate,
                StreamConfigs = new MLCamera.CaptureStreamConfig[1]{ MLCamera.CaptureStreamConfig.Create(selectedCapability, OutputFormat.JPEG)}
            };

            // Prepare for capture
            Debug.Log("Prepare Capture ---------------------------------");
            MLResult result = captureCamera.PrepareCapture(captureConfig, out MLCamera.Metadata _);

            if (MLResult.DidNativeCallSucceed(result.Result, nameof(captureCamera.PrepareCapture)))
            {
                // Trigger auto-exposure and auto-white-balance convergence
                Debug.Log("Trigger auto-exposure and auto-white-balance convergence ---------------------------------");
                captureCamera.PreCaptureAEAWB();
            }
            else
            {
                Debug.Log("pre capture failed ---------------------------------");
                // Failed to prepare the camera
                return;
            }

            // Capture the image
            Debug.Log("Capture RGB ---------------------------------");
            captureCamera.CaptureImageAsync();
            if (!result.IsOk)
            {
                Debug.Log(" Image capture failed " + result);
                return;
            }
        }
    }

    void OnDestroy()
    {
        if (captureCamera != null)
        {
            captureCamera.OnRawImageAvailable -= OnCaptureRawImageComplete;
            captureCamera.Disconnect();
        }
    }
}
1 Like

Thanks @kbabilinski -- I appreciate it. A quick note -- recall the frames are coming in flipped on the Y axis. It's simple to fix, but of course the method shouldn't do this.

This topic was automatically closed 15 days after the last reply. New replies are no longer allowed.