Sorry, I was looking at a different demo. You can see the World Camera Example on the developer portal that queries the capabilities. Here is a version of the script that allows you to change the target exposure. Note that changing the target exposure is only available on the second stream, not the first one.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using UnityEngine.XR.OpenXR;
using MagicLeap.OpenXR.Features.PixelSensors;
/// <summary>
/// Captures images from a specified Magic Leap pixel sensor (e.g., the World Center camera),
/// applies custom configurations (e.g., manual or auto exposure), retrieves per-frame metadata,
/// and displays the images using the assigned renderers. Handles permission requests, sensor
/// initialization, stream configuration, data retrieval, metadata logging, and cleanup.
/// NOTE: per the Magic Leap docs, changing the auto-exposure target is only supported on the
/// second world-camera stream (stream 1), not the first.
/// </summary>
public class WorldCameraPixelSensor : MonoBehaviour
{
    [Header("Sensor Configuration")]
    [Tooltip("Set to one of the supported sensor names: World Center, World Left, World Right")]
    [SerializeField]
    private string pixelSensorName = "World Center";

    [Tooltip("Enable or disable stream 0")]
    [SerializeField]
    private bool useStream0 = true;

    [Tooltip("Enable or disable stream 1")]
    [SerializeField]
    private bool useStream1 = false;

    [Tooltip("Use custom properties for sensor configuration")]
    [SerializeField]
    private bool useCustomProperties = true;

    [Tooltip("Use manual exposure settings if true, otherwise use auto exposure")]
    [SerializeField]
    private bool useManualExposureSettings = false;

    [Header("Auto Exposure Settings")]
    [Tooltip("Exposure Mode: EnvironmentTracking (0) or ProximityIrTracking (1)")]
    [SerializeField]
    private PixelSensorAutoExposureMode autoExposureMode = PixelSensorAutoExposureMode.EnvironmentTracking;

    [Tooltip("Auto Exposure Target Brightness (-5.0 to 5.0)")]
    [SerializeField]
    [Range(-5.0f, 5.0f)]
    private float autoExposureTargetBrightness = 0.0f;

    [Header("Manual Exposure Settings")]
    [Tooltip("Manual Exposure Time in microseconds (e.g., 8500)")]
    [SerializeField]
    private uint manualExposureTimeUs = 8500;

    [Tooltip("Analog Gain (e.g., 100, higher values increase brightness)")]
    [SerializeField]
    private uint analogGain = 100;

    [Header("Render Settings")]
    [Tooltip("Renderers to display the streams. The array size should match the number of streams used.")]
    [SerializeField]
    private Renderer[] streamRenderers = new Renderer[2];

    private const string requiredPermission = MLPermission.Camera;

    // One texture per stream, created lazily from the first valid frame.
    private Texture2D[] streamTextures = new Texture2D[2];

    // Sensor ID used to interact with the specific sensor; null until the sensor is found.
    private PixelSensorId? sensorId;

    // Stream indices that were selected for configuration (subset of {0, 1}).
    private readonly List<uint> configuredStreams = new List<uint>();

    // Reference to the Magic Leap Pixel Sensor OpenXR feature.
    private MagicLeapPixelSensorFeature pixelSensorFeature;

    // Capabilities that must always be configured for a stream to start.
    private readonly PixelSensorCapabilityType[] requiredCapabilities = new[]
    {
        PixelSensorCapabilityType.UpdateRate,
        PixelSensorCapabilityType.Format,
        PixelSensorCapabilityType.Resolution
    };

    // Capabilities applied only when manual exposure is requested.
    private readonly PixelSensorCapabilityType[] manualExposureCapabilities = new[]
    {
        PixelSensorCapabilityType.ManualExposureTime,
        PixelSensorCapabilityType.AnalogGain,
    };

    // Capabilities applied only when auto exposure is requested.
    private readonly PixelSensorCapabilityType[] autoExposureCapabilities = new[]
    {
        PixelSensorCapabilityType.AutoExposureMode,
        PixelSensorCapabilityType.AutoExposureTargetBrightness,
    };

    private void Start()
    {
        InitializePixelSensorFeature();
    }

    /// <summary>
    /// Initializes the Magic Leap Pixel Sensor Feature and requests the camera permission.
    /// Disables this component if the feature is unavailable.
    /// </summary>
    private void InitializePixelSensorFeature()
    {
        if (OpenXRSettings.Instance == null)
        {
            Debug.LogError("OpenXRSettings.Instance is null.");
            enabled = false;
            return;
        }
        pixelSensorFeature = OpenXRSettings.Instance.GetFeature<MagicLeapPixelSensorFeature>();
        if (pixelSensorFeature == null || !pixelSensorFeature.enabled)
        {
            Debug.LogError("Magic Leap Pixel Sensor Feature is not available or not enabled.");
            enabled = false;
            return;
        }
        // Request the necessary permission; initialization continues in the granted callback.
        MagicLeap.Android.Permissions.RequestPermission(
            requiredPermission,
            OnPermissionGranted, OnPermissionDenied, OnPermissionDenied);
    }

    /// <summary>
    /// Callback when a permission is granted; kicks off sensor discovery.
    /// </summary>
    /// <param name="permission">The permission that was granted.</param>
    private void OnPermissionGranted(string permission)
    {
        if (permission == requiredPermission)
        {
            Debug.Log($"Permission granted: {permission}");
            FindAndInitializeSensor();
        }
    }

    /// <summary>
    /// Callback when a permission is denied; disables this component.
    /// </summary>
    /// <param name="permission">The permission that was denied.</param>
    private void OnPermissionDenied(string permission)
    {
        if (permission == requiredPermission)
        {
            Debug.LogError($"Permission denied: {permission}");
            enabled = false;
        }
    }

    /// <summary>
    /// Finds the sensor whose name contains <see cref="pixelSensorName"/> and attempts
    /// to initialize it. Subscribes to availability changes so initialization can retry
    /// when the sensor becomes available later.
    /// </summary>
    private void FindAndInitializeSensor()
    {
        List<PixelSensorId> sensors = pixelSensorFeature.GetSupportedSensors();
        int index = sensors.FindIndex(x => x.SensorName.Contains(pixelSensorName));
        if (index < 0)
        {
            Debug.LogError($"{pixelSensorName} sensor not found.");
            enabled = false;
            return;
        }
        sensorId = sensors[index];
        // Unsubscribe before subscribing so repeated initialization (e.g., a second
        // permission grant) never registers the handler twice.
        pixelSensorFeature.OnSensorAvailabilityChanged -= OnSensorAvailabilityChanged;
        pixelSensorFeature.OnSensorAvailabilityChanged += OnSensorAvailabilityChanged;
        TryInitializeSensor();
    }

    /// <summary>
    /// Handles changes in sensor availability, retrying initialization when our
    /// target sensor becomes available.
    /// </summary>
    /// <param name="id">The sensor ID.</param>
    /// <param name="available">Whether the sensor is available.</param>
    private void OnSensorAvailabilityChanged(PixelSensorId id, bool available)
    {
        if (sensorId.HasValue && id.XrPath == sensorId.Value.XrPath && available)
        {
            Debug.Log($"Sensor became available: {id.SensorName}");
            TryInitializeSensor();
        }
    }

    /// <summary>
    /// Attempts to create the sensor; on success starts the configuration coroutine.
    /// On failure we rely on <see cref="OnSensorAvailabilityChanged"/> to retry.
    /// </summary>
    private void TryInitializeSensor()
    {
        if (!sensorId.HasValue)
        {
            Debug.LogError("Sensor ID is not set.");
            return;
        }
        if (pixelSensorFeature.CreatePixelSensor(sensorId.Value))
        {
            Debug.Log("Sensor created successfully.");
            StartCoroutine(ConfigureSensorStreams());
        }
        else
        {
            Debug.LogWarning("Failed to create sensor. Will retry when available.");
        }
    }

    /// <summary>
    /// Configures the selected sensor streams with the required capabilities (and the
    /// manual/auto exposure capabilities when custom properties are enabled), submits the
    /// configuration, requests all supported metadata, and starts streaming.
    /// </summary>
    private IEnumerator ConfigureSensorStreams()
    {
        if (!sensorId.HasValue)
        {
            Debug.LogError("Sensor ID was not set.");
            enabled = false;
            yield break;
        }
        uint streamCount = pixelSensorFeature.GetStreamCount(sensorId.Value);
        if ((useStream0 && streamCount < 1) || (useStream1 && streamCount < 2))
        {
            Debug.LogError("Target streams are not available from the sensor.");
            enabled = false;
            yield break;
        }
        configuredStreams.Clear();
        if (useStream0)
        {
            configuredStreams.Add(0);
        }
        if (useStream1)
        {
            configuredStreams.Add(1);
        }
        // Ensure that the number of renderers matches the number of configured streams.
        if (streamRenderers.Length < configuredStreams.Count)
        {
            Debug.LogError("Not enough stream renderers assigned for the configured streams.");
            enabled = false;
            yield break;
        }
        // Build the list of capabilities to configure.
        List<PixelSensorCapabilityType> targetCapabilities = new List<PixelSensorCapabilityType>(requiredCapabilities);
        if (useCustomProperties)
        {
            targetCapabilities.AddRange(useManualExposureSettings
                ? manualExposureCapabilities
                : autoExposureCapabilities);
        }
        // Iterate over each configured stream and apply capabilities.
        foreach (uint streamIndex in configuredStreams)
        {
            if (!pixelSensorFeature.GetPixelSensorCapabilities(sensorId.Value, streamIndex, out PixelSensorCapability[] capabilities))
            {
                Debug.LogError($"Failed to get capabilities for stream {streamIndex}.");
                enabled = false;
                yield break;
            }
            // HashSet of available capabilities for quick lookup.
            HashSet<PixelSensorCapabilityType> availableCapabilities = capabilities.Select(c => c.CapabilityType).ToHashSet();
            foreach (PixelSensorCapabilityType capabilityType in targetCapabilities)
            {
                if (!availableCapabilities.Contains(capabilityType))
                {
                    Debug.LogWarning($"Capability {capabilityType} is not available for stream {streamIndex}. Skipping.");
                    continue;
                }
                // Query the valid range for the capability before applying a value.
                if (!pixelSensorFeature.QueryPixelSensorCapability(sensorId.Value, capabilityType, streamIndex, out PixelSensorCapabilityRange range) || !range.IsValid)
                {
                    Debug.LogWarning($"Capability range for {capabilityType} is invalid or not supported for stream {streamIndex}. Skipping.");
                    continue;
                }
                PixelSensorConfigData configData = new PixelSensorConfigData(capabilityType, streamIndex);
                if (requiredCapabilities.Contains(capabilityType))
                {
                    // Apply the default value for the required capability.
                    configData = range.GetDefaultConfig(streamIndex);
                    pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
                    yield return null;
                }
                else if (capabilityType == PixelSensorCapabilityType.ManualExposureTime)
                {
                    // Guard: IntRange may be absent for a capability the sensor reports oddly.
                    if (!range.IntRange.HasValue)
                    {
                        Debug.LogWarning($"No integer range reported for {capabilityType} on stream {streamIndex}. Skipping.");
                        continue;
                    }
                    configData.IntValue = ClampUInt(manualExposureTimeUs, range.IntRange.Value.Min, range.IntRange.Value.Max);
                    pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
                    yield return null;
                }
                else if (capabilityType == PixelSensorCapabilityType.AnalogGain)
                {
                    if (!range.IntRange.HasValue)
                    {
                        Debug.LogWarning($"No integer range reported for {capabilityType} on stream {streamIndex}. Skipping.");
                        continue;
                    }
                    configData.IntValue = ClampUInt(analogGain, range.IntRange.Value.Min, range.IntRange.Value.Max);
                    pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
                    yield return null;
                }
                else if (capabilityType == PixelSensorCapabilityType.AutoExposureMode)
                {
                    // Only apply the mode if the stream explicitly supports it.
                    if (range.IntValues != null && range.IntValues.Contains((uint)autoExposureMode))
                    {
                        configData.IntValue = (uint)autoExposureMode;
                        pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
                        yield return null;
                    }
                    else
                    {
                        Debug.LogWarning($"Auto Exposure Mode {autoExposureMode} is not supported for stream {streamIndex}.");
                    }
                }
                else if (capabilityType == PixelSensorCapabilityType.AutoExposureTargetBrightness)
                {
                    if (!range.FloatRange.HasValue)
                    {
                        Debug.LogWarning($"No float range reported for {capabilityType} on stream {streamIndex}. Skipping.");
                        continue;
                    }
                    configData.FloatValue = Mathf.Clamp(autoExposureTargetBrightness, range.FloatRange.Value.Min, range.FloatRange.Value.Max);
                    pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
                    yield return null;
                }
            }
        }
        // Submit the accumulated configuration for all selected streams.
        PixelSensorAsyncOperationResult configureOperation = pixelSensorFeature.ConfigureSensor(sensorId.Value, configuredStreams.ToArray());
        yield return configureOperation;
        if (!configureOperation.DidOperationSucceed)
        {
            Debug.LogError("Failed to configure sensor with custom capabilities.");
            enabled = false;
            yield break;
        }
        Debug.Log("Sensor configured with custom capabilities successfully.");
        // Obtain all supported metadata types so each frame carries full metadata.
        Dictionary<uint, PixelSensorMetaDataType[]> supportedMetadataTypes = new Dictionary<uint, PixelSensorMetaDataType[]>();
        foreach (uint streamIndex in configuredStreams)
        {
            if (pixelSensorFeature.EnumeratePixelSensorMetaDataTypes(sensorId.Value, streamIndex, out PixelSensorMetaDataType[] metaDataTypes))
            {
                supportedMetadataTypes.Add(streamIndex, metaDataTypes);
            }
            else
            {
                Debug.LogWarning($"Failed to enumerate metadata types for stream {streamIndex}.");
            }
        }
        // Start the sensor streams with the requested metadata.
        PixelSensorAsyncOperationResult sensorStartAsyncResult = pixelSensorFeature.StartSensor(sensorId.Value, configuredStreams, supportedMetadataTypes);
        yield return sensorStartAsyncResult;
        if (!sensorStartAsyncResult.DidOperationSucceed)
        {
            Debug.LogError("Failed to start sensor streaming.");
            enabled = false;
            yield break;
        }
        Debug.Log("Sensor streaming started successfully.");
        StartCoroutine(ProcessSensorData());
    }

    /// <summary>
    /// Continuously pulls frames and metadata from every configured stream while the
    /// sensor reports Started status, updating the renderers and logging metadata.
    /// </summary>
    private IEnumerator ProcessSensorData()
    {
        while (sensorId.HasValue && pixelSensorFeature.GetSensorStatus(sensorId.Value) == PixelSensorStatus.Started)
        {
            foreach (uint stream in configuredStreams)
            {
                if (stream >= streamRenderers.Length)
                {
                    Debug.LogWarning($"Stream index {stream} is out of bounds for renderers.");
                    continue;
                }
                if (pixelSensorFeature.GetSensorData(
                        sensorId.Value,
                        stream,
                        out PixelSensorFrame frame,
                        out PixelSensorMetaData[] currentFrameMetaData,
                        Allocator.Temp,
                        shouldFlipTexture: true))
                {
                    // Sensor pose is available here if spatial alignment is needed.
                    Pose sensorPose = pixelSensorFeature.GetSensorPose(sensorId.Value);
                    ProcessFrame(frame, streamRenderers[stream], ref streamTextures[stream]);
                    ProcessMetadata(currentFrameMetaData);
                }
                else
                {
                    Debug.LogWarning($"Failed to get sensor data for stream {stream}.");
                }
            }
            yield return null;
        }
    }

    /// <summary>
    /// Uploads a sensor frame's first plane into the renderer's texture, (re)creating
    /// the texture when it is missing or the frame dimensions have changed.
    /// </summary>
    /// <param name="frame">The sensor frame.</param>
    /// <param name="targetRenderer">The renderer to update.</param>
    /// <param name="targetTexture">The texture to update (created on demand).</param>
    private void ProcessFrame(in PixelSensorFrame frame, Renderer targetRenderer, ref Texture2D targetTexture)
    {
        if (!frame.IsValid || targetRenderer == null || frame.Planes.Length == 0)
        {
            return;
        }
        TextureFormat textureFormat = GetTextureFormat(frame.FrameType);
        if (textureFormat == TextureFormat.R8 && frame.FrameType == PixelSensorFrameType.Yuv420888)
        {
            // YUV420888 requires a YUV->RGB conversion that this sample does not implement.
            return;
        }
        ref PixelSensorPlane plane = ref frame.Planes[0];
        // Recreate the texture if it is missing or the incoming frame size changed;
        // LoadRawTextureData throws when the buffer size does not match the texture.
        if (targetTexture == null
            || targetTexture.width != (int)plane.Width
            || targetTexture.height != (int)plane.Height)
        {
            targetTexture = new Texture2D((int)plane.Width, (int)plane.Height, textureFormat, false);
            targetRenderer.material.mainTexture = targetTexture;
        }
        targetTexture.LoadRawTextureData(plane.ByteData);
        targetTexture.Apply();
    }

    /// <summary>
    /// Determines the appropriate Unity TextureFormat based on the frame type.
    /// </summary>
    /// <param name="frameType">The frame type.</param>
    /// <returns>The corresponding TextureFormat.</returns>
    private TextureFormat GetTextureFormat(PixelSensorFrameType frameType)
    {
        switch (frameType)
        {
            case PixelSensorFrameType.Grayscale:
                return TextureFormat.R8;
            case PixelSensorFrameType.Rgba8888:
                return TextureFormat.RGBA32;
            case PixelSensorFrameType.Yuv420888:
                Debug.LogWarning("YUV420888 format requires conversion to RGB. Skipping frame processing for this format.");
                return TextureFormat.R8; // Placeholder; ProcessFrame skips YUV frames.
            case PixelSensorFrameType.Depth32:
            case PixelSensorFrameType.DepthRaw:
            case PixelSensorFrameType.DepthConfidence:
            case PixelSensorFrameType.DepthFlags:
                return TextureFormat.RFloat;
            default:
                Debug.LogWarning("Unsupported frame type. Defaulting to RFloat.");
                return TextureFormat.RFloat;
        }
    }

    /// <summary>
    /// Logs the metadata retrieved alongside a sensor frame, one log entry per item.
    /// </summary>
    /// <param name="metadataArray">An array of metadata objects.</param>
    private void ProcessMetadata(PixelSensorMetaData[] metadataArray)
    {
        foreach (var metadata in metadataArray)
        {
            StringBuilder builder = new StringBuilder();
            switch (metadata)
            {
                case PixelSensorExposureTime exposureTime:
                    builder.AppendLine($"Exposure Time: {exposureTime.ExposureTime:F1} ms");
                    break;
                // Named to avoid shadowing the serialized 'analogGain' field.
                case PixelSensorAnalogGain analogGainMeta:
                    builder.AppendLine($"Analog Gain: {analogGainMeta.AnalogGain}");
                    break;
                case PixelSensorDigitalGain digitalGain:
                    builder.AppendLine($"Digital Gain: {digitalGain.DigitalGain}");
                    break;
                case PixelSensorPinholeIntrinsics pinholeIntrinsics:
                    builder.AppendLine($"Pinhole Camera Intrinsics:");
                    builder.AppendLine($"Focal Length: {pinholeIntrinsics.FocalLength}");
                    builder.AppendLine($"Principal Point: {pinholeIntrinsics.PrincipalPoint}");
                    builder.AppendLine($"Field of View: {pinholeIntrinsics.FOV}");
                    builder.AppendLine($"Distortion Coefficients: {string.Join(", ", pinholeIntrinsics.Distortion)}");
                    break;
                case PixelSensorFisheyeIntrinsics fisheyeIntrinsics:
                    builder.AppendLine($"Fisheye Camera Intrinsics:");
                    builder.AppendLine($"Focal Length: {fisheyeIntrinsics.FocalLength}");
                    builder.AppendLine($"Principal Point: {fisheyeIntrinsics.PrincipalPoint}");
                    builder.AppendLine($"Field of View: {fisheyeIntrinsics.FOV}");
                    builder.AppendLine($"Radial Distortion Coefficients: {string.Join(", ", fisheyeIntrinsics.RadialDistortion)}");
                    builder.AppendLine($"Tangential Distortion Coefficients: {string.Join(", ", fisheyeIntrinsics.TangentialDistortion)}");
                    break;
                case PixelSensorDepthFrameIllumination depthIllumination:
                    builder.AppendLine($"Depth Frame Illumination Type: {depthIllumination.IlluminationType}");
                    break;
                // Handle other metadata types as needed.
                default:
                    builder.AppendLine($"Unknown metadata type: {metadata.MetaDataType}");
                    break;
            }
            Debug.Log(builder.ToString());
        }
    }

    /// <summary>
    /// Stops the sensor and cleans up resources when the script is disabled.
    /// NOTE(review): coroutines started here will not run if the GameObject is being
    /// deactivated or destroyed — consider an OnDestroy/synchronous fallback if the
    /// sensor must always be released; verify against your shutdown flow.
    /// </summary>
    private void OnDisable()
    {
        // Unsubscribe from events to avoid callbacks into a disabled component.
        if (pixelSensorFeature != null)
        {
            pixelSensorFeature.OnSensorAvailabilityChanged -= OnSensorAvailabilityChanged;
        }
        // Stop the sensor and destroy it.
        StartCoroutine(StopSensor());
    }

    /// <summary>
    /// Coroutine to stop the sensor, clear applied configs, and destroy the sensor.
    /// </summary>
    private IEnumerator StopSensor()
    {
        // Guard pixelSensorFeature too: OnDisable can run before initialization completed.
        if (!sensorId.HasValue || pixelSensorFeature == null)
        {
            yield break;
        }
        PixelSensorAsyncOperationResult stopSensorAsyncResult = pixelSensorFeature.StopSensor(sensorId.Value, configuredStreams);
        yield return stopSensorAsyncResult;
        if (stopSensorAsyncResult.DidOperationSucceed)
        {
            pixelSensorFeature.ClearAllAppliedConfigs(sensorId.Value);
            if (pixelSensorFeature.DestroyPixelSensor(sensorId.Value))
            {
                Debug.Log("Sensor stopped and destroyed successfully.");
            }
            else
            {
                Debug.LogWarning("Sensor stopped but failed to destroy the sensor.");
            }
        }
        else
        {
            Debug.LogError("Failed to stop the sensor.");
        }
    }

    /// <summary>
    /// Clamps a uint value between a minimum and maximum value (inclusive).
    /// </summary>
    private uint ClampUInt(uint value, uint min, uint max)
    {
        return Math.Clamp(value, min, max);
    }
}