My app using Vuforia appears to hang a few seconds after launch. The image target gets recognized, and then all the prefabs I enabled disappear; they only reappear randomly for about a second at irregular intervals.
I opened a brand-new project, set up Magic Leap and Vuforia, created an ImageTarget from an image, and attached a script component that captures the camera image, and then the app malfunctioned. See below for the camera capture code. Once I added the camera component to the app, every following build, with or without the camera script enabled, showed the same behavior I described above: the text mesh disappears, the image target is not recognized, and the text only reappears randomly for about a second, roughly once every 20 seconds. Only a full restart of the Magic Leap and my PC, plus deleting the app and the APK, resolves the issue, but it comes back as soon as I re-enable the camera code. After that, I tried delayed initialization of Vuforia with the following script:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Vuforia;

public class Test : MonoBehaviour
{
    // Start is called before the first frame update
    void Start()
    {
        VuforiaApplication.Instance.Initialize();
        GameObject.Find("Main Camera").GetComponent<VuforiaBehaviour>().enabled = true;
        // hard_1417 is the object name of the image target.
        GameObject.Find("hard_1417").GetComponent<ImageTargetBehaviour>().enabled = true;
    }

    // Update is called once per frame
    void Update()
    {
    }
}
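(Since Initialize() completes asynchronously, I also wondered whether enabling the behaviours in the same Start() call races the engine startup. If I read the Vuforia 10 API correctly, the safer pattern is to wait for the OnVuforiaInitialized callback before enabling anything; here is a minimal sketch of what I mean, with a placeholder class name:)

using UnityEngine;
using Vuforia;

public class DelayedVuforiaInit : MonoBehaviour
{
    void Start()
    {
        // Subscribe before calling Initialize(); initialization completes asynchronously.
        VuforiaApplication.Instance.OnVuforiaInitialized += HandleVuforiaInitialized;
        VuforiaApplication.Instance.Initialize();
    }

    private void HandleVuforiaInitialized(VuforiaInitError error)
    {
        VuforiaApplication.Instance.OnVuforiaInitialized -= HandleVuforiaInitialized;
        if (error != VuforiaInitError.NONE)
        {
            Debug.LogError($"Vuforia failed to initialize: {error}");
            return;
        }
        // Only enable the behaviours once the engine is actually up.
        GameObject.Find("Main Camera").GetComponent<VuforiaBehaviour>().enabled = true;
        GameObject.Find("hard_1417").GetComponent<ImageTargetBehaviour>().enabled = true;
    }
}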
Either way, with the delayed initialization the app surprisingly still starts to hang and fails to work properly. I'm seeing entries like this in the bug report:
08-17 16:46:28.602770 8644 8661 E BufferQueueConsumer: [BufferItemQueue] acquireBuffer: max acquired buffer count reached: 5 (max 4)
08-17 16:46:28.602821 8644 8661 E BufferItemConsumer: [BufferItemQueue] Error acquiring buffer: Unknown error -38 (-38)
08-17 16:46:28.602831 8644 8661 E : acquireBuffer BufferItemConsumer::acquireBuffer fail, status: -38
08-17 16:46:28.618019 3458 3936 I cvip-ch : Aug 17 20:46:19 0000010579345477491 ARM.05 00961 me_38a8bcb4da05 [algo/arm/src/wr_arm_blocks.cpp:00366]:I:: :wr: Reconstructing depth frame 6385/6385, 4926330299us
but I'm not sure whether that's related to my problem. I'm on OS version 1.3.1, Build B3E.230616.02-R.121, Android API Level 29, with ML SDK 1.3.0, Unity package 1.9.0, and Vuforia 10.16.5.
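The BufferQueueConsumer lines make me wonder whether two consumers are fighting over the same camera stream. On the assumption (mine, not confirmed by the docs) that Vuforia occupies the Main camera, one mitigation I'm considering is pointing my capture script at the CV camera instead:

    // Assumption on my part: Vuforia holds the Main camera, so this script
    // switches to the CV camera to avoid contending for the same stream.
    private MLCamera.Identifier _identifier = MLCamera.Identifier.CV;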
If nothing looks wrong above, should I factory-reset the Magic Leap just to make sure nothing is broken on the device side?
Thanks in advance!
For Camera Capture:
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using Vuforia;

public class NewCamera : MonoBehaviour
{
    [SerializeField, Tooltip("Desired width for the camera capture")]
    private int captureWidth = 1280;

    [SerializeField, Tooltip("Desired height for the camera capture")]
    private int captureHeight = 720;

    // Not used in this example
    // [SerializeField, Tooltip("The renderer to show the camera capture on RGB format")]
    // private Renderer _screenRendererRGB = null;

    // The identifier can target either the Main or the CV camera.
    private MLCamera.Identifier _identifier = MLCamera.Identifier.Main;
    private MLCamera _camera;

    // True once the camera device is ready to be connected.
    private bool _cameraDeviceAvailable;

    private MLCamera.CaptureConfig _captureConfig;
    private Texture2D _videoTextureRgb;

    // The camera capture state.
    private bool _isCapturing;

    private MLCamera.CameraOutput _lastCameraOutput;
    private MLCamera.ResultExtras _lastExtras;
    private Matrix4x4 _lastTransform;

    private bool permissionCameraGranted = false;
    private readonly MLPermissions.Callbacks permissionCallbacks = new MLPermissions.Callbacks();
    private void Awake()
    {
        permissionCallbacks.OnPermissionGranted += OnPermissionGranted;
    }

    private void OnDestroy()
    {
        permissionCallbacks.OnPermissionGranted -= OnPermissionGranted;
    }

    void Start()
    {
        MLPermissions.RequestPermission(MLPermission.Camera, permissionCallbacks);
        MLPermissions.RequestPermission(MLPermission.SpatialMapping, permissionCallbacks);
        MLPermissions.RequestPermission(MLPermission.SpatialAnchors, permissionCallbacks);
        StartCoroutine(EnableMLCamera());
    }

    private void OnPermissionGranted(string permission)
    {
        // Only unblock the camera coroutine for the camera permission;
        // this callback also fires for the spatial permissions requested above.
        if (permission == MLPermission.Camera)
        {
            permissionCameraGranted = true;
        }
        Debug.Log($"{permission} granted.");
    }

    void OnDisable()
    {
        StopCapture();
    }
    // Waits for the camera to be ready, then connects to it.
    private IEnumerator EnableMLCamera()
    {
        while (!permissionCameraGranted)
        {
            yield return null;
        }

        // VuforiaApplication.Instance.Initialize();
        // GameObject.Find("Main Camera").GetComponent<VuforiaBehaviour>().enabled = true;
        // GameObject.Find("hard_1417").GetComponent<ImageTargetBehaviour>().enabled = true;

        yield return new WaitForSeconds(5);

        // Wait until the main camera device is available.
        while (!_cameraDeviceAvailable)
        {
            MLResult result = MLCamera.GetDeviceAvailabilityStatus(_identifier, out _cameraDeviceAvailable);
            if (!result.IsOk || !_cameraDeviceAvailable)
            {
                yield return new WaitForSeconds(1.0f);
            }
        }

        yield return ConnectCamera();
    }
    private IEnumerator ConnectCamera()
    {
        // Once the camera is available, connect to it.
        if (_cameraDeviceAvailable)
        {
            MLCamera.ConnectContext connectContext = MLCamera.ConnectContext.Create();
            connectContext.CamId = _identifier;
            // MLCamera.Identifier.Main is the only camera that can access the virtual and mixed reality flags.
            connectContext.Flags = MLCamera.ConnectFlag.CamOnly;

            var createAndConnectAsync = MLCamera.CreateAndConnectAsync(connectContext);
            while (!createAndConnectAsync.IsCompleted)
            {
                yield return null;
            }

            _camera = createAndConnectAsync.Result;
            if (_camera != null)
            {
                Debug.Log("Camera device connected");
                if (TryGetCaptureConfig(out _captureConfig))
                {
                    Debug.Log("Camera Config Created. Starting Video Capture");
                    yield return StartVideoCapture();
                }
                else
                {
                    Debug.LogError("Cannot Create Capture Config");
                    yield break;
                }
            }
        }
        yield return null;
    }
    private bool TryGetCaptureConfig(out MLCameraBase.CaptureConfig captureConfig)
    {
        // Initialize a new capture config.
        captureConfig = new MLCamera.CaptureConfig();

        // Get the stream capabilities of the selected camera
        // (supported capture types, formats and resolutions).
        MLCamera.StreamCapability[] streamCapabilities =
            MLCamera.GetImageStreamCapabilitiesForCamera(_camera, MLCamera.CaptureType.Video);
        if (streamCapabilities.Length == 0)
            return false;

        // Fall back to the first capability, then try to find the stream
        // that most closely matches the target width and height.
        MLCamera.StreamCapability defaultCapability = streamCapabilities[0];
        if (MLCamera.TryGetBestFitStreamCapabilityFromCollection(streamCapabilities, captureWidth, captureHeight,
                MLCamera.CaptureType.Video, out MLCamera.StreamCapability selectedCapability))
        {
            defaultCapability = selectedCapability;
        }

        // Set RGBA video as the output format.
        MLCamera.OutputFormat outputFormat = MLCamera.OutputFormat.RGBA_8888;
        // Set the frame rate to 30 fps.
        captureConfig.CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS;
        // The Main camera supports up to two stream configurations; one is enough here.
        captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
        captureConfig.StreamConfigs[0] = MLCamera.CaptureStreamConfig.Create(defaultCapability, outputFormat);
        return true;
    }
    private IEnumerator StartVideoCapture()
    {
        MLResult result = _camera.PrepareCapture(_captureConfig, out MLCamera.Metadata metaData);
        if (result.IsOk)
        {
            // Assume auto exposure / white balance is handled by Vuforia.
            // _camera.PreCaptureAEAWB();

            // Image capture would use the CaptureImage function instead.
            var captureVideoStartAsync = _camera.CaptureVideoStartAsync();
            while (!captureVideoStartAsync.IsCompleted)
            {
                yield return null;
            }

            result = captureVideoStartAsync.Result;
            _isCapturing = MLResult.DidNativeCallSucceed(result.Result, nameof(_camera.CaptureVideoStart));
            if (_isCapturing)
            {
                Debug.Log("Video capture started!");
                _camera.OnRawVideoFrameAvailable += RawVideoFrameAvailable;
            }
            else
            {
                Debug.LogError($"Could not start camera capture. Result: {result}");
            }
        }
    }

    private void StopCapture()
    {
        if (_isCapturing)
        {
            _camera.CaptureVideoStop();
            _camera.OnRawVideoFrameAvailable -= RawVideoFrameAvailable;
        }
        _camera?.Disconnect();
        _isCapturing = false;
    }
    void RawVideoFrameAvailable(MLCamera.CameraOutput output, MLCamera.ResultExtras extras, MLCameraBase.Metadata metadataHandle)
    {
        _lastCameraOutput = output;
        _lastExtras = extras;
        if (MLCVCamera.GetFramePose(extras.VCamTimestamp, out Matrix4x4 cameraTransform).IsOk)
        {
            _lastTransform = cameraTransform;
        }

        // Additional logic to render the image:
        // if (output.Format == MLCamera.OutputFormat.RGBA_8888)
        // {
        //     UpdateRGBTexture(ref _videoTextureRgb, output.Planes[0], _screenRendererRGB);
        // }
    }

    public Vector2 WorldPointToPixel(Vector3 worldPoint)
    {
        if (_lastExtras.Intrinsics.HasValue)
        {
            int width = (int)_lastCameraOutput.Planes[0].Width;
            int height = (int)_lastCameraOutput.Planes[0].Height;
            return WorldPointToPixel(worldPoint, width, height, _lastExtras.Intrinsics.Value, _lastTransform);
        }
        Debug.Log("No Intrinsic value");
        return new Vector2(0, 0);
    }
    private Vector2 WorldPointToPixel(Vector3 worldPoint, int width, int height, MLCameraBase.IntrinsicCalibrationParameters parameters, Matrix4x4 cameraTransformationMatrix)
    {
        // Step 1: Convert the world space point to camera space.
        Vector3 cameraSpacePoint = cameraTransformationMatrix.inverse.MultiplyPoint(worldPoint);

        // Step 2: Project the camera space point onto the normalized image plane.
        Vector2 normalizedImagePoint = new Vector2(cameraSpacePoint.x / cameraSpacePoint.z, cameraSpacePoint.y / cameraSpacePoint.z);

        // Step 3: Adjust for the field of view.
        float verticalFOVRad = parameters.FOV * Mathf.Deg2Rad;
        float aspectRatio = width / (float)height;
        float horizontalFOVRad = 2 * Mathf.Atan(Mathf.Tan(verticalFOVRad / 2) * aspectRatio);
        normalizedImagePoint.x /= Mathf.Tan(horizontalFOVRad / 2);
        normalizedImagePoint.y /= Mathf.Tan(verticalFOVRad / 2);

        // Step 4: Convert normalized image coordinates to pixel coordinates.
        Vector2 pixelPosition = new Vector2(
            normalizedImagePoint.x * width + parameters.PrincipalPoint.x,
            normalizedImagePoint.y * height + parameters.PrincipalPoint.y
        );
        return pixelPosition;
    }
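    // Example (hypothetical usage; "trackedObject" is a placeholder): project a
    // tracked object's position into the last frame and check that the result
    // lands inside the image bounds.
    //   Vector2 pixel = WorldPointToPixel(trackedObject.transform.position);
    //   bool insideFrame = pixel.x >= 0 && pixel.x < _lastCameraOutput.Planes[0].Width
    //                   && pixel.y >= 0 && pixel.y < _lastCameraOutput.Planes[0].Height;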
    private void UpdateRGBTexture(ref Texture2D videoTextureRGB, MLCamera.PlaneInfo imagePlane, Renderer renderer)
    {
        if (videoTextureRGB != null &&
            (videoTextureRGB.width != imagePlane.Width || videoTextureRGB.height != imagePlane.Height))
        {
            Destroy(videoTextureRGB);
            videoTextureRGB = null;
        }

        if (videoTextureRGB == null)
        {
            videoTextureRGB = new Texture2D((int)imagePlane.Width, (int)imagePlane.Height, TextureFormat.RGBA32, false);
            videoTextureRGB.filterMode = FilterMode.Bilinear;
            Material material = renderer.material;
            material.mainTexture = videoTextureRGB;
            material.mainTextureScale = new Vector2(1.0f, -1.0f);
        }

        // If the plane has row padding (stride wider than the pixel data),
        // copy each row out before uploading to the texture.
        int actualWidth = (int)(imagePlane.Width * imagePlane.PixelStride);
        if (imagePlane.Stride != actualWidth)
        {
            var newTextureChannel = new byte[actualWidth * imagePlane.Height];
            for (int i = 0; i < imagePlane.Height; i++)
            {
                Buffer.BlockCopy(imagePlane.Data, (int)(i * imagePlane.Stride), newTextureChannel, i * actualWidth, actualWidth);
            }
            videoTextureRGB.LoadRawTextureData(newTextureChannel);
        }
        else
        {
            videoTextureRGB.LoadRawTextureData(imagePlane.Data);
        }
        videoTextureRGB.Apply();
    }
}