从Unity ARCore保存相机图像

这与此文章相关, 从Unity ARCore将AcquireCameraImageBytes()保存为存储为图像

我尝试了@JordanRobinson提到的步骤我有类似的问题只看到一个灰色方块。 我不断重新阅读他的更新,我不清楚第2步(创建纹理阅读器)如何与第3步相关联。我添加了更新函数来调用Frame.CameraImage.AcquireCameraImageBytes。 我想错过了什么。

我觉得我很亲密,因为它保存了一个图像(只是一个灰色的图像:-)任何你能提供的帮助将不胜感激

这是我的代码

private Texture2D m_TextureRender; private TextureReader m_CachedTextureReader; void Start () { m_CachedTextureReader = GetComponent(); m_CachedTextureReader.OnImageAvailableCallback += OnImageAvailable; QuitOnConnectionErrors (); } void Update () { Screen.sleepTimeout = SleepTimeout.NeverSleep; using (var image = Frame.CameraImage.AcquireCameraImageBytes()) { if (!image.IsAvailable) { return; } OnImageAvailable(TextureReaderApi.ImageFormatType.ImageFormatColor, image.Width, image.Height, image.Y, 0); } } private void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, System.IntPtr pixelBuffer, int bufferSize) { if (format != TextureReaderApi.ImageFormatType.ImageFormatColor) { Debug.Log("No edge detected due to incorrect image format."); return; } if (m_TextureRender == null || m_EdgeDetectionResultImage == null || m_TextureRender.width != width || m_TextureRender.height != height) { m_TextureRender = new Texture2D(width, height, TextureFormat.RGBA32, false, false); m_EdgeDetectionResultImage = new byte[width * height * 4]; m_TextureRender.width = width; m_TextureRender.height = height; } System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, m_EdgeDetectionResultImage, 0, bufferSize); // Update the rendering texture with the sampled image. m_TextureRender.LoadRawTextureData(m_EdgeDetectionResultImage); m_TextureRender.Apply(); var encodedJpg = m_TextureRender.EncodeToJPG(); var path = Application.persistentDataPath; File.WriteAllBytes(path + "/test2.jpg", encodedJpg); }