在屏幕点读取像素颜色(有效)

我需要在屏幕上读取C#Unity3D中的像素颜色。

我正在使用Render Texture和ReadPixels方法。 每0.5秒使用它对性能有害(即使我的渲染纹理大小为128x128px),我正试图找到另一种方法来更快地获得这些数据。 我在某处读到可以直接使用glReadPixels但我不知道如何使用它?

我需要检测在这个地方(屏幕上的点)是否是某种东西。

这是我目前的代码。

 using UnityEngine; using System; using System.Collections; public class ColController : MonoBehaviour { public Camera collCamera; public Texture2D tex2d; void Start() { collCamera = GetComponent(); tex2d = new Texture2D(collCamera.targetTexture.width, collCamera.targetTexture.height, TextureFormat.ARGB32, false); RenderTexture.active = collCamera.targetTexture; StartCoroutine (Execute ()); } IEnumerator Execute() { while (true) { yield return new WaitForEndOfFrame (); tex2d = GetRTPixels (collCamera.targetTexture); yield return new WaitForSeconds (0.5f); } } static public Texture2D GetRTPixels(RenderTexture rt) { RenderTexture currentActiveRT = RenderTexture.active; RenderTexture.active = rt; // Create a new Texture2D and read the RenderTexture image into it Texture2D tex = new Texture2D(rt.width, rt.height); tex.ReadPixels(new Rect(0, 0, tex.width, tex.height), 0, 0); RenderTexture.active = currentActiveRT; return tex; } void Update() { screenPos = collCamera.WorldToScreenPoint (player.transform.position); positionColor = tex2d.GetPixel ((int)screenPos.x, (int)screenPos.y); } } 

你必须使用 glReadPixels。以前可以直接在 OnPostRender 函数中从C#调用它,实现起来更简单,但现在已经不能这样做了。您必须使用 GL.IssuePluginEvent 来调用负责截取屏幕像素的原生函数。

您还需要位于 \Editor\Data\PluginAPI 目录中的 Unity C++ API 头文件( IUnityInterface.h 和 IUnityGraphics.h )。

我创建了一个名为 UnityPluginHeaders 的文件夹,并在其中放入了 IUnityInterface.h 和 IUnityGraphics.h 头文件,以便可以使用 #include "UnityPluginHeaders/IUnityInterface.h" 和 #include "UnityPluginHeaders/IUnityGraphics.h" 导入它们。

C++ 头文件( ScreenPointPixel.h ):

 #ifndef ANDROIDSCREENSHOT_NATIVE_LIB_H #define ANDROIDSCREENSHOT_NATIVE_LIB_H #define DLLExport __declspec(dllexport) extern "C" { #if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(WINAPI_FAMILY) DLLExport void initScreenPointPixel(void* buffer, int x, int y, int width, int height); DLLExport void updateScreenPointPixelBufferPointer(void* buffer); DLLExport void updateScreenPointPixelCoordinate(int x, int y); DLLExport void updateScreenPointPixelSize(int width, int height); int GetScreenPixels(void* buffer, int x, int y, int width, int height); #else void initScreenPointPixel(void *buffer, int x, int y, int width, int height); void updateScreenPointPixelBufferPointer(void *buffer); void updateScreenPointPixelCoordinate(int x, int y); void updateScreenPointPixelSize(int width, int height); int GetScreenPixels(void *buffer, int x, int y, int width, int height); #endif } #endif //ANDROIDSCREENSHOT_NATIVE_LIB_H 

C++ 实现文件( ScreenPointPixel.cpp ):

 #include "ScreenPointPixel.h" #include  #include  //For Debugging //#include "DebugCPP.h" //http://stackoverflow.com/questions/43732825/use-debug-log-from-c/43735531#43735531 //Unity Headers #include "UnityPluginHeaders/IUnityInterface.h" #include "UnityPluginHeaders/IUnityGraphics.h" //Headers for Windows #if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(WINAPI_FAMILY) #include  #include  #include  #include  #include "glext.h" #pragma comment(lib, "opengl32.lib") //-------------------------------------------------- //Headers for Android #elif defined(ANDROID) || defined(__ANDROID__) #include  #include  #include  //Link lGLESv2 in the CMakeList.txt file //LOCAL_LDLIBS += −lGLESv2 //-------------------------------------------------- //Headers for MAC and iOS //http://nadeausoftware.com/articles/2012/01/c_c_tip_how_use_compiler_predefined_macros_detect_operating_system #elif defined(__APPLE__) && defined(__MACH__) //Apple OSX and iOS (Darwin) #include  #if TARGET_IPHONE_SIMULATOR == 1 //iOS in Xcode simulator #include  #include  #elif TARGET_OS_IPHONE == 1 //iOS on iPhone, iPad, etc. 
#include  #include  #elif TARGET_OS_MAC == 1 #include  #include  #include  #endif //-------------------------------------------------- //Headers for Linux #elif defined(__linux__) #include  #include  #endif static void* screenPointPixelData = nullptr; static int _x; static int _y; static int _width; static int _height; //----------------------------Enable Screenshot----------------------------- void initScreenPointPixel(void* buffer, int x, int y, int width, int height) { screenPointPixelData = buffer; _x = x; _y = y; _width = width; _height = height; } void updateScreenPointPixelBufferPointer(void* buffer) { screenPointPixelData = buffer; } void updateScreenPointPixelCoordinate(int x, int y) { _x = x; _y = y; } void updateScreenPointPixelSize(int width, int height) { _width = width; _height = height; } int GetScreenPixels(void* buffer, int x, int y, int width, int height) { if (glGetError()) return -1; //glReadPixels(x, y, width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer); glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, buffer); if (glGetError()) return -2; return 0; } //----------------------------UNITY RENDERING CALLBACK----------------------------- // Plugin function to handle a specific rendering event static void UNITY_INTERFACE_API OnRenderEventScreenPointPixel(int eventID) { //Put rendering code below if (screenPointPixelData == nullptr) { //Debug::Log("Pointer is null", Color::Red); return; } int result = GetScreenPixels(screenPointPixelData, _x, _y, _width, _height); //std::string log_msg = "Cobol " + std::to_string(result); //Debug::Log(log_msg, Color::Green); } // Freely defined function to pass a callback to plugin-specific scripts extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API GetRenderEventScreenPointPixelFunc() { return OnRenderEventScreenPointPixel; } 

从Android Studio编译/构建时,它应该在 \app\build\intermediates\cmake\release\obj 目录下为您生成两个文件夹( armeabi-v7a 和 x86 )。它们都应包含共享库( *.so )。如果你无法在Android Studio中完成编译,可以使用我为此制作的Android Studio项目副本,用它来生成共享 *.so 库。

将这两个文件夹放入 Unity 项目的 Assets\Plugins\Android\libs 文件夹中。

你现在应该有:

Assets\Plugins\Android\libs\armeabi-v7a\libScreenPointPixel-lib.so

Assets\Plugins\Android\libs\x86\libScreenPointPixel-lib.so


C#测试代码:

创建一个小的简单RawImage组件并将其定位到屏幕的右上角 。 将RawImage拖动到下面脚本中的rawImageColor槽。 单击屏幕上的任意位置时,该屏幕点的像素颜色应显示在rawImageColor RawImage上。

C#

 using System; using System.Collections; using System.Runtime.InteropServices; using UnityEngine; using UnityEngine.UI; public class ScreenPointPixel : MonoBehaviour { [DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)] public static extern void initScreenPointPixel(IntPtr buffer, int x, int y, int width, int height); //------------------------------------------------------------------------------------- [DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)] public static extern void updateScreenPointPixelBufferPointer(IntPtr buffer); //------------------------------------------------------------------------------------- [DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)] public static extern void updateScreenPointPixelCoordinate(int x, int y); //------------------------------------------------------------------------------------- [DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)] public static extern void updateScreenPointPixelSize(int width, int height); //------------------------------------------------------------------------------------- //------------------------------------------------------------------------------------- [DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.StdCall)] private static extern IntPtr GetRenderEventScreenPointPixelFunc(); //------------------------------------------------------------------------------------- int width = 500; int height = 500; //Where Pixel data will be saved byte[] screenData; //Where handle that pins the Pixel data will stay GCHandle pinHandler; //Used to test the color public RawImage rawImageColor; // Use this for initialization void Awake() { Resolution res = Screen.currentResolution; width = res.width; height = res.height; //Allocate array to be used screenData = new byte[width * height * 4]; //Pin the Array so that it doesn't move around pinHandler = GCHandle.Alloc(screenData, 
GCHandleType.Pinned); //Register the screenshot and pass the array that will receive the pixels IntPtr arrayPtr = pinHandler.AddrOfPinnedObject(); initScreenPointPixel(arrayPtr, 0, 0, width, height); StartCoroutine(caller()); } IEnumerator caller() { while (true) { //Use mouse position as the pixel position //Input.tou #if UNITY_ANDROID || UNITY_IOS || UNITY_WSA_10_0 if (!(Input.touchCount > 0)) { yield return null; continue; } //Use touch position as the pixel position int x = Mathf.FloorToInt(Input.GetTouch(0).position.x); int y = Mathf.FloorToInt(Input.GetTouch(0).position.y); #else //Use mouse position as the pixel position int x = Mathf.FloorToInt(Input.mousePosition.x); int y = Mathf.FloorToInt(Input.mousePosition.y); #endif //Change this to any location from the screen you want updateScreenPointPixelCoordinate(x, y); //Must be 1 and 1 updateScreenPointPixelSize(1, 1); //Take screenshot of the screen GL.IssuePluginEvent(GetRenderEventScreenPointPixelFunc(), 1); //Get the Color Color32 tempColor = new Color32(); tempColor.r = screenData[0]; tempColor.g = screenData[1]; tempColor.b = screenData[2]; tempColor.a = screenData[3]; //Test it by assigning it to a raw image rawImageColor.color = tempColor; //Wait for a frame yield return null; } } void OnDisable() { //Unpin the array when disabled pinHandler.Free(); } }