// 128 lines, 4.0 KiB, C#

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using OpenCvSharp;
using System.Threading.Tasks;
/// <summary>
/// Grabs the camera image each frame via OnRenderImage, runs OpenCV Canny edge
/// detection on it, and blits the grayscale result to the destination target.
/// Attach to a GameObject that has a Camera component.
/// </summary>
public class OnRenderImageTest : MonoBehaviour
{
    // Renderer that displays the camera's target texture (the unprocessed feed).
    public MeshRenderer processedImageRenderer;

    // OpenCVSharp buffers, allocated once in Start() and reused every frame.
    private Mat videoSourceImage;
    private Mat cannyImage;
    private Texture2D processedTexture;   // reused CPU-side texture holding the Canny result
    private Texture2D readbackTexture;    // reused CPU-side texture for GPU readback
    private Vec3b[] videoSourceImageData;
    private byte[] cannyImageData;

    private const int imWidth = 320; // TODO: Set width and height based on agent observation size
    private const int imHeight = 240;

    private Camera textureCamera;

    private void Start()
    {
        textureCamera = GetComponent<Camera>();

        // Assign the camera's targetTexture to the renderer to display the raw image.
        processedImageRenderer.material.mainTexture = textureCamera.targetTexture;

        // Initialize image buffers with the fixed size.
        videoSourceImage = new Mat(imHeight, imWidth, MatType.CV_8UC3);
        videoSourceImageData = new Vec3b[imHeight * imWidth];
        cannyImage = new Mat(imHeight, imWidth, MatType.CV_8UC1);
        cannyImageData = new byte[imHeight * imWidth];

        // BUGFIX: the original allocated a new Texture2D every frame in
        // RenderTextureToTexture2D and MatToTexture. Unity does not garbage-collect
        // Texture2D objects, so that leaked memory continuously. Allocate once here
        // and reuse. Mip chain is disabled (was true): the textures are blitted at
        // full size, so mips are never sampled and regenerating them per frame in
        // Apply() was wasted work.
        readbackTexture = new Texture2D(imWidth, imHeight, TextureFormat.RGBA32, false, true);
        processedTexture = new Texture2D(imWidth, imHeight, TextureFormat.RGBA32, false, true);
    }

    private void OnDestroy()
    {
        // Release native OpenCV memory and Unity textures explicitly.
        videoSourceImage?.Dispose();
        cannyImage?.Dispose();
        if (readbackTexture != null) Destroy(readbackTexture);
        if (processedTexture != null) Destroy(processedTexture);
    }

    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        Texture2D tex = RenderTextureToTexture2D(source);

        // BUGFIX: the previous frame's Mats were overwritten without disposal,
        // leaking native memory until finalization. Dispose before reassigning.
        Mat newSource = TextureToMat(tex);
        videoSourceImage.Dispose();
        videoSourceImage = newSource;

        Mat newCanny = ProcessImage(videoSourceImage);
        cannyImage.Dispose();
        cannyImage = newCanny;

        processedTexture = MatToTexture(cannyImage);
        Graphics.Blit(processedTexture, destination);
    }

    /// <summary>Reads the RenderTexture back into the reused CPU-side texture.</summary>
    private Texture2D RenderTextureToTexture2D(RenderTexture rTex)
    {
        // NOTE(review): assumes rTex is imWidth x imHeight — ReadPixels with a rect
        // larger than the destination texture fails; confirm against the camera setup.
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = rTex;
        readbackTexture.ReadPixels(new UnityEngine.Rect(0, 0, rTex.width, rTex.height), 0, 0);
        readbackTexture.Apply();
        // BUGFIX: restore the previously active RenderTexture instead of leaving
        // ours bound, so other readers of RenderTexture.active are not clobbered.
        RenderTexture.active = previous;
        return readbackTexture;
    }

    /// <summary>Converts a Unity Texture2D (RGBA) to an OpenCVSharp BGR Mat.</summary>
    private Mat TextureToMat(Texture2D source)
    {
        // Color32 layout: r, g, b, a per pixel.
        Color32[] c = source.GetPixels32();

        // Convert Color32 (RGBA) to Vec3b (BGR, OpenCV's channel order),
        // one image row per parallel task. Each task writes a disjoint slice
        // of videoSourceImageData, so no synchronization is needed.
        // NOTE(review): GetPixels32 rows run bottom-to-top while Mat rows run
        // top-to-bottom, so the Mat is vertically flipped relative to the screen;
        // the identical indexing in MatToTexture flips it back, so the displayed
        // result is unaffected — only relevant if the Mat is consumed elsewhere.
        Parallel.For(0, imHeight, i =>
        {
            for (var j = 0; j < imWidth; j++)
            {
                var col = c[j + i * imWidth];
                videoSourceImageData[j + i * imWidth] = new Vec3b
                {
                    Item0 = col.b,
                    Item1 = col.g,
                    Item2 = col.r
                };
            }
        });

        // Copy the pixel array into a Mat.
        Mat tmpMat = new Mat(imHeight, imWidth, MatType.CV_8UC3);
        tmpMat.SetArray(0, 0, videoSourceImageData);
        return tmpMat;
    }

    /// <summary>Simple example of Canny edge detection.</summary>
    private Mat ProcessImage(Mat _image)
    {
        Mat cannyImg = new Mat();
        Cv2.Canny(_image, cannyImg, 100, 100);
        return cannyImg;
    }

    /// <summary>Converts a single-channel Mat into the reused output Texture2D.</summary>
    private Texture2D MatToTexture(Mat mat)
    {
        // cannyImageData is a byte array because the Canny image is grayscale.
        mat.GetArray(0, 0, cannyImageData);

        // Expand each grayscale byte into an opaque Color32.
        Color32[] c = new Color32[imHeight * imWidth];
        Parallel.For(0, imHeight, i =>
        {
            for (var j = 0; j < imWidth; j++)
            {
                byte vec = cannyImageData[j + i * imWidth];
                // BUGFIX: alpha was 0 in the original, which makes the result fully
                // transparent on any alpha-respecting material/blit path; use 255.
                c[j + i * imWidth] = new Color32 { r = vec, g = vec, b = vec, a = 255 };
            }
        });

        processedTexture.SetPixels32(c);
        processedTexture.Apply(); // upload to the GPU
        return processedTexture;
    }
}