using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using OpenCvSharp;
using System.Threading.Tasks;
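/// <summary>
/// Converts the virtual Cozmo camera image into a cropped Canny edge image,
/// computes the centroid of the detected edges and writes the result to a RenderTexture.
/// </summary>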
public class ImageProcessor : MonoBehaviour
{
[Header("RenderTexture")]
[Tooltip("RenderTexture that will be passed to the LearningBrain.")]
public RenderTexture renderTextureCropped;
[Header("Debug Helper")]
[Tooltip("Reference to the MeshRenderer that will show the processed Image from Cozmo")]
public MeshRenderer processedImageRenderer;
[Tooltip("Reference to the MeshRenderer that will show the processed and cropped Image from Cozmo")]
public MeshRenderer processedImageRendererCropped;
/// <summary>
/// Center of gravity of the edge pixels in the cropped Canny image.
/// </summary>
public Point CenterOfGravity { get; private set; }
// OpenCVSharp parameters
private Mat videoSourceImage;
private Mat cannyImage;
private Texture2D processedTexture;
private Vec3b[] videoSourceImageData;
private byte[] cannyImageData;
private int imWidth = 320; // Width of the camera image from the virtual cozmo
private int imHeight = 240; // Height of the camera image from the virtual cozmo
private int croppedImHeight = 120; // Height of the cropped camera image from the virtual cozmo
private Camera textureCamera; // Virtual Cozmo camera
private Texture2D firstTexture;
private void Start()
{
// Get reference to the cozmo camera
textureCamera = GetComponent<Camera>();
// Set image widths and heights based on the given RenderTextures
imWidth = textureCamera.targetTexture.width;
imHeight = textureCamera.targetTexture.height;
//croppedImHeight = renderTextureCropped.height;
//assign the processed targetTexture to the renderer to display the image
processedImageRenderer.material.mainTexture = textureCamera.targetTexture;
processedImageRendererCropped.material.mainTexture = renderTextureCropped;
// initialize video / image with given size
videoSourceImage = new Mat(imHeight, imWidth, MatType.CV_8UC3);
videoSourceImageData = new Vec3b[imHeight * imWidth];
cannyImage = new Mat(imHeight, imWidth, MatType.CV_8UC1);
cannyImageData = new byte[croppedImHeight * imWidth];
firstTexture = new Texture2D(imWidth, imHeight, TextureFormat.RGBA32, true, true);
processedTexture = new Texture2D(imWidth, croppedImHeight, TextureFormat.RGBA32, true, true);
}
/// <summary>
/// Runs the edge-detection pipeline on the given RenderTexture and writes the result back into it.
/// </summary>
public void ProcessRenderTexture(RenderTexture source)
{
ProcessRenderTexture(source, source);
}
/// <summary>
/// Runs the edge-detection pipeline on the source RenderTexture and blits the cropped Canny image into the target.
/// </summary>
public void ProcessRenderTexture(RenderTexture source, RenderTexture target)
{
Texture2D tex = RenderTextureToTexture2D(source);
videoSourceImage = TextureToMat(tex);
cannyImage = ProcessImage(videoSourceImage);
cannyImage = CropImage(cannyImage);
CenterOfGravityTest(cannyImage);
processedTexture = MatToTexture(cannyImage);
Graphics.Blit(processedTexture, target);
}
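// Usage sketch (assumption, not part of this project): a separate component could drive the
// pipeline manually instead of relying on OnRenderImage, for example:
//
//   public class CozmoVisionDriver : MonoBehaviour   // hypothetical helper class
//   {
//       public ImageProcessor imageProcessor;
//       public RenderTexture cameraTexture;          // hypothetical source texture
//
//       private void Update()
//       {
//           // Processes in place: the cropped Canny result is blitted back into cameraTexture,
//           // and imageProcessor.CenterOfGravity is updated for this frame.
//           imageProcessor.ProcessRenderTexture(cameraTexture);
//       }
//   }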
/// <summary>
/// Gets called by Unity when a new image arrives from the camera this script is attached to;
/// runs the edge-detection pipeline and writes the cropped result to renderTextureCropped.
/// </summary>
public void OnRenderImage(RenderTexture source, RenderTexture destination)
{
RenderTextureToTexture2D_NEW(source);
TextureToMat_NEW(firstTexture);
ProcessImage_NEW(videoSourceImage);
cannyImage = CropImage(cannyImage);
CenterOfGravityTest(cannyImage);
MatToTexture_NEW(cannyImage);
Graphics.Blit(processedTexture, destination);
Graphics.Blit(processedTexture, renderTextureCropped);
}
// Crop image to just see the middle of the original image
private Mat CropImage(Mat image)
{
// cut a fourth off the top and bottom of the image, keeping the middle half
OpenCvSharp.Rect rectCropped = new OpenCvSharp.Rect(0, image.Height / 4, image.Width, image.Height / 2);
Mat croppedImage = new Mat(image, rectCropped);
return croppedImage;
}
private Texture2D RenderTextureToTexture2D(RenderTexture rTex)
{
Texture2D tex = new Texture2D(imWidth, imHeight, TextureFormat.RGBA32, true, true);
RenderTexture.active = rTex;
tex.ReadPixels(new UnityEngine.Rect(0, 0, rTex.width, rTex.height), 0, 0);
tex.Apply();
return tex;
}
private void RenderTextureToTexture2D_NEW(RenderTexture rTex)
{
RenderTexture.active = rTex;
firstTexture.ReadPixels(new UnityEngine.Rect(0, 0, rTex.width, rTex.height), 0, 0);
firstTexture.Apply();
}
// Convert Unity Texture2D object to OpenCVSharp Mat object
private Mat TextureToMat(Texture2D source)
{
// Color32 array : r, g, b, a
Color32[] c = source.GetPixels32();
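// Note: GetPixels32 returns rows bottom-up, so the resulting Mat is vertically flipped
// relative to the on-screen image (the editor-only Cv2.Flip before ImShow in
// CenterOfGravityTest appears to compensate for this when displaying).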
// Convert each Color32 (RGBA) pixel to a Vec3b in BGR order,
// the per-pixel type of a CV_8UC3 Mat, one image row per parallel task
Parallel.For(0, imHeight, i =>
{
for (var j = 0; j < imWidth; j++)
{
var col = c[j + i * imWidth];
var vec3 = new Vec3b
{
Item0 = col.b,
Item1 = col.g,
Item2 = col.r
};
// set pixel to an array
videoSourceImageData[j + i * imWidth] = vec3;
}
});
// assign the Vec3b array to Mat
Mat tmpMat = new Mat(imHeight, imWidth, MatType.CV_8UC3);
tmpMat.SetArray(0, 0, videoSourceImageData);
return tmpMat;
}
// Convert Unity Texture2D object to OpenCVSharp Mat object
private void TextureToMat_NEW(Texture2D source)
{
// Color32 array : r, g, b, a
Color32[] c = source.GetPixels32();
// Convert each Color32 (RGBA) pixel to a Vec3b in BGR order,
// the per-pixel type of a CV_8UC3 Mat, one image row per parallel task
Parallel.For(0, imHeight, i =>
{
for (var j = 0; j < imWidth; j++)
{
var col = c[j + i * imWidth];
var vec3 = new Vec3b
{
Item0 = col.b,
Item1 = col.g,
Item2 = col.r
};
// set pixel to an array
videoSourceImageData[j + i * imWidth] = vec3;
}
});
// assign the Vec3b array to Mat
videoSourceImage.SetArray(0, 0, videoSourceImageData);
}
// Simple example of canny edge detect
private Mat ProcessImage(Mat _image)
{
Mat cannyImg = new Mat();
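// Note: 100/100 sets both hysteresis thresholds of Cv2.Canny to the same value; OpenCV's
// documentation suggests an upper:lower ratio of about 2:1 to 3:1 (e.g. 200/100),
// which may give cleaner edges.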
Cv2.Canny(_image, cannyImg, 100, 100);
return cannyImg;
}
// Simple example of canny edge detect
private void ProcessImage_NEW(Mat _image)
{
Cv2.Canny(_image, cannyImage, 100, 100);
}
// Convert OpenCVSharp Mat object to Unity Texture2D object
private Texture2D MatToTexture(Mat mat)
{
// cannyImageData is a byte array, because the Canny image is grayscale
mat.GetArray(0, 0, cannyImageData);
// create Color32 array that can be assigned to Texture2D directly
Color32[] c = new Color32[croppedImHeight * imWidth];
// parallel for loop
Parallel.For(0, croppedImHeight, i =>
{
for (var j = 0; j < imWidth; j++)
{
byte vec = cannyImageData[j + i * imWidth];
var color32 = new Color32
{
r = vec,
g = vec,
b = vec,
a = 0
};
c[j + i * imWidth] = color32;
}
});
Texture2D texture = new Texture2D(imWidth, croppedImHeight, TextureFormat.RGBA32, true, true);
texture.SetPixels32(c);
// upload the changed pixels to the GPU
texture.Apply();
return texture;
}
// Convert OpenCVSharp Mat object to Unity Texture2D object
private void MatToTexture_NEW(Mat mat)
{
// cannyImageData is a byte array, because the Canny image is grayscale
mat.GetArray(0, 0, cannyImageData);
// create Color32 array that can be assigned to Texture2D directly
Color32[] c = new Color32[croppedImHeight * imWidth];
// parallel for loop
Parallel.For(0, croppedImHeight, i =>
{
for (var j = 0; j < imWidth; j++)
{
byte vec = cannyImageData[j + i * imWidth];
var color32 = new Color32
{
r = vec,
g = vec,
b = vec,
a = 0
};
c[j + i * imWidth] = color32;
}
});
processedTexture.SetPixels32(c);
// upload the changed pixels to the GPU
processedTexture.Apply();
}
private void CenterOfGravityTest(Mat processedImage)
{
// image moments of the binary edge image; centroid = (M10 / M00, M01 / M00)
Moments m = new Moments(processedImage, true);
// guard against frames without any edge pixels (M00 == 0 would otherwise divide by zero)
if (m.M00 > 0)
{
CenterOfGravity = new Point(m.M10 / m.M00, m.M01 / m.M00);
}
#if UNITY_EDITOR
// show the image with a point mark at the centroid
Cv2.Circle(processedImage, CenterOfGravity, 5, new Scalar(128, 0, 0), -1);
Cv2.Flip(processedImage, processedImage, FlipMode.X);
Cv2.ImShow("Image with center", processedImage);
#endif
}
}