Moved OpenCV Helper to its own Java File

parent 057c7fc139
commit 6954d38143

Detector.java
@@ -3,6 +3,7 @@ package com.example.ueberwachungssystem.Detection;
 import android.os.CountDownTimer;
 
 import androidx.annotation.NonNull;
+import androidx.camera.core.ExperimentalGetImage;
 
 
 abstract public class Detector {

OpenCVHelper.java (new file)
@@ -0,0 +1,109 @@
+package com.example.ueberwachungssystem.Detection;
+
+import android.graphics.Bitmap;
+import android.media.Image;
+import android.widget.ImageView;
+
+import androidx.annotation.NonNull;
+import androidx.camera.core.ExperimentalGetImage;
+import androidx.camera.core.ImageProxy;
+
+import org.opencv.android.Utils;
+import org.opencv.core.Core;
+import org.opencv.core.CvType;
+import org.opencv.core.Mat;
+import org.opencv.core.MatOfPoint;
+import org.opencv.core.Scalar;
+import org.opencv.core.Size;
+import org.opencv.imgproc.Imgproc;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+
+@ExperimentalGetImage
+public class OpenCVHelper {
+
+    /** OpenCV helper methods **/
+    public static Mat addGaussianBlur(Mat inputMat, Size kernelSize){
+        Mat outputMat = new Mat();
+        Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
+        return outputMat;
+    }
+
+    public static Mat addBlur(Mat inputMat, Size kernelSize){
+        Mat outputMat = new Mat();
+        Imgproc.blur(inputMat, outputMat, kernelSize);
+        return outputMat;
+    }
+
+    public static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
+        Image img = imgProxy.getImage();
+
+        assert img != null;
+        ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
+        byte[] yData = new byte[yBuffer.remaining()];
+        yBuffer.get(yData);
+
+        Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
+        yMat.put(0, 0, yData);
+
+        return yMat;
+    }
+
+    public static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
+        Mat diffImage = new Mat();
+        Core.absdiff(inputMat, previousImage, diffImage);
+        Mat binaryMat = new Mat();
+        Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
+        return binaryMat;
+    }
+
+
+    public static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
+        List<MatOfPoint> contours = new ArrayList<>();
+        Mat hierarchy = new Mat();
+        Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
+
+        Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
+        // Iterate over the contours and draw only the larger contours on the outputMat
+        for (MatOfPoint contour : contours) {
+            double contourArea = Imgproc.contourArea(contour);
+            if (contourArea > areaThreshold) {
+                Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
+            }
+        }
+        // Apply the outputMat as a mask to the dilatedImage
+        Mat maskedImage = new Mat();
+        inputMat.copyTo(maskedImage, outputMat);
+        return outputMat;
+    }
+
+    public static Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
+        Mat dilatedMat = new Mat();
+        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
+        Imgproc.dilate(inputMat, dilatedMat, kernel);
+        return dilatedMat;
+    }
+
+    public static int countNonZeroPixels(Mat inputImage) {
+        if (inputImage != null)
+            return Core.countNonZero(inputImage);
+        else
+            return 0;
+    }
+
+
+    public static void debugMat(Mat mat, ImageView imageView) {
+        if (imageView == null || mat == null)
+            return;
+
+        Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
+        Utils.matToBitmap(mat, bitmap);
+
+        // Display the bitmap in an ImageView
+        imageView.setImageBitmap(bitmap);
+    }
+}
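
For orientation, the helpers in the new file are naturally chained into a simple frame-differencing pipeline: extract the luma plane, blur it, diff it against the previous frame, dilate the binary mask, drop small contours, and count the remaining pixels. The sketch below shows one plausible wiring; the surrounding class, the processFrame name, and the kernel sizes and thresholds are illustrative assumptions, not part of this commit.

package com.example.ueberwachungssystem.Detection;

import android.widget.ImageView;

import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageProxy;

import org.opencv.core.Mat;
import org.opencv.core.Size;

// Hypothetical caller (not in the repository) that chains the new OpenCVHelper methods.
@ExperimentalGetImage
class MotionPipelineSketch {
    // previousFrame is assumed to be the blurred luma Mat of the preceding frame.
    static int processFrame(ImageProxy imageProxy, Mat previousFrame, ImageView debugView) {
        Mat gray = OpenCVHelper.extractYChannel(imageProxy);                 // Y plane as CV_8UC1
        Mat blurred = OpenCVHelper.addGaussianBlur(gray, new Size(21, 21));  // suppress sensor noise
        Mat diff = OpenCVHelper.thresholdPixels(blurred, previousFrame, 25); // binary change mask
        Mat dilated = OpenCVHelper.dilateBinaryMat(diff, new Size(5, 5));    // close small gaps
        Mat filtered = OpenCVHelper.thresholdContourArea(dilated, 500f);     // keep only larger blobs
        OpenCVHelper.debugMat(filtered, debugView);                          // optional on-screen preview
        return OpenCVHelper.countNonZeroPixels(filtered);                    // changed-pixel count
    }
}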

VideoDetector.java
@@ -5,7 +5,6 @@ import android.annotation.SuppressLint;
 import android.app.Activity;
 import android.content.Context;
 import android.content.pm.PackageManager;
-import android.graphics.Bitmap;
 import android.graphics.ImageFormat;
 import android.media.Image;
 import android.os.CountDownTimer;
@@ -31,20 +30,12 @@ import androidx.lifecycle.LifecycleOwner;
 import com.google.common.util.concurrent.ListenableFuture;
 
 import org.opencv.android.OpenCVLoader;
-import org.opencv.android.Utils;
-import org.opencv.core.Core;
-import org.opencv.core.CvType;
 import org.opencv.core.Mat;
-import org.opencv.core.MatOfPoint;
-import org.opencv.core.Scalar;
 import org.opencv.core.Size;
-import org.opencv.imgproc.Imgproc;
 
 import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
 import java.util.concurrent.ExecutionException;
 
 
@@ -109,10 +100,13 @@ public class VideoDetector extends Detector {
         this.preview = new Preview.Builder().build();
     }
 
-    /** Get State of the Detector */
-    public boolean isRunning() {
+    /** Get States */
+    public boolean isDetecting() {
         return isDetecting;
     }
+    public boolean isRecording(){
+        return isRecording;
+    }
 
 
     /** Starts the Video Detection */
@@ -190,6 +184,7 @@
     }
 
 
+    /** Setup Use Cases */
     private ImageAnalysis setupImageAnalysis() {
         // Configure and create Image Analysis
         ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
@@ -234,7 +229,6 @@
             extendViolation();
             return;
         }
-
         // Return On Request Permissions
         if (!hasPermissions()) {
             getPermissions();
@@ -248,7 +242,7 @@
         isRecording = true;
         bindCameraProvider(UseCase.VideoCapture);
 
-        File vidFile = new File(context.getFilesDir() + "/" + outputName);
+        File vidFile = new File(context.getFilesDir() + "/" + generateFileName() + ".mp4");
         if (ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
             return;
         }
@@ -268,7 +262,7 @@
                     }
                 }
             );
-            } catch (ExecutionException | InterruptedException e) {}
+            } catch (ExecutionException | InterruptedException ignored) {}
         }, ContextCompat.getMainExecutor(context));
     }
 
@@ -327,115 +321,12 @@
         this.preview.setSurfaceProvider(previewView.getSurfaceProvider());
     }
 
+    private String generateFileName(){
+        // Get the current timestamp
+        LocalDateTime currentTime = LocalDateTime.now();
+        // Define the format for the timestamp
+        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss");
+        // Return the timestamp as a string
+        return currentTime.format(formatter);
-    private static class OpenCVHelper{
-        private OpenCVHelper() {}
-        /** OpenCV helper methods **/
-        private static Mat addGaussianBlur(Mat inputMat, Size kernelSize){
-            Mat outputMat = new Mat();
-            Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
-            return outputMat;
-        }
-
-        private static Mat addBlur(Mat inputMat, Size kernelSize){
-            Mat outputMat = new Mat();
-            Imgproc.blur(inputMat, outputMat, kernelSize);
-            return outputMat;
-        }
-
-        private static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
-            Image img = imgProxy.getImage();
-
-            assert img != null;
-            ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
-            byte[] yData = new byte[yBuffer.remaining()];
-            yBuffer.get(yData);
-
-            Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
-            yMat.put(0, 0, yData);
-
-            return yMat;
-        }
-
-        private static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
-            Mat diffImage = new Mat();
-            Core.absdiff(inputMat, previousImage, diffImage);
-            Mat binaryMat = new Mat();
-            Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
-            return binaryMat;
-        }
-
-        private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) {
-            // Step 1: Extract the image data from ImageProxy
-            ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();
-            ByteBuffer yBuffer = planes[0].getBuffer();
-            byte[] yData = new byte[yBuffer.remaining()];
-            yBuffer.get(yData);
-
-            // Step 2: Convert the image data to NV21 format
-            int width = imageProxy.getWidth();
-            int height = imageProxy.getHeight();
-            byte[] nv21Data = new byte[width * height * 3 / 2];
-            // Assuming the image format is YUV_420_888
-            System.arraycopy(yData, 0, nv21Data, 0, yData.length);
-            for (int i = yData.length; i < nv21Data.length; i += 2) {
-                nv21Data[i] = yData[i + 1];
-                nv21Data[i + 1] = yData[i];
-            }
-
-            // Step 3: Create a grayscale Mat from the NV21 data
-            Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
-            grayscaleMat.put(0, 0, nv21Data);
-
-            return grayscaleMat;
-        }
-
-
-        private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
-            List<MatOfPoint> contours = new ArrayList<>();
-            Mat hierarchy = new Mat();
-            Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
-
-            Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
-            // Iterate over the contours and draw only the larger contours on the outputMat
-            for (MatOfPoint contour : contours) {
-                double contourArea = Imgproc.contourArea(contour);
-                if (contourArea > areaThreshold) {
-                    Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
-                }
-            }
-            // Apply the outputMat as a mask to the dilatedImage
-            Mat maskedImage = new Mat();
-            inputMat.copyTo(maskedImage, outputMat);
-            return outputMat;
-        }
-
-        private static Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
-            Mat dilatedMat = new Mat();
-            Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
-            Imgproc.dilate(inputMat, dilatedMat, kernel);
-            return dilatedMat;
-        }
-
-        private static int countNonZeroPixels(Mat inputImage) {
-            if (inputImage != null)
-                return Core.countNonZero(inputImage);
-            else
-                return 0;
-        }
-
-
-        private static void debugMat(Mat mat, ImageView imageView) {
-            if (imageView == null || mat == null)
-                return;
-
-            Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
-            Utils.matToBitmap(mat, bitmap);
-
-            // Display the bitmap in an ImageView
-            imageView.setImageBitmap(bitmap);
-        }
     }
 }
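
As a quick standalone check of the new recording name (the printed value is only an example), the "yyyyMMdd_HHmmss" pattern plus the ".mp4" suffix yields file names such as 20230621_140509.mp4 for the vidFile created in startRecording():

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Standalone sketch mirroring generateFileName(): prints the kind of name
// that startRecording() now uses for vidFile, e.g. "20230621_140509.mp4".
class FileNameFormatDemo {
    public static void main(String[] args) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss");
        String name = LocalDateTime.now().format(formatter) + ".mp4";
        System.out.println(name);
    }
}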

MainActivity.java
@@ -11,6 +11,7 @@ import android.view.View;
 import android.widget.ImageView;
 import android.widget.ToggleButton;
 
+import com.example.ueberwachungssystem.Detection.AudioRecorder;
 import com.example.ueberwachungssystem.Detection.DetectionReport;
 import com.example.ueberwachungssystem.Detection.Detector;
 import com.example.ueberwachungssystem.Detection.VideoDetector;
@@ -41,6 +42,9 @@ public class MainActivity extends AppCompatActivity {
         vd.startDetection();
 
 
+        AudioRecorder audioRecorder = new AudioRecorder(this);
+
+
 
         ToggleButton toggleButton = findViewById(R.id.toggleButton);
         toggleButton.setOnClickListener(new View.OnClickListener() {
@@ -50,10 +54,12 @@
                 {
                     //vd.startDetection();
                     vd.startRecording();
+                    audioRecorder.startRecording();
                 }
                 else {
                     //vd.stopDetection();
                     vd.stopRecording();
+                    audioRecorder.stopRecording();
                 }
             }
         });