Fixed bugs in Detector and VideoDetector: Detector no longer throws an IllegalStateException when no detection listener is set, and VideoDetector now binds the camera provider once, gates violation reports behind a start-up delay, and unbinds only its image analysis when detection stops

Bastian Kohler 2023-06-17 11:20:56 +02:00
parent fb8bdcd895
commit d32e0a11f5
5 changed files with 211 additions and 158 deletions

View File

@@ -5,6 +5,8 @@
<uses-feature android:name="android.hardware.camera"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"

View File

@@ -11,8 +11,8 @@ abstract public class Detector {
private boolean extendDetection = false;
// Countdown parameters
private int COUNTDOWN_TIME = 3000; // milliseconds
private int COUNTDOWN_POLLING_TIME = 100; // milliseconds
private final int COUNTDOWN_TIME = 5000; // milliseconds
private final int COUNTDOWN_POLLING_TIME = 100; // milliseconds
/** Constructor - takes context of current activity */
public Detector() {}
@@ -40,7 +40,6 @@ abstract public class Detector {
} else {
isDetecting = false;
extendDetection = false;
throw new IllegalStateException("No listener set for violation reporting");
}
}
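With the IllegalStateException removed, a Detector without a registered OnDetectionListener now simply resets its state instead of throwing at the caller. A minimal sketch of that guarded-listener pattern, with a simplified callback signature (the project's real listener receives a DetectionReport, whose constructor is not shown in this diff):

// Hedged sketch, not code from the commit: reporting fails soft when no
// listener has been registered, instead of throwing.
abstract class GuardedDetectorSketch {
    // Simplified callback; the project's OnDetectionListener takes a DetectionReport.
    interface OnDetectionListener {
        void onDetection(String detectionType, float detectedValue);
    }
    private OnDetectionListener listener = null;   // may legitimately stay null
    void setOnDetectionListener(OnDetectionListener listener) {
        this.listener = listener;
    }
    void reportViolation(String detectionType, float detectedValue) {
        if (listener != null) {
            listener.onDetection(detectionType, detectedValue);
        }
        // No listener: drop the report silently rather than throwing.
    }
}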

View File

@@ -1,6 +1,7 @@
package com.example.ueberwachungssystem.Detection;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
@@ -9,13 +10,17 @@ import android.graphics.ImageFormat;
import android.media.Image;
import android.os.CountDownTimer;
import android.util.Log;
import android.view.Surface;
import android.widget.ImageView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.Preview;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
@@ -33,6 +38,7 @@ import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
@@ -52,58 +58,56 @@ public class VideoDetector extends Detector {
// Calling Activity
private final Context context;
// Permission handling
private static final int CAMERA_PERMISSION_REQUEST_CODE = 3691;
// Camera Provider
private ProcessCameraProvider cameraProvider;
private final ImageAnalysis imageAnalysis;
private final VideoCapture videoCapture;
// Logic
private boolean isDetectionRunning = false;
private boolean allowReportViolation = false;
// Image Processing
private Mat previousImage = null;
// Debugging
private Mat previousImage = null;
public ImageView imageView1 = null;
public ImageView imageView2 = null;
private ImageView inputImageView = null;
private ImageView outputImageView = null;
// Parameters
private static final float ALARM_THRESHOLD = 0.5f; // Percent of pixels changed
private static final long START_DELAY = 1000; // milliseconds
private static final long START_DELAY = 5000; // milliseconds
private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480);
/**
* Constructor
* @param context: the context of calling activity (usually "this")
* */
/** Constructor */
public VideoDetector(Context context) {
super();
this.context = context;
this.imageAnalysis = setupImageAnalysis();
this.videoCapture = setupVideoCapture();
}
/**
* Get State of the Detector
*/
/** Get State of the Detector */
public boolean isRunning() {
return isDetectionRunning;
}
/**
* Starts the Video Detection
* */
/** Starts the Video Detection */
@Override
public void startDetection() {
// Check States
if (isDetectionRunning)
return;
if (!isCameraAccessAllowed()){
if (!isCameraAccessAllowed()) {
getCameraAccess();
}
// Open CV startup check
if (!OpenCVLoader.initDebug()) {
Log.e("OpenCV", "Unable to load OpenCV!");
@@ -111,35 +115,31 @@ public class VideoDetector extends Detector {
} else
Log.d("OpenCV", "OpenCV loaded Successfully!");
// Request Camera Provider
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
//Check for Camera availability
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
bindAnalysis(cameraProvider);
bindCameraProvider(cameraProvider);
isDetectionRunning = true;
} catch (ExecutionException | InterruptedException e) {
// No errors need to be handled for this Future. This should never be reached.
}
},ContextCompat.getMainExecutor(context));
}, ContextCompat.getMainExecutor(context));
}
/**
* Stops the Video Detection
* */
/** Stops the Video Detection */
@Override
public void stopDetection() {
if (!isDetectionRunning)
if (!isDetectionRunning || imageAnalysis == null)
return;
cameraProvider.unbindAll();
cameraProvider.unbind(imageAnalysis);
isDetectionRunning = false;
allowReportViolation = false;
}
/**
* Permission handling
*/
/** Permission handling */
private boolean isCameraAccessAllowed() {
return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
}
@@ -150,70 +150,88 @@ public class VideoDetector extends Detector {
}
/**
* Binds the Luminosity Analyzer (configure and run Analysis)
* @param cameraProvider: CameraProvider of Context passed by Constructor
* */
private void bindAnalysis(@NonNull ProcessCameraProvider cameraProvider) {
/** Binds the Luminosity Analyzer (configure and run Analysis) */
private void bindCameraProvider(@NonNull ProcessCameraProvider cameraProvider) {
// Create Preview
//Preview preview = new Preview.Builder().build();
// Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis);
// Delay till violation is allowed
startViolationTimer();
}
/** Start delay until Violation Report is allowed */
private void startViolationTimer() {
new CountDownTimer((long) (START_DELAY), 100) {
@Override
public void onTick(long millisUntilFinished) {
}
@Override
public void onFinish() {
allowReportViolation = true;
}
}.start();
}
private ImageAnalysis setupImageAnalysis() {
// Configure and create Image Analysis
ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
builder.setTargetResolution(IMAGE_RES);
builder.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST);
builder.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888);
ImageAnalysis imageAnalysis = builder.build();
// Set Analyzer
imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(context), imageProxy -> {
if (imageProxy.getFormat() == ImageFormat.YUV_420_888) {
Image image = imageProxy.getImage();
assert image != null;
Mat currentMat = extractYChannel(image);
Mat mat = currentMat.clone();
debugMat(mat, imageView2);
Mat processed = processImage(mat);
debugMat(processed, imageView1);
int n = countNonZeroPixels(processed);
// Violation Handling
Mat processed = processImage(image);
int n = OpenCVHelper.countNonZeroPixels(processed);
int pixelCount = image.getWidth() * image.getHeight();
float percentChanged = (float)n / pixelCount;
// report violation
if (percentChanged * 100 > ALARM_THRESHOLD) {
reportViolation("Video", n);
float percentChanged = (float) n / pixelCount;
// Violation Condition
if (percentChanged * 100 > ALARM_THRESHOLD) {
if (allowReportViolation)
reportViolation("Video", n);
}
}
imageProxy.close();
});
// Create Preview
Preview preview = new Preview.Builder().build();
// Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
// Delay till start
new CountDownTimer((long)(START_DELAY), 100){
@Override
public void onTick(long millisUntilFinished) {}
@Override
public void onFinish() {
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, preview);
}
}.start();
return imageAnalysis;
}
@SuppressLint("RestrictedApi")
private VideoCapture setupVideoCapture() {
return new VideoCapture.Builder()
.setVideoFrameRate(30)
.setTargetRotation(Surface.ROTATION_0)
.build();
}
/**
* Process Image to be used for Motion Detection
*
* @param image: OpenCV Mat file that should be processed
*/
private Mat processImage(Mat image){
/** Process Image to be used for Motion Detection */
private Mat processImage(Image image){
// Image Transformation
Mat imageMat = OpenCVHelper.extractYChannel(image);
// Show Input Image
if (inputImageView != null)
OpenCVHelper.debugMat(imageMat, inputImageView);
// Preprocess Image
Mat preprocessed = image.clone();
preprocessed = addGaussianBlur(preprocessed, new Size(21, 21));
preprocessed = addBlur(preprocessed, new Size(3, 3));
Mat preprocessed = imageMat;
preprocessed = OpenCVHelper.addGaussianBlur(preprocessed, new Size(21, 21));
preprocessed = OpenCVHelper.addBlur(preprocessed, new Size(3, 3));
if (previousImage == null) {
previousImage = preprocessed;
@@ -222,91 +240,111 @@ public class VideoDetector extends Detector {
// Process Image
Mat processed = preprocessed.clone();
processed = thresholdPixels(processed, previousImage, 25);
processed = dilateBinaryMat(processed, new Size(3,3));
processed = dilateBinaryMat(processed, new Size(3,3));
processed = thresholdContourArea(processed, 500);
processed = OpenCVHelper.thresholdPixels(processed, previousImage, 25);
processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3));
processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3));
processed = OpenCVHelper.thresholdContourArea(processed, 500);
// Output
previousImage = preprocessed.clone();
// Show Output Image
if (outputImageView != null)
OpenCVHelper.debugMat(processed, outputImageView);
return processed;
}
/** OpenCV helper methods **/
private Mat addGaussianBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
return outputMat;
public void debugProcessing(ImageView inputImageView, ImageView outputImageView){
this.inputImageView = inputImageView;
this.outputImageView = outputImageView;
}
private Mat addBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.blur(inputMat, outputMat, kernelSize);
return outputMat;
}
private Mat extractYChannel(@NonNull Image img) {
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
byte[] yData = new byte[yBuffer.remaining()];
yBuffer.get(yData);
Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
yMat.put(0, 0, yData);
return yMat;
}
private Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
Mat diffImage = new Mat();
Core.absdiff(inputMat, previousImage, diffImage);
Mat binaryMat = new Mat();
Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
return binaryMat;
}
private Mat thresholdContourArea(Mat inputMat, float areaThreshold){
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
// Iterate over the contours and draw only the larger contours on the outputMat
for (MatOfPoint contour : contours) {
double contourArea = Imgproc.contourArea(contour);
if (contourArea > areaThreshold) {
Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
}
private static class OpenCVHelper{
private OpenCVHelper() {}
/** OpenCV helper methods **/
private static Mat addGaussianBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
return outputMat;
}
// Apply the outputMat as a mask to the dilatedImage
Mat maskedImage = new Mat();
inputMat.copyTo(maskedImage, outputMat);
return outputMat;
}
private Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
Mat dilatedMat = new Mat();
Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
Imgproc.dilate(inputMat, dilatedMat, kernel);
return dilatedMat;
}
private static Mat addBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.blur(inputMat, outputMat, kernelSize);
return outputMat;
}
private int countNonZeroPixels(Mat inputImage) {
if (inputImage != null)
return Core.countNonZero(inputImage);
else
return 0;
}
private static Mat extractYChannel(@NonNull Image img) {
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
byte[] yData = new byte[yBuffer.remaining()];
yBuffer.get(yData);
Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
yMat.put(0, 0, yData);
return yMat;
}
private static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
Mat diffImage = new Mat();
Core.absdiff(inputMat, previousImage, diffImage);
Mat binaryMat = new Mat();
Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
return binaryMat;
}
private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
// Iterate over the contours and draw only the larger contours on the outputMat
for (MatOfPoint contour : contours) {
double contourArea = Imgproc.contourArea(contour);
if (contourArea > areaThreshold) {
Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
}
}
// Apply the outputMat as a mask to the dilatedImage
Mat maskedImage = new Mat();
inputMat.copyTo(maskedImage, outputMat);
return outputMat;
}
private static Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
Mat dilatedMat = new Mat();
Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
Imgproc.dilate(inputMat, dilatedMat, kernel);
return dilatedMat;
}
private static int countNonZeroPixels(Mat inputImage) {
if (inputImage != null)
return Core.countNonZero(inputImage);
else
return 0;
}
private static void debugMat(Mat mat, ImageView imageView) {
if (imageView == null || mat == null)
return;
private void debugMat(Mat mat, ImageView imageView) {
if (imageView == null || mat == null)
return;
Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat, bitmap);
Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat, bitmap);
// Display the bitmap in an ImageView
imageView.setImageBitmap(bitmap);
// Display the bitmap in an ImageView
imageView.setImageBitmap(bitmap);
}
}
}
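For scale: setupImageAnalysis reports a violation only when percentChanged * 100 exceeds ALARM_THRESHOLD (0.5, i.e. 0.5 % of the frame) and only once startViolationTimer has set allowReportViolation after the 5000 ms START_DELAY. A back-of-the-envelope check of what that threshold means, assuming the camera actually delivers the requested 640x480 analysis frames (illustrative only, not part of the commit):

// Illustrative arithmetic using the constants visible in this diff.
class AlarmThresholdCheck {
    public static void main(String[] args) {
        int width = 640, height = 480;              // IMAGE_RES target resolution
        float alarmThresholdPercent = 0.5f;         // ALARM_THRESHOLD

        int pixelCount = width * height;                                    // 307,200
        float thresholdPixels = pixelCount * alarmThresholdPercent / 100f;  // 1,536.0

        // Report condition: percentChanged * 100 > ALARM_THRESHOLD,
        // i.e. the processed mask must contain more than ~1,536 non-zero pixels.
        System.out.println("Pixels per frame: " + pixelCount);
        System.out.println("Non-zero pixels needed to report: > " + (int) thresholdPixels);
    }
}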

View File

@@ -2,18 +2,11 @@ package com.example.ueberwachungssystem;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.camera.core.ExperimentalGetImage;
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.example.ueberwachungssystem.Detection.DetectionReport;
@@ -32,19 +25,34 @@ public class MainActivity extends AppCompatActivity {
setContentView(R.layout.activity_main);
ImageView imageView = findViewById(R.id.imageView);
ImageView ogiv = findViewById(R.id.ogiv);
ImageView inputImageView = findViewById(R.id.inputImageView);
ImageView outputImageView = findViewById(R.id.outputImageView);
VideoDetector vd = new VideoDetector(this);
vd.imageView1 = imageView;
vd.imageView2 = ogiv;
vd.debugProcessing(inputImageView, outputImageView);
vd.setOnDetectionListener(new Detector.OnDetectionListener(){
@Override
public void onDetection(@NonNull DetectionReport detectionReport) {
detectionReport.log("OnDetection");
}
});
vd.startDetection();
ToggleButton toggleButton = findViewById(R.id.toggleButton);
toggleButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (toggleButton.isChecked())
{
vd.startDetection();
}
else {
vd.stopDetection();
}
}
});
}
}

View File

@@ -16,14 +16,20 @@
android:layout_height="1dp"
android:backgroundTint="@android:color/black"/>
<ToggleButton
android:id="@+id/toggleButton"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="ToggleButton" />
<ImageView
android:id="@+id/ogiv"
android:id="@+id/inputImageView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
tools:srcCompat="@tools:sample/avatars" />
<ImageView
android:id="@+id/imageView"
android:id="@+id/outputImageView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
tools:srcCompat="@tools:sample/avatars" />