
Removed Preview Use Case, Audio still not working

bk_video_test · Bastian Kohler · 1 year ago · commit 29c193b716

app/src/main/java/com/example/ueberwachungssystem/Detection/Detector.java (+7, -7)

 abstract public class Detector {
     private OnDetectionListener listener;
     private boolean isDetecting = false;
-    private boolean extendDetection = false;
+    private boolean extendViolation = false;

     // Countdown parameters
     private final int COUNTDOWN_TIME = 5000; // milliseconds

                 listener.onDetection(detectionReport);
                 startDetectionTimer(detectionType, amplitude);
             } else {
-                extendDetection = true;
+                extendViolation = true;
             }
         } else {
             isDetecting = false;
-            extendDetection = false;
+            extendViolation = false;
         }
     }

         new CountDownTimer((long) COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) {
             @Override
             public void onTick(long millisUntilFinished) {
-                if (extendDetection) {
-                    extendDetection = false;
+                if (extendViolation) {
+                    extendViolation = false;
                     startDetectionTimer(detectionType, amplitude);
                     this.cancel();
                 }
         }.start();
     }

-    public void extendDetection(){
-        this.extendDetection = true;
+    public void extendViolation(){
+        this.extendViolation = true;
     }

     /** Starts Detection (abstract method: needs to be overridden in child class) */
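Note (not part of the commit): the rename makes the flag's purpose explicit; it records that another violation occurred while the report countdown is still running, so the window is extended instead of a second onDetection being fired. A minimal, self-contained sketch of that retrigger pattern, with illustrative names (ViolationWindow, onViolation) and an assumed polling interval, since only fragments of Detector are visible here:

    import android.os.CountDownTimer;

    // Sketch of the retrigger pattern used by Detector (names are illustrative).
    public class ViolationWindow {
        private static final long COUNTDOWN_TIME = 5000;        // ms, as in Detector
        private static final long COUNTDOWN_POLLING_TIME = 100; // ms, assumed value
        private boolean isDetecting = false;
        private boolean extendViolation = false;

        /** Called on every detected violation. */
        public void onViolation() {
            if (!isDetecting) {
                isDetecting = true;
                startWindow();              // first violation: start the countdown
            } else {
                extendViolation = true;     // repeated violation: just extend the window
            }
        }

        private void startWindow() {
            new CountDownTimer(COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) {
                @Override
                public void onTick(long millisUntilFinished) {
                    if (extendViolation) {
                        extendViolation = false;
                        startWindow();      // restart the countdown ...
                        this.cancel();      // ... and drop the old timer
                    }
                }

                @Override
                public void onFinish() {
                    isDetecting = false;    // window elapsed without a new violation
                }
            }.start();
        }
    }

The timer polls the flag rather than being restarted from outside, which keeps all countdown state inside the anonymous CountDownTimer.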

app/src/main/java/com/example/ueberwachungssystem/Detection/VideoDetector.java (+45, -25)

 import androidx.camera.core.CameraSelector;
 import androidx.camera.core.ExperimentalGetImage;
 import androidx.camera.core.ImageAnalysis;
+import androidx.camera.core.ImageProxy;
 import androidx.camera.core.Preview;
 import androidx.camera.core.VideoCapture;
 import androidx.camera.lifecycle.ProcessCameraProvider;

                 bindCameraProvider(UseCase.ImageAnalysis);
             } catch (ExecutionException | InterruptedException e) {}
         }, ContextCompat.getMainExecutor(context));

+        startViolationTimer();
     }

     /** Stops the Video Detection */
         if (!isDetecting || imageAnalysis == null)
             return;
         cameraProvider.unbind(imageAnalysis);
-        cameraProvider.unbind(preview);
         isDetecting = false;
         allowReportViolation = false;
     }

     private void bindCameraProvider(UseCase useCase) {
         // Specify which Camera to use
         CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
-        if(useCase == UseCase.ImageAnalysis && !cameraProvider.isBound(videoCapture)) {
-            cameraProvider.unbindAll();
-            cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, preview);
-            startViolationTimer();
-        }
-        if(useCase == UseCase.VideoCapture) {
-            if(cameraProvider.isBound(imageAnalysis)) {
-                cameraProvider.unbindAll();
-                cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
-            } else {
-                cameraProvider.unbindAll();
-                cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, videoCapture);
-            }
-        }
+        cameraProvider.unbindAll();
+        cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
     }
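Note (not part of the commit): as reconstructed above, the replacement body drops the Preview use case and no longer branches on the useCase argument, so the re-bind in stopRecording() further down will immediately re-attach videoCapture as well. For reference, a self-contained sketch of the simplified binding (the helper class is hypothetical; only the CameraX calls mirror the diff):

    import android.content.Context;
    import androidx.camera.core.CameraSelector;
    import androidx.camera.core.ImageAnalysis;
    import androidx.camera.core.VideoCapture;
    import androidx.camera.lifecycle.ProcessCameraProvider;
    import androidx.core.content.ContextCompat;
    import androidx.lifecycle.LifecycleOwner;
    import com.google.common.util.concurrent.ListenableFuture;
    import java.util.concurrent.ExecutionException;

    // Hypothetical helper mirroring the simplified bindCameraProvider():
    // back camera, ImageAnalysis + VideoCapture, and no Preview use case.
    class AnalysisAndCaptureBinder {
        @SuppressWarnings("deprecation") // androidx.camera.core.VideoCapture is deprecated in newer CameraX releases
        static void bind(Context context, LifecycleOwner owner,
                         ImageAnalysis imageAnalysis, VideoCapture videoCapture) {
            ListenableFuture<ProcessCameraProvider> future = ProcessCameraProvider.getInstance(context);
            future.addListener(() -> {
                try {
                    ProcessCameraProvider cameraProvider = future.get();
                    CameraSelector cameraSelector = new CameraSelector.Builder()
                            .requireLensFacing(CameraSelector.LENS_FACING_BACK)
                            .build();
                    // Rebinding from scratch keeps only the two use cases that are still needed.
                    cameraProvider.unbindAll();
                    cameraProvider.bindToLifecycle(owner, cameraSelector, imageAnalysis, videoCapture);
                } catch (ExecutionException | InterruptedException ignored) {
                    // swallowed here, as in the original startDetection()
                }
            }, ContextCompat.getMainExecutor(context));
        }
    }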




         assert image != null;

         // Violation Handling
-        Mat processed = processImage(image);
+        Mat processed = processImage(imageProxy);
         int n = OpenCVHelper.countNonZeroPixels(processed);
         int pixelCount = image.getWidth() * image.getHeight();
         float percentChanged = (float) n / pixelCount;

     public void startRecording() {
         // Check States
         if (isRecording){
-            extendDetection();
+            extendViolation();
             return;
         }

         new VideoCapture.OnVideoSavedCallback() {
             @Override
             public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
+                isRecording = false;
                 Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show();
             }
             @Override
             public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
+                isRecording = false;
                 Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show();
             }
         }

     public void stopRecording(){
         videoCapture.stopRecording();
         cameraProvider.unbind(videoCapture);
+        if (isDetecting)
+            bindCameraProvider(UseCase.ImageAnalysis);
+        isRecording = false;
     }
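Note (not part of the commit): resetting isRecording in both callbacks also clears the state when a recording fails. The callbacks belong to the core androidx.camera.core.VideoCapture recording API (deprecated in newer CameraX releases); a rough sketch of how a recording is started against it, with an assumed output file name and using imports already present in VideoDetector plus java.io.File and android.annotation.SuppressLint:

    // Illustrative only: starting a recording with the core VideoCapture API
    // whose OnVideoSavedCallback the commit adjusts above.
    @SuppressLint("RestrictedApi") // may be required depending on the CameraX version
    void startRecordingSketch(Context context, VideoCapture videoCapture) {
        File outputFile = new File(context.getExternalFilesDir(null), "violation.mp4"); // assumed name
        VideoCapture.OutputFileOptions options =
                new VideoCapture.OutputFileOptions.Builder(outputFile).build();

        videoCapture.startRecording(options, ContextCompat.getMainExecutor(context),
                new VideoCapture.OnVideoSavedCallback() {
                    @Override
                    public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
                        // recording stopped and was saved successfully
                    }

                    @Override
                    public void onError(int videoCaptureError, @NonNull String message,
                                        @Nullable Throwable cause) {
                        // recording failed or was interrupted
                    }
                });
    }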


     /** Process Image to be used for Motion Detection */
-    private Mat processImage(Image image){
+    private Mat processImage(ImageProxy imageProxy){
+        if (imageProxy == null)
+            return null;

         // Image Transformation
-        Mat imageMat = OpenCVHelper.extractYChannel(image);
+        Mat imageMat = OpenCVHelper.extractYChannel(imageProxy);

         // Show Input Image
         if (inputImageView != null)

     private static class OpenCVHelper{
         private OpenCVHelper() {}

             return outputMat;
         }


-        private static Mat extractYChannel(@NonNull Image img) {
+        private static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
+            Image img = imgProxy.getImage();
+
+            assert img != null;
             ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
             byte[] yData = new byte[yBuffer.remaining()];
             yBuffer.get(yData);

             return binaryMat;
         }


+        private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) {
+            // Step 1: Extract the image data from ImageProxy
+            ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();
+            ByteBuffer yBuffer = planes[0].getBuffer();
+            byte[] yData = new byte[yBuffer.remaining()];
+            yBuffer.get(yData);
+
+            // Step 2: Convert the image data to NV21 format
+            int width = imageProxy.getWidth();
+            int height = imageProxy.getHeight();
+            byte[] nv21Data = new byte[width * height * 3 / 2];
+            // Assuming the image format is YUV_420_888
+            System.arraycopy(yData, 0, nv21Data, 0, yData.length);
+            for (int i = yData.length; i < nv21Data.length; i += 2) {
+                nv21Data[i] = yData[i + 1];
+                nv21Data[i + 1] = yData[i];
+            }
+
+            // Step 3: Create a grayscale Mat from the NV21 data
+            Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
+            grayscaleMat.put(0, 0, nv21Data);
+
+            return grayscaleMat;
+        }
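Note (not part of the commit): in the helper added above, the NV21 loop starts at i = yData.length but still reads yData[i + 1] and yData[i], so it indexes past the end of the luma array on its first iteration whenever nv21Data is longer than yData; and because the target Mat is CV_8UC1 with width * height bytes, the chroma half of nv21Data never ends up in the Mat anyway. A leaner sketch that copies only the Y plane and respects its row stride (method name is illustrative; imports as already used in VideoDetector):

    // Illustrative alternative: build the grayscale Mat from the Y plane alone,
    // honoring the row stride reported by the ImageProxy (the Y plane of
    // YUV_420_888 has a pixel stride of 1).
    private static Mat luminanceToGrayscaleMat(ImageProxy imageProxy) {
        ImageProxy.PlaneProxy yPlane = imageProxy.getPlanes()[0];
        ByteBuffer yBuffer = yPlane.getBuffer();
        int width = imageProxy.getWidth();
        int height = imageProxy.getHeight();
        int rowStride = yPlane.getRowStride();

        byte[] packed = new byte[width * height];
        byte[] row = new byte[rowStride];
        for (int y = 0; y < height; y++) {
            yBuffer.position(y * rowStride);
            yBuffer.get(row, 0, Math.min(rowStride, yBuffer.remaining()));
            System.arraycopy(row, 0, packed, y * width, width);
        }

        Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
        grayscaleMat.put(0, 0, packed);
        return grayscaleMat;
    }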


         private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
             List<MatOfPoint> contours = new ArrayList<>();
             Mat hierarchy = new Mat();

app/src/main/java/com/example/ueberwachungssystem/MainActivity.java (+9, -1)

 import androidx.camera.view.PreviewView;

 import android.os.Bundle;
+import android.util.Log;
 import android.view.View;
 import android.widget.ImageView;
 import android.widget.ToggleButton;




         VideoDetector vd = new VideoDetector(this);
-        vd.setPreviewView(previewView);
+        //vd.setPreviewView(previewView);
         vd.debugProcessing(inputImageView, outputImageView);
+        vd.setOnDetectionListener(new Detector.OnDetectionListener() {
+            @Override
+            public void onDetection(@NonNull DetectionReport detectionReport) {
+                Log.d("onDetection", detectionReport.toString());
+            }
+        });
+        vd.startDetection();






