Removed Preview Use Case, Audio still not working

This commit is contained in:
Bastian Kohler 2023-06-18 10:55:21 +02:00
parent 76442888f7
commit 29c193b716
3 changed files with 61 additions and 33 deletions

View File

@ -8,7 +8,7 @@ import androidx.annotation.NonNull;
abstract public class Detector { abstract public class Detector {
private OnDetectionListener listener; private OnDetectionListener listener;
private boolean isDetecting = false; private boolean isDetecting = false;
private boolean extendDetection = false; private boolean extendViolation = false;
// Countdown parameters // Countdown parameters
private final int COUNTDOWN_TIME = 5000; // milliseconds private final int COUNTDOWN_TIME = 5000; // milliseconds
@ -35,11 +35,11 @@ abstract public class Detector {
listener.onDetection(detectionReport); listener.onDetection(detectionReport);
startDetectionTimer(detectionType, amplitude); startDetectionTimer(detectionType, amplitude);
} else { } else {
extendDetection = true; extendViolation = true;
} }
} else { } else {
isDetecting = false; isDetecting = false;
extendDetection = false; extendViolation = false;
} }
} }
@ -48,8 +48,8 @@ abstract public class Detector {
new CountDownTimer((long) COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) { new CountDownTimer((long) COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) {
@Override @Override
public void onTick(long millisUntilFinished) { public void onTick(long millisUntilFinished) {
if (extendDetection) { if (extendViolation) {
extendDetection = false; extendViolation = false;
startDetectionTimer(detectionType, amplitude); startDetectionTimer(detectionType, amplitude);
this.cancel(); this.cancel();
} }
@ -63,8 +63,8 @@ abstract public class Detector {
}.start(); }.start();
} }
public void extendDetection(){ public void extendViolation(){
this.extendDetection = true; this.extendViolation = true;
} }
/** Starts Detection (abstract method: needs to be overridden in child class) */ /** Starts Detection (abstract method: needs to be overridden in child class) */

View File

@ -19,6 +19,7 @@ import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector; import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage; import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis; import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview; import androidx.camera.core.Preview;
import androidx.camera.core.VideoCapture; import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider; import androidx.camera.lifecycle.ProcessCameraProvider;
@ -140,6 +141,8 @@ public class VideoDetector extends Detector {
bindCameraProvider(UseCase.ImageAnalysis); bindCameraProvider(UseCase.ImageAnalysis);
} catch (ExecutionException | InterruptedException e) {} } catch (ExecutionException | InterruptedException e) {}
}, ContextCompat.getMainExecutor(context)); }, ContextCompat.getMainExecutor(context));
startViolationTimer();
} }
/** Stops the Video Detection */ /** Stops the Video Detection */
@ -148,7 +151,6 @@ public class VideoDetector extends Detector {
if (!isDetecting || imageAnalysis == null) if (!isDetecting || imageAnalysis == null)
return; return;
cameraProvider.unbind(imageAnalysis); cameraProvider.unbind(imageAnalysis);
cameraProvider.unbind(preview);
isDetecting = false; isDetecting = false;
allowReportViolation = false; allowReportViolation = false;
} }
@ -169,20 +171,8 @@ public class VideoDetector extends Detector {
private void bindCameraProvider(UseCase useCase) { private void bindCameraProvider(UseCase useCase) {
// Specify which Camera to use // Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build(); CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
if(useCase == UseCase.ImageAnalysis && !cameraProvider.isBound(videoCapture)) { cameraProvider.unbindAll();
cameraProvider.unbindAll(); cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, preview);
startViolationTimer();
}
if(useCase == UseCase.VideoCapture) {
if(cameraProvider.isBound(imageAnalysis)) {
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
} else {
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, videoCapture);
}
}
} }
@ -214,7 +204,7 @@ public class VideoDetector extends Detector {
assert image != null; assert image != null;
// Violation Handling // Violation Handling
Mat processed = processImage(image); Mat processed = processImage(imageProxy);
int n = OpenCVHelper.countNonZeroPixels(processed); int n = OpenCVHelper.countNonZeroPixels(processed);
int pixelCount = image.getWidth() * image.getHeight(); int pixelCount = image.getWidth() * image.getHeight();
float percentChanged = (float) n / pixelCount; float percentChanged = (float) n / pixelCount;
@ -241,7 +231,7 @@ public class VideoDetector extends Detector {
public void startRecording() { public void startRecording() {
// Check States // Check States
if (isRecording){ if (isRecording){
extendDetection(); extendViolation();
return; return;
} }
@ -268,10 +258,12 @@ public class VideoDetector extends Detector {
new VideoCapture.OnVideoSavedCallback() { new VideoCapture.OnVideoSavedCallback() {
@Override @Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) { public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
isRecording = false;
Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show(); Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show();
} }
@Override @Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) { public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show(); Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show();
} }
} }
@ -284,14 +276,16 @@ public class VideoDetector extends Detector {
public void stopRecording(){ public void stopRecording(){
videoCapture.stopRecording(); videoCapture.stopRecording();
cameraProvider.unbind(videoCapture); cameraProvider.unbind(videoCapture);
if (isDetecting) isRecording = false;
bindCameraProvider(UseCase.ImageAnalysis);
} }
/** Process Image to be used for Motion Detection */ /** Process Image to be used for Motion Detection */
private Mat processImage(Image image){ private Mat processImage(ImageProxy imageProxy){
if (imageProxy == null)
return null;
// Image Transformation // Image Transformation
Mat imageMat = OpenCVHelper.extractYChannel(image); Mat imageMat = OpenCVHelper.extractYChannel(imageProxy);
// Show Input Image // Show Input Image
if (inputImageView != null) if (inputImageView != null)
@ -335,9 +329,6 @@ public class VideoDetector extends Detector {
private static class OpenCVHelper{ private static class OpenCVHelper{
private OpenCVHelper() {} private OpenCVHelper() {}
@ -354,7 +345,10 @@ public class VideoDetector extends Detector {
return outputMat; return outputMat;
} }
private static Mat extractYChannel(@NonNull Image img) { private static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
Image img = imgProxy.getImage();
assert img != null;
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer(); ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
byte[] yData = new byte[yBuffer.remaining()]; byte[] yData = new byte[yBuffer.remaining()];
yBuffer.get(yData); yBuffer.get(yData);
@ -373,6 +367,32 @@ public class VideoDetector extends Detector {
return binaryMat; return binaryMat;
} }
/**
 * Converts a camera frame to a single-channel grayscale OpenCV Mat.
 *
 * <p>For a YUV_420_888 frame the luminance (Y) plane IS the grayscale image, so only
 * plane 0 is read; the chroma planes are ignored. The previous implementation tried to
 * rebuild an NV21 buffer and indexed {@code yData} past its end
 * ({@code yData[i]} with {@code i >= yData.length}), which threw
 * {@link ArrayIndexOutOfBoundsException} on the first loop iteration — and the extra
 * chroma bytes would not fit a {@code CV_8UC1} Mat anyway.
 *
 * @param imageProxy frame from CameraX; assumed YUV_420_888 — TODO confirm against the
 *                   ImageAnalysis output format configured by the caller
 * @return a {@code height x width CV_8UC1} Mat holding the Y plane
 */
private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy yPlane = imageProxy.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int width = imageProxy.getWidth();
    int height = imageProxy.getHeight();
    // Rows may be padded: each row occupies rowStride bytes but only `width` are pixels.
    int rowStride = yPlane.getRowStride();

    Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
    byte[] row = new byte[width];
    for (int r = 0; r < height; r++) {
        yBuffer.position(r * rowStride);
        yBuffer.get(row, 0, width);
        grayscaleMat.put(r, 0, row);
    }
    return grayscaleMat;
}
private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){ private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
List<MatOfPoint> contours = new ArrayList<>(); List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat(); Mat hierarchy = new Mat();

View File

@ -6,6 +6,7 @@ import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.view.PreviewView; import androidx.camera.view.PreviewView;
import android.os.Bundle; import android.os.Bundle;
import android.util.Log;
import android.view.View; import android.view.View;
import android.widget.ImageView; import android.widget.ImageView;
import android.widget.ToggleButton; import android.widget.ToggleButton;
@ -29,8 +30,15 @@ public class MainActivity extends AppCompatActivity {
VideoDetector vd = new VideoDetector(this); VideoDetector vd = new VideoDetector(this);
vd.setPreviewView(previewView); //vd.setPreviewView(previewView);
vd.debugProcessing(inputImageView, outputImageView); vd.debugProcessing(inputImageView, outputImageView);
vd.setOnDetectionListener(new Detector.OnDetectionListener() {
@Override
public void onDetection(@NonNull DetectionReport detectionReport) {
Log.d("onDetection", detectionReport.toString());
}
});
vd.startDetection();