Removed Preview Use Case, Audio still not working

This commit is contained in:
Bastian Kohler 2023-06-18 10:55:21 +02:00
parent 76442888f7
commit 29c193b716
3 changed files with 61 additions and 33 deletions

View File

@@ -8,7 +8,7 @@ import androidx.annotation.NonNull;
abstract public class Detector {
private OnDetectionListener listener;
private boolean isDetecting = false;
private boolean extendDetection = false;
private boolean extendViolation = false;
// Countdown parameters
private final int COUNTDOWN_TIME = 5000; // milliseconds
@@ -35,11 +35,11 @@ abstract public class Detector {
listener.onDetection(detectionReport);
startDetectionTimer(detectionType, amplitude);
} else {
extendDetection = true;
extendViolation = true;
}
} else {
isDetecting = false;
extendDetection = false;
extendViolation = false;
}
}
@@ -48,8 +48,8 @@ abstract public class Detector {
new CountDownTimer((long) COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) {
@Override
public void onTick(long millisUntilFinished) {
if (extendDetection) {
extendDetection = false;
if (extendViolation) {
extendViolation = false;
startDetectionTimer(detectionType, amplitude);
this.cancel();
}
@@ -63,8 +63,8 @@ abstract public class Detector {
}.start();
}
public void extendDetection(){
this.extendDetection = true;
public void extendViolation(){
this.extendViolation = true;
}
/** Starts Detection (abstract method: needs to be overridden in child class) */

View File

@@ -19,6 +19,7 @@ import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
@@ -140,6 +141,8 @@ public class VideoDetector extends Detector {
bindCameraProvider(UseCase.ImageAnalysis);
} catch (ExecutionException | InterruptedException e) {}
}, ContextCompat.getMainExecutor(context));
startViolationTimer();
}
/** Stops the Video Detection */
@@ -148,7 +151,6 @@ public class VideoDetector extends Detector {
if (!isDetecting || imageAnalysis == null)
return;
cameraProvider.unbind(imageAnalysis);
cameraProvider.unbind(preview);
isDetecting = false;
allowReportViolation = false;
}
@@ -169,20 +171,8 @@ public class VideoDetector extends Detector {
private void bindCameraProvider(UseCase useCase) {
// Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
if(useCase == UseCase.ImageAnalysis && !cameraProvider.isBound(videoCapture)) {
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, preview);
startViolationTimer();
}
if(useCase == UseCase.VideoCapture) {
if(cameraProvider.isBound(imageAnalysis)) {
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
} else {
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, videoCapture);
}
}
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
}
@@ -214,7 +204,7 @@ public class VideoDetector extends Detector {
assert image != null;
// Violation Handling
Mat processed = processImage(image);
Mat processed = processImage(imageProxy);
int n = OpenCVHelper.countNonZeroPixels(processed);
int pixelCount = image.getWidth() * image.getHeight();
float percentChanged = (float) n / pixelCount;
@@ -241,7 +231,7 @@ public class VideoDetector extends Detector {
public void startRecording() {
// Check States
if (isRecording){
extendDetection();
extendViolation();
return;
}
@@ -268,10 +258,12 @@ public class VideoDetector extends Detector {
new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
isRecording = false;
Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show();
}
}
@@ -284,14 +276,16 @@ public class VideoDetector extends Detector {
public void stopRecording(){
videoCapture.stopRecording();
cameraProvider.unbind(videoCapture);
if (isDetecting)
bindCameraProvider(UseCase.ImageAnalysis);
isRecording = false;
}
/** Process Image to be used for Motion Detection */
private Mat processImage(Image image){
private Mat processImage(ImageProxy imageProxy){
if (imageProxy == null)
return null;
// Image Transformation
Mat imageMat = OpenCVHelper.extractYChannel(image);
Mat imageMat = OpenCVHelper.extractYChannel(imageProxy);
// Show Input Image
if (inputImageView != null)
@@ -335,9 +329,6 @@ public class VideoDetector extends Detector {
private static class OpenCVHelper{
private OpenCVHelper() {}
@@ -354,7 +345,10 @@ public class VideoDetector extends Detector {
return outputMat;
}
private static Mat extractYChannel(@NonNull Image img) {
private static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
Image img = imgProxy.getImage();
assert img != null;
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
byte[] yData = new byte[yBuffer.remaining()];
yBuffer.get(yData);
@@ -373,6 +367,32 @@ public class VideoDetector extends Detector {
return binaryMat;
}
/**
 * Converts the luma (Y) plane of a YUV_420_888 {@link ImageProxy} into a
 * single-channel grayscale {@code Mat}.
 *
 * <p>Fix: the previous implementation built an NV21 buffer by reading
 * {@code yData[i]} / {@code yData[i + 1]} for {@code i >= yData.length},
 * which is a guaranteed {@code ArrayIndexOutOfBoundsException} because only
 * the Y plane (width * height bytes) was ever extracted. The chroma half of
 * that buffer was also never consumed — a {@code CV_8UC1} Mat of
 * height x width only reads width * height bytes — so the NV21 step is
 * removed entirely and the Y plane is copied straight into the Mat.
 *
 * <p>NOTE(review): assumes the Y plane is tightly packed
 * (rowStride == width); if the device pads rows, each row must be copied
 * individually using {@code PlaneProxy.getRowStride()} — TODO confirm.
 *
 * @param imageProxy frame to convert; must not be null
 * @return grayscale Mat of size height x width (CV_8UC1)
 */
private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) {
    // Extract the luma plane — grayscale needs only Y, never U/V.
    ByteBuffer yBuffer = imageProxy.getPlanes()[0].getBuffer();
    byte[] yData = new byte[yBuffer.remaining()];
    yBuffer.get(yData);

    int width = imageProxy.getWidth();
    int height = imageProxy.getHeight();

    Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
    grayscaleMat.put(0, 0, yData);
    return grayscaleMat;
}
private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();

View File

@@ -6,6 +6,7 @@ import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.view.PreviewView;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.ToggleButton;
@@ -29,8 +30,15 @@ public class MainActivity extends AppCompatActivity {
VideoDetector vd = new VideoDetector(this);
vd.setPreviewView(previewView);
//vd.setPreviewView(previewView);
vd.debugProcessing(inputImageView, outputImageView);
vd.setOnDetectionListener(new Detector.OnDetectionListener() {
@Override
public void onDetection(@NonNull DetectionReport detectionReport) {
Log.d("onDetection", detectionReport.toString());
}
});
vd.startDetection();