|
|
@@ -19,6 +19,7 @@ import androidx.annotation.Nullable; |
|
|
|
import androidx.camera.core.CameraSelector; |
|
|
|
import androidx.camera.core.ExperimentalGetImage; |
|
|
|
import androidx.camera.core.ImageAnalysis; |
|
|
|
import androidx.camera.core.ImageProxy; |
|
|
|
import androidx.camera.core.Preview; |
|
|
|
import androidx.camera.core.VideoCapture; |
|
|
|
import androidx.camera.lifecycle.ProcessCameraProvider; |
|
|
@@ -140,6 +141,8 @@ public class VideoDetector extends Detector { |
|
|
|
bindCameraProvider(UseCase.ImageAnalysis); |
|
|
|
} catch (ExecutionException | InterruptedException e) {} |
|
|
|
}, ContextCompat.getMainExecutor(context)); |
|
|
|
|
|
|
|
startViolationTimer(); |
|
|
|
} |
|
|
|
|
|
|
|
/** Stops the Video Detection */ |
|
|
@@ -148,7 +151,6 @@ public class VideoDetector extends Detector { |
|
|
|
if (!isDetecting || imageAnalysis == null) |
|
|
|
return; |
|
|
|
cameraProvider.unbind(imageAnalysis); |
|
|
|
cameraProvider.unbind(preview); |
|
|
|
isDetecting = false; |
|
|
|
allowReportViolation = false; |
|
|
|
} |
|
|
@@ -169,20 +171,8 @@ public class VideoDetector extends Detector { |
|
|
|
/**
 * Rebinds CameraX use cases to the lifecycle for the requested mode.
 *
 * NOTE(review): this span comes from a diff whose +/- markers were lost; the
 * unconditional unbindAll()/bindToLifecycle(...) at the bottom makes every
 * branch above it dead (it always ends up binding imageAnalysis + videoCapture,
 * and requesting ImageAnalysis triggers startViolationTimer() twice together
 * with the final rebind). One of the two halves is almost certainly the
 * removed side of the hunk — confirm which version is intended before merging.
 *
 * @param useCase which use-case configuration to bind (ImageAnalysis or VideoCapture)
 */
private void bindCameraProvider(UseCase useCase) {
    // Specify which Camera to use — always the back-facing lens.
    CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
    // Analysis mode: only rebind if video capture is not currently bound,
    // so an in-progress recording is not torn down.
    if(useCase == UseCase.ImageAnalysis && !cameraProvider.isBound(videoCapture)) {
        cameraProvider.unbindAll();
        cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, preview);
        startViolationTimer();
    }
    // Capture mode: keep analysis bound alongside capture when it was active.
    if(useCase == UseCase.VideoCapture) {
        if(cameraProvider.isBound(imageAnalysis)) {
            cameraProvider.unbindAll();
            cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
        } else {
            cameraProvider.unbindAll();
            cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, videoCapture);
        }
    }
    // NOTE(review): unconditional rebind — overrides both branches above.
    cameraProvider.unbindAll();
    cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
}
|
|
|
|
|
|
|
|
|
|
@@ -214,7 +204,7 @@ public class VideoDetector extends Detector { |
|
|
|
assert image != null; |
|
|
|
|
|
|
|
// Violation Handling |
|
|
|
Mat processed = processImage(image); |
|
|
|
Mat processed = processImage(imageProxy); |
|
|
|
int n = OpenCVHelper.countNonZeroPixels(processed); |
|
|
|
int pixelCount = image.getWidth() * image.getHeight(); |
|
|
|
float percentChanged = (float) n / pixelCount; |
|
|
@@ -241,7 +231,7 @@ public class VideoDetector extends Detector { |
|
|
|
public void startRecording() { |
|
|
|
// Check States |
|
|
|
if (isRecording){ |
|
|
|
extendDetection(); |
|
|
|
extendViolation(); |
|
|
|
return; |
|
|
|
} |
|
|
|
|
|
|
@@ -268,10 +258,12 @@ public class VideoDetector extends Detector { |
|
|
|
new VideoCapture.OnVideoSavedCallback() { |
|
|
|
@Override |
|
|
|
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) { |
|
|
|
isRecording = false; |
|
|
|
Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show(); |
|
|
|
} |
|
|
|
@Override |
|
|
|
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) { |
|
|
|
isRecording = false; |
|
|
|
Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show(); |
|
|
|
} |
|
|
|
} |
|
|
@@ -284,14 +276,16 @@ public class VideoDetector extends Detector { |
|
|
|
/**
 * Stops an active video recording and restores the detection pipeline.
 *
 * Unbinds the videoCapture use case after asking it to stop, then rebinds
 * image analysis if motion detection is still running.
 *
 * NOTE(review): isRecording is cleared here immediately, but the
 * OnVideoSavedCallback passed to startRecording also clears it when the file
 * is saved or fails — confirm the flag is not needed between stopRecording()
 * and the (asynchronous) callback firing.
 */
public void stopRecording(){
    videoCapture.stopRecording();
    cameraProvider.unbind(videoCapture);
    if (isDetecting)
        bindCameraProvider(UseCase.ImageAnalysis);
    isRecording = false;
}
|
|
|
|
|
|
|
/** Process Image to be used for Motion Detection */ |
|
|
|
private Mat processImage(Image image){ |
|
|
|
private Mat processImage(ImageProxy imageProxy){ |
|
|
|
if (imageProxy == null) |
|
|
|
return null; |
|
|
|
|
|
|
|
// Image Transformation |
|
|
|
Mat imageMat = OpenCVHelper.extractYChannel(image); |
|
|
|
Mat imageMat = OpenCVHelper.extractYChannel(imageProxy); |
|
|
|
|
|
|
|
// Show Input Image |
|
|
|
if (inputImageView != null) |
|
|
@@ -335,9 +329,6 @@ public class VideoDetector extends Detector { |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
private static class OpenCVHelper{ |
|
|
|
private OpenCVHelper() {} |
|
|
|
|
|
|
@@ -354,7 +345,10 @@ public class VideoDetector extends Detector { |
|
|
|
return outputMat; |
|
|
|
} |
|
|
|
|
|
|
|
private static Mat extractYChannel(@NonNull Image img) { |
|
|
|
private static Mat extractYChannel(@NonNull ImageProxy imgProxy) { |
|
|
|
Image img = imgProxy.getImage(); |
|
|
|
|
|
|
|
assert img != null; |
|
|
|
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer(); |
|
|
|
byte[] yData = new byte[yBuffer.remaining()]; |
|
|
|
yBuffer.get(yData); |
|
|
@@ -373,6 +367,32 @@ public class VideoDetector extends Detector { |
|
|
|
return binaryMat; |
|
|
|
} |
|
|
|
|
|
|
|
private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) { |
|
|
|
// Step 1: Extract the image data from ImageProxy |
|
|
|
ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes(); |
|
|
|
ByteBuffer yBuffer = planes[0].getBuffer(); |
|
|
|
byte[] yData = new byte[yBuffer.remaining()]; |
|
|
|
yBuffer.get(yData); |
|
|
|
|
|
|
|
// Step 2: Convert the image data to NV21 format |
|
|
|
int width = imageProxy.getWidth(); |
|
|
|
int height = imageProxy.getHeight(); |
|
|
|
byte[] nv21Data = new byte[width * height * 3 / 2]; |
|
|
|
// Assuming the image format is YUV_420_888 |
|
|
|
System.arraycopy(yData, 0, nv21Data, 0, yData.length); |
|
|
|
for (int i = yData.length; i < nv21Data.length; i += 2) { |
|
|
|
nv21Data[i] = yData[i + 1]; |
|
|
|
nv21Data[i + 1] = yData[i]; |
|
|
|
} |
|
|
|
|
|
|
|
// Step 3: Create a grayscale Mat from the NV21 data |
|
|
|
Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1); |
|
|
|
grayscaleMat.put(0, 0, nv21Data); |
|
|
|
|
|
|
|
return grayscaleMat; |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){ |
|
|
|
List<MatOfPoint> contours = new ArrayList<>(); |
|
|
|
Mat hierarchy = new Mat(); |