@@ -5,7 +5,6 @@ import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.CountDownTimer;
@@ -31,20 +30,12 @@ import androidx.lifecycle.LifecycleOwner;
import com.google.common.util.concurrent.ListenableFuture;

import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.ExecutionException;
@@ -109,10 +100,13 @@ public class VideoDetector extends Detector {
        this.preview = new Preview.Builder().build();
    }

-   /** Get State of the Detector */
-   public boolean isRunning() {
+   /** Get States */
+   public boolean isDetecting() {
        return isDetecting;
    }
    public boolean isRecording(){
        return isRecording;
    }

    /** Starts the Video Detection */
@@ -190,6 +184,7 @@
    }

    /** Setup Use Cases */
    private ImageAnalysis setupImageAnalysis() {
        // Configure and create Image Analysis
        ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
@@ -234,7 +229,6 @@
            extendViolation();
            return;
        }

        // Return On Request Permissions
        if (!hasPermissions()) {
            getPermissions();
@@ -248,7 +242,7 @@
        isRecording = true;
        bindCameraProvider(UseCase.VideoCapture);

-       File vidFile = new File(context.getFilesDir() + "/" + outputName);
+       File vidFile = new File(context.getFilesDir() + "/" + generateFileName() + ".mp4");
        if (ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
            return;
        }
@@ -268,7 +262,7 @@
                    }
                }
            );
-           } catch (ExecutionException | InterruptedException e) {}
+           } catch (ExecutionException | InterruptedException ignored) {}
        }, ContextCompat.getMainExecutor(context));
    }
@@ -327,115 +321,12 @@
        this.preview.setSurfaceProvider(previewView.getSurfaceProvider());
    }

    private static class OpenCVHelper{
        private OpenCVHelper() {}

        /** OpenCV helper methods **/
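        // Applies a Gaussian blur with the given kernel size (sigma derived from the kernel) and returns the blurred copy.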
        private static Mat addGaussianBlur(Mat inputMat, Size kernelSize){
            Mat outputMat = new Mat();
            Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
            return outputMat;
        }

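        // Applies a normalized box blur with the given kernel size and returns the blurred copy.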
        private static Mat addBlur(Mat inputMat, Size kernelSize){
            Mat outputMat = new Mat();
            Imgproc.blur(inputMat, outputMat, kernelSize);
            return outputMat;
        }

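        // Copies the Y (luma) plane of a camera frame into a single-channel 8-bit Mat.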
        private static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
            Image img = imgProxy.getImage();

            assert img != null;
            ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
            byte[] yData = new byte[yBuffer.remaining()];
            yBuffer.get(yData);

            Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
            yMat.put(0, 0, yData);

            return yMat;
        }

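        // Computes the absolute per-pixel difference to the previous frame and binarizes it at the given threshold.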
        private static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
            Mat diffImage = new Mat();
            Core.absdiff(inputMat, previousImage, diffImage);
            Mat binaryMat = new Mat();
            Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
            return binaryMat;
        }

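        // Converts a CameraX ImageProxy into a grayscale Mat by copying the Y plane into an NV21-layout buffer and loading it into a single-channel Mat.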
        private static Mat imageProxyToGrayscaleMat(ImageProxy imageProxy) {
            // Step 1: Extract the image data from ImageProxy
            ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();
            ByteBuffer yBuffer = planes[0].getBuffer();
            byte[] yData = new byte[yBuffer.remaining()];
            yBuffer.get(yData);

            // Step 2: Convert the image data to NV21 format
            int width = imageProxy.getWidth();
            int height = imageProxy.getHeight();
            byte[] nv21Data = new byte[width * height * 3 / 2];
            // Assuming the image format is YUV_420_888
            System.arraycopy(yData, 0, nv21Data, 0, yData.length);
            for (int i = yData.length; i < nv21Data.length; i += 2) {
                nv21Data[i] = yData[i + 1];
                nv21Data[i + 1] = yData[i];
            }

            // Step 3: Create a grayscale Mat from the NV21 data
            Mat grayscaleMat = new Mat(height, width, CvType.CV_8UC1);
            grayscaleMat.put(0, 0, nv21Data);

            return grayscaleMat;
        }

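        // Keeps only external contours whose area exceeds areaThreshold and returns the resulting binary mask (the masked copy of the input is computed but not returned).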
        private static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
            List<MatOfPoint> contours = new ArrayList<>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
            // Iterate over the contours and draw only the larger contours on the outputMat
            for (MatOfPoint contour : contours) {
                double contourArea = Imgproc.contourArea(contour);
                if (contourArea > areaThreshold) {
                    Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
                }
            }
            // Apply the outputMat as a mask to the dilatedImage
            Mat maskedImage = new Mat();
            inputMat.copyTo(maskedImage, outputMat);
            return outputMat;
        }

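        // Dilates the binary input with an elliptical structuring element of the given kernel size.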
        private static Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
            Mat dilatedMat = new Mat();
            Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
            Imgproc.dilate(inputMat, dilatedMat, kernel);
            return dilatedMat;
        }

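        // Returns the number of non-zero pixels in the Mat, or 0 when the input is null.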
        private static int countNonZeroPixels(Mat inputImage) {
            if (inputImage != null)
                return Core.countNonZero(inputImage);
            else
                return 0;
        }

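        // Renders a Mat into a Bitmap and displays it in the given ImageView for visual debugging.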
        private static void debugMat(Mat mat, ImageView imageView) {
            if (imageView == null || mat == null)
                return;

            Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(mat, bitmap);

            // Display the bitmap in an ImageView
            imageView.setImageBitmap(bitmap);
        }
        private String generateFileName(){
            // Get the current timestamp
            LocalDateTime currentTime = LocalDateTime.now();
            // Define the format for the timestamp
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss");
            // Return the timestamp as a string
            return currentTime.format(formatter);
        }
    }
}