Browse Source

Working Recorder and Video Detector

bk_video
Bastian Kohler 1 year ago
parent
commit
e088b0afa3

+ 0
- 11
app/src/main/java/com/example/ueberwachungssystem/Detection/AudioRecorder.java View File

// Return the timestamp as a string // Return the timestamp as a string
return currentTime.format(formatter); return currentTime.format(formatter);
} }

/**
 * Plays back the last audio recording ("audio.3gp") from the app-private
 * files directory.
 *
 * Fix: the MediaPlayer was never released, leaking the underlying codec
 * resources on every call. It is now released when playback completes,
 * and immediately if setup fails.
 */
public void playAudio() {
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(context.getFilesDir() + "/audio.3gp");
        mp.prepare();
        mp.start();
        // Release the player once playback finishes; MediaPlayer resources
        // are not reclaimed by GC alone.
        mp.setOnCompletionListener(MediaPlayer::release);
    } catch (Exception e) {
        e.printStackTrace();
        // Setup failed — the completion listener will never fire, so
        // release here to avoid leaking the player.
        mp.release();
    }
}
} }

+ 1
- 1
app/src/main/java/com/example/ueberwachungssystem/Detection/Detector.java View File

private boolean extendViolation = false; private boolean extendViolation = false;


// Countdown parameters // Countdown parameters
private final int COUNTDOWN_TIME = 5000; // milliseconds
private final int COUNTDOWN_TIME = 10000; // milliseconds
private final int COUNTDOWN_POLLING_TIME = 100; // milliseconds private final int COUNTDOWN_POLLING_TIME = 100; // milliseconds


/** Constructor - takes context of current activity */ /** Constructor - takes context of current activity */

+ 106
- 108
app/src/main/java/com/example/ueberwachungssystem/Detection/VideoDetector.java View File

import android.content.Context; import android.content.Context;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
import android.graphics.ImageFormat; import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.SensorManager;
import android.media.Image; import android.media.Image;
import android.os.CountDownTimer; import android.os.CountDownTimer;
import android.util.Log; import android.util.Log;
import android.view.Display; import android.view.Display;
import android.view.OrientationEventListener;
import android.view.Surface; import android.view.Surface;
import android.view.WindowManager; import android.view.WindowManager;
import android.widget.ImageView; import android.widget.ImageView;


import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.CameraSelector; import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage; import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis; import androidx.camera.core.ImageAnalysis;


@ExperimentalGetImage @ExperimentalGetImage
public class VideoDetector extends Detector { public class VideoDetector extends Detector {

// Calling Activity // Calling Activity
private final Context context; private final Context context;


private ProcessCameraProvider cameraProvider; private ProcessCameraProvider cameraProvider;
private ImageAnalysis imageAnalysis; private ImageAnalysis imageAnalysis;
private VideoCapture videoCapture; private VideoCapture videoCapture;
private final Preview preview;
//private Preview preview;


// Logic // Logic
private boolean isDetecting = false; private boolean isDetecting = false;
private ImageView inputImageView = null; private ImageView inputImageView = null;
private ImageView outputImageView = null; private ImageView outputImageView = null;


// Recorder
private File outputDir; // Default: in app files directory
private int rotation = 0; private int rotation = 0;




// Parameters // Parameters
private static final float ALARM_THRESHOLD = 0.5f; // Percent of pixels changed private static final float ALARM_THRESHOLD = 0.5f; // Percent of pixels changed
private static final long START_DELAY = 20000; // milliseconds
private static final float START_DELAY = 20000; // milliseconds
private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480); private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480);




private enum UseCase {
ImageAnalysis,
Preview,
VideoCapture
};



/** Constructor */ /** Constructor */
public VideoDetector(Context context) { public VideoDetector(Context context) {
this.context = context; this.context = context;
this.imageAnalysis = setupImageAnalysis(); this.imageAnalysis = setupImageAnalysis();
this.videoCapture = setupVideoCapture(); this.videoCapture = setupVideoCapture();
this.preview = new Preview.Builder().build();
this.outputDir = context.getFilesDir();
//this.preview = new Preview.Builder().build();
} }


/** Get States */ /** Get States */
getPermissions(); getPermissions();
return; return;
} }
// Configure Image Analysis
imageAnalysis = setupImageAnalysis(); imageAnalysis = setupImageAnalysis();

// Open CV startup check // Open CV startup check
if (!OpenCVLoader.initDebug()) { if (!OpenCVLoader.initDebug()) {
Log.e("OpenCV", "Unable to load OpenCV!"); Log.e("OpenCV", "Unable to load OpenCV!");
try { try {
cameraProvider = cameraProviderFuture.get(); cameraProvider = cameraProviderFuture.get();
isDetecting = true; isDetecting = true;
bindCameraProvider(UseCase.ImageAnalysis);
bindCameraProvider();
} catch (ExecutionException | InterruptedException e) {} } catch (ExecutionException | InterruptedException e) {}
}, ContextCompat.getMainExecutor(context)); }, ContextCompat.getMainExecutor(context));
// Disable Violation Calling for Setup Time
startViolationTimer(START_DELAY);
}

/** Starts the Recorder */
@SuppressLint("RestrictedApi")
public void startRecording() {
// Check States
if (isRecording){
extendViolation();
return;
}
// Return On Request Permissions
if (!hasPermissions()) {
getPermissions();
return;
}

videoCapture = setupVideoCapture();

final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
isRecording = true;
bindCameraProvider();


startViolationTimer();
File vidFile = new File(context.getFilesDir() + "/" + generateFileName() + ".mp4");
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
return;
}
videoCapture.startRecording(
new VideoCapture.OutputFileOptions.Builder(vidFile).build(),
context.getMainExecutor(),
new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
isRecording = false;
Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show();
}
}
);
} catch (ExecutionException | InterruptedException ignored) {}
}, ContextCompat.getMainExecutor(context));
} }


/** Stops the Video Detection */ /** Stops the Video Detection */
allowReportViolation = false; allowReportViolation = false;
} }


/** Permission handling */
private boolean hasPermissions() {
return ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED &&
ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED;
}
private void getPermissions() {
if (!hasPermissions())
ActivityCompat.requestPermissions((Activity) context, new String[]{android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, PERMISSION_REQUEST_CODE);
/** Stops the Recording */
@SuppressLint("RestrictedApi")
public void stopRecording(){
videoCapture.stopRecording();
cameraProvider.unbind(videoCapture);
isRecording = false;
} }




/** Binds the Luminosity Analyzer (configure and run Analysis) */
private void bindCameraProvider(UseCase useCase) {
/** Bind Camera Provider */
private void bindCameraProvider() {
// Specify which Camera to use // Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build(); CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
cameraProvider.unbindAll(); cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture); cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
} }



/** Start delay until Violation Report is allowed: suppress reports for
 *  START_DELAY milliseconds after startup, then enable them. */
private void startViolationTimer() {
    CountDownTimer startupGrace = new CountDownTimer((long) START_DELAY, 100) {
        @Override
        public void onTick(long millisUntilFinished) {
            // No per-tick work; the timer only matters when it finishes.
        }

        @Override
        public void onFinish() {
            allowReportViolation = true;
        }
    };
    startupGrace.start();
}


/** Setup Use Cases */ /** Setup Use Cases */
private ImageAnalysis setupImageAnalysis() { private ImageAnalysis setupImageAnalysis() {
// Configure and create Image Analysis // Configure and create Image Analysis


@SuppressLint("RestrictedApi") @SuppressLint("RestrictedApi")
private VideoCapture setupVideoCapture() { private VideoCapture setupVideoCapture() {
int rotation = getRotation();
int rotation = getDisplayRotation();
return new VideoCapture.Builder() return new VideoCapture.Builder()
.setTargetRotation(rotation) .setTargetRotation(rotation)
.build(); .build();
} }


// Starts the video Recorder. Obtains the camera provider asynchronously,
// binds the VideoCapture use case, and records to a timestamp-named .mp4
// in the app files directory until stopRecording() is called.
@SuppressLint("RestrictedApi")
public void startRecording() {
    // Check States — already recording: extend the active violation instead.
    if (isRecording){
        extendViolation();
        return;
    }
    // Return On Request Permissions
    if (!hasPermissions()) {
        getPermissions();
        return;
    }

    videoCapture = setupVideoCapture();

    final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
    cameraProviderFuture.addListener(() -> {
        try {
            cameraProvider = cameraProviderFuture.get();
            isRecording = true;
            bindCameraProvider(UseCase.VideoCapture);

            File vidFile = new File(context.getFilesDir() + "/" + generateFileName() + ".mp4");
            // NOTE(review): returning here leaves isRecording == true and the
            // camera bound even though nothing records — confirm this is
            // intended when RECORD_AUDIO is denied.
            if (ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            videoCapture.startRecording(
                new VideoCapture.OutputFileOptions.Builder(vidFile).build(),
                context.getMainExecutor(),
                new VideoCapture.OnVideoSavedCallback() {
                    @Override
                    public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
                        // Recording finished successfully; clear the flag.
                        isRecording = false;
                        Toast.makeText(context, "recording saved", Toast.LENGTH_SHORT).show();
                    }
                    @Override
                    public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
                        // Recording failed; clear the flag so a retry is possible.
                        isRecording = false;
                        Toast.makeText(context, "recording failed", Toast.LENGTH_SHORT).show();
                    }
                }
            );
        // NOTE(review): both exceptions are silently ignored; at minimum the
        // interrupt status should be restored on InterruptedException.
        } catch (ExecutionException | InterruptedException ignored) {}
    }, ContextCompat.getMainExecutor(context));
}

// Stops the active recording, unbinds the video use case, and clears the
// recording flag.
// NOTE(review): there is no guard for the not-recording case — calling
// stopRecording() while idle will hit VideoCapture's error path; confirm
// all callers check state first.
@SuppressLint("RestrictedApi")
public void stopRecording(){
    videoCapture.stopRecording();
    cameraProvider.unbind(videoCapture);
    isRecording = false;
}

/** Process Image to be used for Motion Detection */ /** Process Image to be used for Motion Detection */
private Mat processImage(ImageProxy imageProxy){ private Mat processImage(ImageProxy imageProxy){
if (imageProxy == null) if (imageProxy == null)
return null; return null;

// Image Transformation // Image Transformation
Mat imageMat = OpenCVHelper.extractYChannel(imageProxy); Mat imageMat = OpenCVHelper.extractYChannel(imageProxy);

// Show Input Image // Show Input Image
if (inputImageView != null) if (inputImageView != null)
OpenCVHelper.debugMat(imageMat, inputImageView); OpenCVHelper.debugMat(imageMat, inputImageView);

// Preprocess Image // Preprocess Image
Mat preprocessed = imageMat; Mat preprocessed = imageMat;
preprocessed = OpenCVHelper.addGaussianBlur(preprocessed, new Size(21, 21)); preprocessed = OpenCVHelper.addGaussianBlur(preprocessed, new Size(21, 21));
preprocessed = OpenCVHelper.addBlur(preprocessed, new Size(3, 3)); preprocessed = OpenCVHelper.addBlur(preprocessed, new Size(3, 3));
// Set Previous Image
if (previousImage == null) { if (previousImage == null) {
previousImage = preprocessed; previousImage = preprocessed;
return null; return null;
} }

// Process Image // Process Image
Mat processed = preprocessed.clone(); Mat processed = preprocessed.clone();
processed = OpenCVHelper.thresholdPixels(processed, previousImage, 25); processed = OpenCVHelper.thresholdPixels(processed, previousImage, 25);
processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3)); processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3));
processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3)); processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3));
processed = OpenCVHelper.thresholdContourArea(processed, 500); processed = OpenCVHelper.thresholdContourArea(processed, 500);

// Output // Output
previousImage = preprocessed.clone(); previousImage = preprocessed.clone();
// Show Output Image // Show Output Image
if (outputImageView != null) if (outputImageView != null)
OpenCVHelper.debugMat(processed, outputImageView); OpenCVHelper.debugMat(processed, outputImageView);

return processed; return processed;
} }



/** Debug input and result of processing */
public void debugProcessing(@NonNull ImageView inputImageView, @NonNull ImageView outputImageView){ public void debugProcessing(@NonNull ImageView inputImageView, @NonNull ImageView outputImageView){
this.inputImageView = inputImageView; this.inputImageView = inputImageView;
this.outputImageView = outputImageView; this.outputImageView = outputImageView;
} }


public void setPreviewView(@NonNull PreviewView previewView) {

/**
private void setPreviewView(@NonNull PreviewView previewView) {
// Create Preview // Create Preview
if (this.preview != null) if (this.preview != null)
this.preview.setSurfaceProvider(previewView.getSurfaceProvider()); this.preview.setSurfaceProvider(previewView.getSurfaceProvider());
} }
*/




/** Generate File Name */
private String generateFileName(){ private String generateFileName(){
// Get the current timestamp // Get the current timestamp
LocalDateTime currentTime = LocalDateTime.now(); LocalDateTime currentTime = LocalDateTime.now();
return currentTime.format(formatter); return currentTime.format(formatter);
} }


private int getRotation() {

/** Get current Display Rotation */
private int getDisplayRotation() {
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = windowManager.getDefaultDisplay(); Display display = windowManager.getDefaultDisplay();
return display.getRotation(); return display.getRotation();
} }


/**
 * Start delay until Violation Report is allowed.
 *
 * Fix: the setupTime parameter was ignored — the timer always ran for
 * START_DELAY regardless of the argument. It now uses setupTime. (The
 * visible caller passes START_DELAY, so existing behavior is unchanged.)
 *
 * @param setupTime delay in milliseconds before violations may be reported
 */
private void startViolationTimer(float setupTime) {
    new CountDownTimer((long) setupTime, 100) {
        @Override
        public void onTick(long millisUntilFinished) {
            // No per-tick work needed.
        }
        @Override
        public void onFinish() {
            allowReportViolation = true;
        }
    }.start();
}

/** Permission handling: true when both camera and microphone access are granted. */
private boolean hasPermissions() {
    boolean cameraGranted = ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
    boolean audioGranted = ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED;
    return cameraGranted && audioGranted;
}

/** Request any missing runtime permissions from the hosting activity. */
private void getPermissions() {
    if (hasPermissions()) {
        return;
    }
    ActivityCompat.requestPermissions(
            (Activity) context,
            new String[]{android.Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO},
            PERMISSION_REQUEST_CODE);
}

// Set the directory that finished recordings are written to.
// NOTE(review): the field defaults to the app-private files directory in
// the constructor; confirm callers pass an existing, writable directory.
public void setOutputDir(File outputDir) {
    this.outputDir = outputDir;
}
} }

+ 5
- 1
app/src/main/java/com/example/ueberwachungssystem/MainActivity.java View File

@Override @Override
public void onDetection(@NonNull DetectionReport detectionReport) { public void onDetection(@NonNull DetectionReport detectionReport) {
Log.d("onDetection", detectionReport.toString()); Log.d("onDetection", detectionReport.toString());
if (detectionReport.detectionState)
vd.startRecording();
else
vd.stopRecording();
} }
}); });
vd.startDetection(); vd.startDetection();
{ {
//vd.startDetection(); //vd.startDetection();
vd.stopDetection(); vd.stopDetection();
vd.startRecording();
//vd.startRecording();
audioRecorder.startRecording(); audioRecorder.startRecording();
} }
else { else {

Loading…
Cancel
Save