
removed Fragment classes from MainActivity

- changed buttons, added fragments
koLayoutTest
Oliver Kleinecke committed 1 year ago (commit 076db89147)
22 changed files with 259 additions and 1818 deletions
 1. +0   -108  app/src/main/java/com/example/ueberwachungssystem/Detection/Accelerometer.java
 2. +0   -74   app/src/main/java/com/example/ueberwachungssystem/Detection/AudioRecorder.java
 3. +0   -38   app/src/main/java/com/example/ueberwachungssystem/Detection/DetectionReport.java
 4. +0   -76   app/src/main/java/com/example/ueberwachungssystem/Detection/Detector.java
 5. +0   -174  app/src/main/java/com/example/ueberwachungssystem/Detection/DetectorService.java
 6. +0   -382  app/src/main/java/com/example/ueberwachungssystem/Detection/MicrophoneDetector.java
 7. +0   -109  app/src/main/java/com/example/ueberwachungssystem/Detection/OpenCVHelper.java
 8. +0   -148  app/src/main/java/com/example/ueberwachungssystem/Detection/Signalverarbeitung/Complex.java
 9. +0   -246  app/src/main/java/com/example/ueberwachungssystem/Detection/Signalverarbeitung/FFT.java
10. +0   -327  app/src/main/java/com/example/ueberwachungssystem/Detection/VideoDetector.java
11. +8   -3    app/src/main/java/com/example/ueberwachungssystem/Fragments/Fragment1.java
12. +22  -2    app/src/main/java/com/example/ueberwachungssystem/Fragments/Fragment2.java
13. +41  -41   app/src/main/java/com/example/ueberwachungssystem/MainActivity.java
14. +31  -0    app/src/main/java/com/example/ueberwachungssystem/MeinAdapter.java
15. +36  -0    app/src/main/java/com/example/ueberwachungssystem/VideoListAdapter.java
16. +0   -5    app/src/main/java/com/example/ueberwachungssystem/WifiCommunication.java
17. +5   -0    app/src/main/res/drawable/toggle_btn.xml
18. +70  -64   app/src/main/res/layout/activity_main.xml
19. +17  -13   app/src/main/res/layout/fragment1.xml
20. +19  -8    app/src/main/res/layout/fragment2.xml
21. +5   -0    app/src/main/res/values/colors.xml
22. +5   -0    app/src/main/res/values/themes.xml

+0 -108  app/src/main/java/com/example/ueberwachungssystem/Detection/Accelerometer.java

package com.example.ueberwachungssystem.Detection;

import static java.lang.Math.sqrt;

import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;


/**
* Accelerometer inherits its reporting methods from the abstract Detector class (more info there).
*
* USE FROM MAIN ACTIVITY:
*
* onCreate:
* // Accelerometer setup
* Accelerometer beschleunigungssensor = new Accelerometer(this);
* beschleunigungssensor.getSensor();
*
* // Starting detection:
* beschleunigungssensor.startDetection();
* // Stopping detection (also recommended in onPause to avoid unnecessary battery consumption):
* beschleunigungssensor.stopDetection();
*
*/

public class Accelerometer extends Detector implements SensorEventListener {

public SensorManager sensorManager;
private static final int sensorType = Sensor.TYPE_LINEAR_ACCELERATION;
private Sensor accelerometer;
private Context context;
boolean alarm = false;
//Preallocate memory for the data of each axis of the acceleration sensor
float x;
float y;
float z;
float betrag; // magnitude of all three axes: sqrt(x*x + y*y + z*z)
private DetectionReport detectionReport;

// Constructor - takes the Context of the calling Activity
public Accelerometer(Context context){
super(); // from the Detector base class
this.context = context;
}

public void getSensor(){
sensorManager = (SensorManager)context.getSystemService(Context.SENSOR_SERVICE);
if(sensorManager.getSensorList(sensorType).size()==0) {
accelerometer = null;
}
else {
accelerometer = sensorManager.getSensorList(sensorType).get(0);
}
}

@Override
public void onSensorChanged(SensorEvent event) {
try {
checkAlarm(event);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}

public void checkAlarm (SensorEvent event) throws InterruptedException {
x = event.values[0];
y = event.values[1];
z = event.values[2];
betrag = (float) sqrt(x*x + y*y + z*z);
float threshold = 1.5F;

if (!alarm) {
if (betrag > threshold) {
alarm = true;
reportViolation("Bewegung", betrag);
}
} else {
if (betrag < threshold) {
alarm = false;
}
}
}

@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}

@Override
public void startDetection() {
// corresponds to void start()
//getSensor();
if (accelerometer != null) {
sensorManager.registerListener(this, accelerometer, SensorManager.SENSOR_DELAY_GAME);
}
}

@Override
public void stopDetection() {
// corresponds to void stop()
sensorManager.unregisterListener(this, accelerometer);
}
}

+0 -74  app/src/main/java/com/example/ueberwachungssystem/Detection/AudioRecorder.java

package com.example.ueberwachungssystem.Detection;

import android.content.Context;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.widget.Toast;

import java.io.File;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class AudioRecorder {
private final Context context;
private MediaRecorder mediaRecorder = null;
private boolean isRecording = false;
private File outputDir; // Default: in app files directory


public AudioRecorder (Context context) {
this.context = context;
this.outputDir = context.getFilesDir();
}

public void startRecording() {
// Handle logic
if (outputDir == null)
return;
if (isRecording)
return;
isRecording = true;

// Setup Audio Recorder for output Format: 3GP
mediaRecorder = new MediaRecorder();
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mediaRecorder.setOutputFile(outputDir + "/" + generateFileName() + ".3gp");
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
try {
mediaRecorder.prepare();
} catch (IOException e) {
e.printStackTrace();
}
mediaRecorder.start();
}

public void stopRecording() {
if (mediaRecorder != null) {
mediaRecorder.stop();
mediaRecorder.reset();
mediaRecorder.release();
mediaRecorder = null;
isRecording = false;
Toast.makeText(context, "audio recording saved", Toast.LENGTH_SHORT).show();
}
}

public boolean isRecording(){
return isRecording;
}

public void setOutputDir(File outputDir) {
this.outputDir = outputDir;
}

private String generateFileName(){
// Get the current timestamp
LocalDateTime currentTime = LocalDateTime.now();
// Define the format for the timestamp
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss");
// Return the timestamp as a string
return currentTime.format(formatter);
}
}

+0 -38  app/src/main/java/com/example/ueberwachungssystem/Detection/DetectionReport.java

package com.example.ueberwachungssystem.Detection;

import android.util.Log;

import java.util.Calendar;

/** Detection Report Class */
public class DetectionReport {
public String timeStamp;
public String detectionType;
public float detectedValue;
public boolean detectionState;

public DetectionReport(boolean detectionState, String detectionType, float detectedAmplitude) {
this.timeStamp = String.valueOf(Calendar.getInstance().getTime());
this.detectionType = detectionType;
this.detectedValue = detectedAmplitude;
this.detectionState = detectionState;

//this.detectorID = detectorID;
}


/** Get Detection Report in String format */
public String toString() {
String state = "State: " + "[" + this.detectionState + "]";
String time = "Time: " + "[" + this.timeStamp + "]";
String type = "Type: " + "[" + this.detectionType + "]";
String value = "Value: " + "[" + this.detectedValue + "]";

return String.join("\t", state, time, type, value);
}

/** Debug Report */
public void log(String tag) {
Log.d(tag, this.toString());
}
}
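For reference, a report logged through log() comes out as a single tab-separated line, with the timestamp in whatever format java.util.Date.toString() produces. A small illustrative snippet (tag and values are made up):

DetectionReport report = new DetectionReport(true, "Video", 0.42f);
report.log("DetectionDemo");
// Logcat output, roughly:
// D/DetectionDemo: State: [true]    Time: [Sat Jul 01 12:34:56 GMT+02:00 2023]    Type: [Video]    Value: [0.42]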

+0 -76  app/src/main/java/com/example/ueberwachungssystem/Detection/Detector.java

package com.example.ueberwachungssystem.Detection;

import android.os.CountDownTimer;

import androidx.annotation.NonNull;
import androidx.camera.core.ExperimentalGetImage;


abstract public class Detector {
private OnDetectionListener listener;
private boolean isDetecting = false;
private boolean extendViolation = false;

// Countdown parameters
private final int COUNTDOWN_TIME = 10000; // milliseconds
private final int COUNTDOWN_POLLING_TIME = 100; // milliseconds

/** Constructor (the base class itself takes no arguments) */
public Detector() {}


/** On Detection Listener - runs when violation is reported */
public interface OnDetectionListener {
void onDetection(@NonNull DetectionReport detectionReport);
}
public void setOnDetectionListener(@NonNull OnDetectionListener listener) {
this.listener = listener;
}

/** Triggers onDetectionListener - call this to trigger violation/alarm */
public void reportViolation(String detectionType, float amplitude) {
if (listener != null) {
if (!isDetecting) {
isDetecting = true;
DetectionReport detectionReport = new DetectionReport(true, detectionType, amplitude);
listener.onDetection(detectionReport);
startDetectionTimer(detectionType, amplitude);
} else {
extendViolation = true;
}
} else {
isDetecting = false;
extendViolation = false;
}
}

private void startDetectionTimer(String detectionType, float amplitude) {
isDetecting = true;
new CountDownTimer((long) COUNTDOWN_TIME, COUNTDOWN_POLLING_TIME) {
@Override
public void onTick(long millisUntilFinished) {
if (extendViolation) {
extendViolation = false;
startDetectionTimer(detectionType, amplitude);
this.cancel();
}
}
@Override
public void onFinish() {
isDetecting = false;
DetectionReport detectionReport = new DetectionReport(false, detectionType, amplitude);
listener.onDetection(detectionReport);
}
}.start();
}

public void extendViolation(){
this.extendViolation = true;
}

/** Starts Detection (abstract method: needs to be overridden in child class) */
public abstract void startDetection();

/** Stops Detection (abstract method: needs to be overridden in child class) */
public abstract void stopDetection();
}
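A single violation therefore produces two callbacks: one report with detectionState = true when reportViolation() first fires, and a second report with detectionState = false once the 10-second countdown expires without being extended. A minimal sketch of how a caller could observe this, reusing the Accelerometer subclass shown above (illustrative wiring, not part of the commit):

// e.g. inside an Activity's onCreate()
Accelerometer sensor = new Accelerometer(this);
sensor.getSensor();
sensor.setOnDetectionListener(new Detector.OnDetectionListener() {
    @Override
    public void onDetection(DetectionReport report) {
        // fires at the start of a violation (detectionState == true)
        // and again when the countdown finishes (detectionState == false)
        report.log("DetectorDemo");
    }
});
sensor.startDetection();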

+0 -174  app/src/main/java/com/example/ueberwachungssystem/Detection/DetectorService.java

package com.example.ueberwachungssystem.Detection;

import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.ImageView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.ExperimentalGetImage;
import androidx.lifecycle.LifecycleService;

import com.example.ueberwachungssystem.WifiCommunication;

import java.io.File;

@ExperimentalGetImage
public class DetectorService extends LifecycleService {
public ServiceBinder serviceBinder = new ServiceBinder();
private DetectorService.OnDetectionListener listener;
private boolean isServiceRunning = false;

VideoDetector videoDetector = null;
AudioRecorder audioRecorder = null;

/** Communication **/

WifiCommunication wifiCommunication;

StringBuffer dataFromWifi;

@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (isServiceRunning)
return START_NOT_STICKY;


// Setup Service classes:
videoDetector = new VideoDetector(this);
videoDetector.setOnDetectionListener(new Detector.OnDetectionListener() {
@Override
public void onDetection(@NonNull DetectionReport detectionReport) {
passToServiceListener(detectionReport);
}
});

audioRecorder = new AudioRecorder(this);



isServiceRunning = true;

wifiCommunication = new WifiCommunication (1234);
wifiCommunication.setOnConnectionListener(new WifiCommunication.OnConnectionListener() {
@Override
public void onConnection(StringBuffer data) {
dataFromWifi = data;
}
});
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onDestroy() {
super.onDestroy();
isServiceRunning = false;
}

/** Service methods */
public class ServiceBinder extends Binder {
public DetectorService getBoundService() {
// Return an instance of the TestService
return DetectorService.this;
}
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
super.onBind(intent);
return serviceBinder;
}


/** Video Detection */
public void startVideoDetection() {
if(videoDetector != null)
videoDetector.startDetection();
}
public void stopVideoDetection() {
if(videoDetector != null)
videoDetector.stopDetection();
}
public boolean isVideoDetectionRunning() {
if(videoDetector != null)
return videoDetector.isDetecting();
return false;
}
public void debugVideoProcessing(ImageView input, ImageView output) {
if(videoDetector != null)
videoDetector.debugProcessing(input, output);
}

/** Audio Detection */
public void startAudioDetection() {

}
public void stopAudioDetection() {

}

/** Motion Detection */
public void startMotionDetection() {

}
public void stopMotionDetection() {

}

/** Video Recording */
public void startVideoRecording() {
if(videoDetector != null)
videoDetector.startRecording();
}
public void stopVideoRecording() {
if(videoDetector != null)
videoDetector.stopRecording();
}
public boolean isVideoRecordingRunning() {
if(videoDetector != null)
return videoDetector.isRecording();
return false;
}
public void setVideoRecordingDir(File outputDir) {
if (videoDetector != null)
videoDetector.setOutputDir(outputDir);
}

/** Audio Recording */
public void startAudioRecording() {
if(audioRecorder != null)
audioRecorder.startRecording();
}
public void stopAudioRecording() {
if(audioRecorder != null)
audioRecorder.stopRecording();
}
public boolean isAudioRecordingRunning() {
if(audioRecorder != null)
return audioRecorder.isRecording();
return false;
}
public void setAudioRecordingDir(File outputDir) {
if (audioRecorder != null)
audioRecorder.setOutputDir(outputDir);
}


/** pass Detection Report to Service Detection Listener and trigger it */
public void passToServiceListener(DetectionReport detectionReport) {
if (listener != null) {
listener.onDetection(detectionReport);
}
}


/** On Detection Listener - runs when violation is reported */
public interface OnDetectionListener {
void onDetection(@NonNull DetectionReport detectionReport);
}
public void setOnDetectionListener(@NonNull DetectorService.OnDetectionListener listener) {
this.listener = listener;
}
}
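DetectorService extends LifecycleService and hands out its instance through ServiceBinder, so a client Activity reaches it via the usual start/bind pattern. A rough sketch of the client side, assuming permissions are already granted (field and tag names here are illustrative):

// Hypothetical client code inside an Activity
private DetectorService detectorService;

private final ServiceConnection connection = new ServiceConnection() {
    @Override
    public void onServiceConnected(ComponentName name, IBinder binder) {
        detectorService = ((DetectorService.ServiceBinder) binder).getBoundService();
        detectorService.setOnDetectionListener(report -> report.log("Service"));
        detectorService.startVideoDetection();
    }

    @Override
    public void onServiceDisconnected(ComponentName name) {
        detectorService = null;
    }
};

// e.g. in onStart(): start the service (runs onStartCommand) and bind to it
startService(new Intent(this, DetectorService.class));
bindService(new Intent(this, DetectorService.class), connection, Context.BIND_AUTO_CREATE);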

+0 -382  app/src/main/java/com/example/ueberwachungssystem/Detection/MicrophoneDetector.java

package com.example.ueberwachungssystem.Detection;

import static java.lang.Math.*;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.util.Log;

import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import com.example.ueberwachungssystem.Detection.Signalverarbeitung.Complex;
import com.example.ueberwachungssystem.Detection.Signalverarbeitung.FFT;
import com.example.ueberwachungssystem.Detection.DetectionReport;
import com.example.ueberwachungssystem.Detection.Detector;

public class MicrophoneDetector extends Detector {

private static final int RECHTEANFORDERUNG_MIKROFON = 1;

private AufnahmeTask aufnahmeTask;
public boolean armed = false;
public int Schwellwert_Alarm = 100;
private Context context;

/**
* Constructor - takes context of current activity
*
* @param context
*/
public MicrophoneDetector(Context context) {
super();
this.context = context;
}

@Override
public void startDetection() {
aufnahmeTask = new AufnahmeTask();
aufnahmeTask.execute();
}

@Override
public void stopDetection() {
if (aufnahmeTask != null) {
aufnahmeTask.cancel(true);
}
}

class AufnahmeTask extends AsyncTask<Long, Verarbeitungsergebnis, Void> {
private AudioRecord recorder;
private final int sampleRateInHz = 44100;
private final int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private int minPufferGroesseInBytes;
private int pufferGroesseInBytes;
private RingPuffer ringPuffer = new RingPuffer(10);
private float kalibierWert;
private com.example.ueberwachungssystem.Detection.DetectionReport detectionReport;

@SuppressLint("MissingPermission")
AufnahmeTask() {
minPufferGroesseInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
pufferGroesseInBytes = minPufferGroesseInBytes * 2;
try {
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRateInHz, channelConfig, audioFormat, pufferGroesseInBytes);
} catch (Exception e) {
e.printStackTrace();
}
Log.d("0","Puffergroeße: "+ minPufferGroesseInBytes + " " + pufferGroesseInBytes);
Log.d("0","Recorder (SR, CH): "+ recorder.getSampleRate() + " " + recorder.getChannelCount());

int anzahlBytesProAbtastwert;
String s;
switch (recorder.getAudioFormat()) {
case AudioFormat.ENCODING_PCM_8BIT:
s = "8 Bit PCM ";
anzahlBytesProAbtastwert = 1;
break;
case AudioFormat.ENCODING_PCM_16BIT:
s = "16 Bit PCM";
anzahlBytesProAbtastwert = 2;
break;
case AudioFormat.ENCODING_PCM_FLOAT:
s = "Float PCM";
anzahlBytesProAbtastwert = 4;
break;
default:
throw new IllegalArgumentException();
}

switch (recorder.getChannelConfiguration()) {
case AudioFormat.CHANNEL_IN_MONO:
s = "Mono";
break;
case AudioFormat.CHANNEL_IN_STEREO:
s = "Stereo";
anzahlBytesProAbtastwert *= 2;
break;
case AudioFormat.CHANNEL_INVALID:
s = "ungültig";
break;
default:
throw new IllegalArgumentException();
}

Log.d("0","Konfiguration: "+ s);

int pufferGroesseInAnzahlAbtastwerten = pufferGroesseInBytes / anzahlBytesProAbtastwert;

}

@Override
protected Void doInBackground(Long... params) {
recorder.startRecording();
short[] puffer = new short[pufferGroesseInBytes / 2];
long lastTime = System.currentTimeMillis();
float verarbeitungsrate = 0;
final int maxZaehlerZeitMessung = 10;
int zaehlerZeitMessung = 0;
int anzahlVerarbeitet = 0;
GleitenderMittelwert gleitenderMittelwert = new GleitenderMittelwert(0.3f);

// Calibration: average the maximum amplitude over 20 reads to get the ambient noise level
try {
Thread.sleep(3000); // Time to lay down the phone
} catch (InterruptedException e) {
e.printStackTrace();
}
int i = 0;
for (i = 0; i < 20; i++) {
int n = recorder.read(puffer, 0, puffer.length);
Verarbeitungsergebnis kalibrierErgebnis = verarbeiten(puffer, n);
kalibierWert += kalibrierErgebnis.maxAmp;
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
kalibierWert = kalibierWert/i;

// __Part of FFT__
// Complex[] zeitSignal = new Complex[puffer.length];
// for (int j = 0; j < puffer.length; j++) {
// zeitSignal[j] = new Complex(puffer[j], 0);
// }
// Complex[] spektrum = FFT.fft(zeitSignal);
// double[] spektrum = calculateFFT(puffer);
// DataPoint AddPoint;
// LineGraphSeries<DataPoint> series = new LineGraphSeries<DataPoint>(new DataPoint[]{});
// for (i = 0; i < spektrum.length; i++) {
// AddPoint = new DataPoint(i, spektrum[i]);
// series.appendData(AddPoint, true, spektrum.length);
// }
// graph.addSeries(series);

for (; ; ) {
if (aufnahmeTask.isCancelled()) {
break;
} else {
int n = recorder.read(puffer, 0, puffer.length);
Verarbeitungsergebnis ergebnis = verarbeiten(puffer, n);
anzahlVerarbeitet += n;

// __Part of FFT__
// spektrum = calculateFFT(puffer);
// LineGraphSeries<DataPoint> newseries = new LineGraphSeries<DataPoint>(new DataPoint[]{});
// for (i = 0; i < spektrum.length; i++) {
// AddPoint = new DataPoint(i, spektrum[i]);
// newseries.appendData(AddPoint, true, spektrum.length);
// }

zaehlerZeitMessung++;
if (zaehlerZeitMessung == maxZaehlerZeitMessung) {
long time = System.currentTimeMillis();
long deltaTime = time - lastTime;
verarbeitungsrate = 1000.0f * anzahlVerarbeitet / deltaTime;
verarbeitungsrate = gleitenderMittelwert.mittel(verarbeitungsrate);
zaehlerZeitMessung = 0;
anzahlVerarbeitet = 0;
lastTime = time;
}


ergebnis.verarbeitungsrate = (int) verarbeitungsrate;
publishProgress(ergebnis);

try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
recorder.release();
return null;
}

private Verarbeitungsergebnis verarbeiten(short[] daten, int n) {
String status;
short maxAmp = -1;
if (n == AudioRecord.ERROR_INVALID_OPERATION) {
status = "ERROR_INVALID_OPERATION";
} else if (n == AudioRecord.ERROR_BAD_VALUE) {
status = "ERROR_BAD_VALUE";
} else {
status = "OK";
short max = 0;
for (int i = 0; i < n; i++) {
if (daten[i] > max) {
max = daten[i];
}
}

ringPuffer.hinzufuegen(max);
maxAmp = ringPuffer.maximum();
if (maxAmp <= Schwellwert_Alarm+kalibierWert) {
armed = true;
}
}

return new Verarbeitungsergebnis(status, maxAmp, 0);
}

@Override
protected void onProgressUpdate(Verarbeitungsergebnis... progress) {
super.onProgressUpdate(progress);
float maxAmpPrint = round(20*log10(abs(progress[0].maxAmp/1.0)));
float kalibierWertPrint = round(20*log10(abs(kalibierWert)));
Log.d("0","VR, Max, Kal:" + progress[0].verarbeitungsrate + ", " + maxAmpPrint
+ " dB, " + kalibierWertPrint + " dB");

if (progress[0].maxAmp >= Schwellwert_Alarm+kalibierWert && armed == true) {
armed = false;
detectionReport = new DetectionReport(true, "Audio", maxAmpPrint);
reportViolation("Audio", maxAmpPrint);
Log.d("1",detectionReport.toString());
}
}
}

private double[] calculateFFT(short[] zeitsignal)
{
// convert each 16-bit sample into two bytes (low byte first)
byte[] signal = new byte[zeitsignal.length * 2];
for (int i = 0; i < zeitsignal.length; i++) {
signal[2*i] = (byte) (zeitsignal[i] & 0xFF);
signal[2*i+1] = (byte) (zeitsignal[i] >> 8);
}

final int mNumberOfFFTPoints =1024;

double temp;
Complex[] y;
Complex[] complexSignal = new Complex[mNumberOfFFTPoints];
double[] absSignal = new double[mNumberOfFFTPoints/2];

for(int i = 0; i < mNumberOfFFTPoints; i++){
temp = (double)((signal[2*i] & 0xFF) | (signal[2*i+1] << 8)) / 32768.0F;
complexSignal[i] = new Complex(temp,0.0);
}

y = FFT.fft(complexSignal);

for(int i = 0; i < (mNumberOfFFTPoints/2); i++)
{
absSignal[i] = y[i].abs();
}

return absSignal;

}

class Verarbeitungsergebnis {
String status;
short maxAmp;
int verarbeitungsrate;
Verarbeitungsergebnis(String status, short maxAmp, int verarbeitungsrate) {
this.status = status;
this.maxAmp = maxAmp;
this.verarbeitungsrate = verarbeitungsrate;
}
}

class RingPuffer {
private short[] puffer;
private final int laenge;
private int anzahlEnthaltenerDaten;
private int position;

public RingPuffer(int n) {
laenge = n;
anzahlEnthaltenerDaten = 0;
position = 0;
puffer = new short[laenge];
}

public void hinzufuegen(short wert) {
puffer[position] = wert;
position++;
if (position >= laenge) {
position = 0;
}
if (anzahlEnthaltenerDaten < laenge) {
anzahlEnthaltenerDaten++;
}
}

public void hinzufuegen(short[] daten) {
for (short d : daten) {
puffer[position] = d;
position++;
if (position >= laenge) {
position = 0;
}
}
if (anzahlEnthaltenerDaten < laenge) {
anzahlEnthaltenerDaten += daten.length;
if (anzahlEnthaltenerDaten >= laenge) {
anzahlEnthaltenerDaten = laenge;
}
}
}

public short maximum() {
short max = 0;
for (int i = 0; i < anzahlEnthaltenerDaten; i++) {
if (puffer[i] > max) {
max = puffer[i];
}
}
return max;
}

public float mittelwert() {
float summe = 0;
for (int i = 0; i < anzahlEnthaltenerDaten; i++) {
summe += puffer[i];
}
return summe / anzahlEnthaltenerDaten;
}
}

class GleitenderMittelwert {
private final float wichtungNeuerWert;
private final float wichtungAlterWert;
private float mittelwert = 0;
private boolean istMittelwertGesetzt = false;

GleitenderMittelwert(float wichtungNeuerWert) {
this.wichtungNeuerWert = wichtungNeuerWert;
this.wichtungAlterWert = 1 - this.wichtungNeuerWert;
}

float MittelwertPuffer(short[] puffer) {
mittelwert = 0;
for (int i = 0; i < puffer.length; i++) {
mittelwert += Math.abs(puffer[i]); // accumulate absolute values, then average below
}
mittelwert = mittelwert/puffer.length;

return mittelwert;
}

float mittel(float wert) {
if (istMittelwertGesetzt) {
mittelwert = wert * wichtungNeuerWert + mittelwert * wichtungAlterWert;
} else {
mittelwert = wert;
istMittelwertGesetzt = true;
}
return mittelwert;
}
}
}

+0 -109  app/src/main/java/com/example/ueberwachungssystem/Detection/OpenCVHelper.java

package com.example.ueberwachungssystem.Detection;

import android.graphics.Bitmap;
import android.media.Image;
import android.widget.ImageView;

import androidx.annotation.NonNull;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageProxy;

import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;


@ExperimentalGetImage
public class OpenCVHelper {

/** OpenCV helper methods **/
public static Mat addGaussianBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.GaussianBlur(inputMat, outputMat, kernelSize, 0);
return outputMat;
}

public static Mat addBlur(Mat inputMat, Size kernelSize){
Mat outputMat = new Mat();
Imgproc.blur(inputMat, outputMat, kernelSize);
return outputMat;
}

public static Mat extractYChannel(@NonNull ImageProxy imgProxy) {
Image img = imgProxy.getImage();

assert img != null;
ByteBuffer yBuffer = img.getPlanes()[0].getBuffer();
byte[] yData = new byte[yBuffer.remaining()];
yBuffer.get(yData);

Mat yMat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC1);
yMat.put(0, 0, yData);

return yMat;
}

public static Mat thresholdPixels(Mat inputMat, Mat previousImage, int threshold){
Mat diffImage = new Mat();
Core.absdiff(inputMat, previousImage, diffImage);
Mat binaryMat = new Mat();
Imgproc.threshold(diffImage, binaryMat, threshold, 255, Imgproc.THRESH_BINARY);
return binaryMat;
}


public static Mat thresholdContourArea(Mat inputMat, float areaThreshold){
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(inputMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

Mat outputMat = new Mat(inputMat.size(), inputMat.type(), new Scalar(0));
// Iterate over the contours and draw only the larger contours on the outputMat
for (MatOfPoint contour : contours) {
double contourArea = Imgproc.contourArea(contour);
if (contourArea > areaThreshold) {
Imgproc.drawContours(outputMat, Collections.singletonList(contour), 0, new Scalar(255), -1);
}
}
// Apply the outputMat as a mask to the dilatedImage
Mat maskedImage = new Mat();
inputMat.copyTo(maskedImage, outputMat);
return outputMat;
}

public static Mat dilateBinaryMat(Mat inputMat, Size kernelSize){
Mat dilatedMat = new Mat();
Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, kernelSize);
Imgproc.dilate(inputMat, dilatedMat, kernel);
return dilatedMat;
}

public static int countNonZeroPixels(Mat inputImage) {
if (inputImage != null)
return Core.countNonZero(inputImage);
else
return 0;
}


public static void debugMat(Mat mat, ImageView imageView) {
if (imageView == null || mat == null)
return;

Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat, bitmap);

// Display the bitmap in an ImageView
imageView.setImageBitmap(bitmap);
}
}

+0 -148  app/src/main/java/com/example/ueberwachungssystem/Detection/Signalverarbeitung/Complex.java

package com.example.ueberwachungssystem.Detection.Signalverarbeitung;

import java.util.Objects;

public class Complex {
private final double re; // the real part
private final double im; // the imaginary part

// create a new object with the given real and imaginary parts
public Complex(double real, double imag) {
re = real;
im = imag;
}

// return a string representation of the invoking Complex object
public String toString() {
if (im == 0) return re + "";
if (re == 0) return im + "i";
if (im < 0) return re + " - " + (-im) + "i";
return re + " + " + im + "i";
}

// return abs/modulus/magnitude
public double abs() {
return Math.hypot(re, im);
}

// return angle/phase/argument, normalized to be between -pi and pi
public double phase() {
return Math.atan2(im, re);
}

// return a new Complex object whose value is (this + b)
public Complex plus(Complex b) {
Complex a = this; // invoking object
double real = a.re + b.re;
double imag = a.im + b.im;
return new Complex(real, imag);
}

// return a new Complex object whose value is (this - b)
public Complex minus(Complex b) {
Complex a = this;
double real = a.re - b.re;
double imag = a.im - b.im;
return new Complex(real, imag);
}

// return a new Complex object whose value is (this * b)
public Complex times(Complex b) {
Complex a = this;
double real = a.re * b.re - a.im * b.im;
double imag = a.re * b.im + a.im * b.re;
return new Complex(real, imag);
}

// return a new object whose value is (this * alpha)
public Complex scale(double alpha) {
return new Complex(alpha * re, alpha * im);
}

// return a new Complex object whose value is the conjugate of this
public Complex conjugate() {
return new Complex(re, -im);
}

// return a new Complex object whose value is the reciprocal of this
public Complex reciprocal() {
double scale = re * re + im * im;
return new Complex(re / scale, -im / scale);
}

// return the real or imaginary part
public double re() {
return re;
}

public double im() {
return im;
}

// return a / b
public Complex divides(Complex b) {
Complex a = this;
return a.times(b.reciprocal());
}

// return a new Complex object whose value is the complex exponential of this
public Complex exp() {
return new Complex(Math.exp(re) * Math.cos(im), Math.exp(re) * Math.sin(im));
}

// return a new Complex object whose value is the complex sine of this
public Complex sin() {
return new Complex(Math.sin(re) * Math.cosh(im), Math.cos(re) * Math.sinh(im));
}

// return a new Complex object whose value is the complex cosine of this
public Complex cos() {
return new Complex(Math.cos(re) * Math.cosh(im), -Math.sin(re) * Math.sinh(im));
}

// return a new Complex object whose value is the complex tangent of this
public Complex tan() {
return sin().divides(cos());
}

// a static version of plus
public static Complex plus(Complex a, Complex b) {
double real = a.re + b.re;
double imag = a.im + b.im;
Complex sum = new Complex(real, imag);
return sum;
}

// See Section 3.3.
public boolean equals(Object x) {
if (x == null) return false;
if (this.getClass() != x.getClass()) return false;
Complex that = (Complex) x;
return (this.re == that.re) && (this.im == that.im);
}

// See Section 3.3.
public int hashCode() {
return Objects.hash(re, im);
}

// sample client for testing
public static void main(String[] args) {
Complex a = new Complex(5.0, 6.0);
Complex b = new Complex(-3.0, 4.0);

System.out.println("a = " + a);
System.out.println("b = " + b);
System.out.println("Re(a) = " + a.re());
System.out.println("Im(a) = " + a.im());
System.out.println("b + a = " + b.plus(a));
System.out.println("a - b = " + a.minus(b));
System.out.println("a * b = " + a.times(b));
System.out.println("b * a = " + b.times(a));
System.out.println("a / b = " + a.divides(b));
System.out.println("(a / b) * b = " + a.divides(b).times(b));
System.out.println("conj(a) = " + a.conjugate());
System.out.println("|a| = " + a.abs());
System.out.println("tan(a) = " + a.tan());
}
}

+0 -246  app/src/main/java/com/example/ueberwachungssystem/Detection/Signalverarbeitung/FFT.java

package com.example.ueberwachungssystem.Detection.Signalverarbeitung;
// Source: https://introcs.cs.princeton.edu/java/97data/FFT.java.html

/******************************************************************************
* Compilation: javac FFT.java
* Execution: java FFT n
* Dependencies: Complex.java
*
* Compute the FFT and inverse FFT of a length n complex sequence
* using the radix 2 Cooley-Tukey algorithm.
* Bare bones implementation that runs in O(n log n) time and O(n)
* space. Our goal is to optimize the clarity of the code, rather
* than performance.
*
* This implementation uses the primitive root of unity w = e^(-2 pi i / n).
* Some resources use w = e^(2 pi i / n).
*
* Reference: https://www.cs.princeton.edu/~wayne/kleinberg-tardos/pdf/05DivideAndConquerII.pdf
*
* Limitations
* -----------
* - assumes n is a power of 2
*
* - not the most memory efficient algorithm (because it uses
* an object type for representing complex numbers and because
* it re-allocates memory for the subarray, instead of doing
* in-place or reusing a single temporary array)
*
* For an in-place radix 2 Cooley-Tukey FFT, see
* https://introcs.cs.princeton.edu/java/97data/InplaceFFT.java.html
*
******************************************************************************/

public class FFT {

// compute the FFT of x[], assuming its length n is a power of 2
public static Complex[] fft(Complex[] x) {
int n = x.length;

// base case
if (n == 1) return new Complex[]{x[0]};

// radix 2 Cooley-Tukey FFT
if (n % 2 != 0) {
throw new IllegalArgumentException("n is not a power of 2");
}

// compute FFT of even terms
Complex[] even = new Complex[n / 2];
for (int k = 0; k < n / 2; k++) {
even[k] = x[2 * k];
}
Complex[] evenFFT = fft(even);

// compute FFT of odd terms
Complex[] odd = even; // reuse the array (to avoid n log n space)
for (int k = 0; k < n / 2; k++) {
odd[k] = x[2 * k + 1];
}
Complex[] oddFFT = fft(odd);

// combine
Complex[] y = new Complex[n];
for (int k = 0; k < n / 2; k++) {
double kth = -2 * k * Math.PI / n;
Complex wk = new Complex(Math.cos(kth), Math.sin(kth));
y[k] = evenFFT[k].plus(wk.times(oddFFT[k]));
y[k + n / 2] = evenFFT[k].minus(wk.times(oddFFT[k]));
}
return y;
}
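// The combine loop above is the standard radix-2 butterfly: with w = e^(-2 pi i / n),
// for k = 0 .. n/2 - 1:
//     y[k]       = evenFFT[k] + w^k * oddFFT[k]
//     y[k + n/2] = evenFFT[k] - w^k * oddFFT[k]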


// compute the inverse FFT of x[], assuming its length n is a power of 2
public static Complex[] ifft(Complex[] x) {
int n = x.length;
Complex[] y = new Complex[n];

// take conjugate
for (int i = 0; i < n; i++) {
y[i] = x[i].conjugate();
}

// compute forward FFT
y = fft(y);

// take conjugate again
for (int i = 0; i < n; i++) {
y[i] = y[i].conjugate();
}

// divide by n
for (int i = 0; i < n; i++) {
y[i] = y[i].scale(1.0 / n);
}

return y;

}

// compute the circular convolution of x and y
public static Complex[] cconvolve(Complex[] x, Complex[] y) {

// should probably pad x and y with 0s so that they have same length
// and are powers of 2
if (x.length != y.length) {
throw new IllegalArgumentException("Dimensions don't agree");
}

int n = x.length;

// compute FFT of each sequence
Complex[] a = fft(x);
Complex[] b = fft(y);

// point-wise multiply
Complex[] c = new Complex[n];
for (int i = 0; i < n; i++) {
c[i] = a[i].times(b[i]);
}

// compute inverse FFT
return ifft(c);
}


// compute the linear convolution of x and y
public static Complex[] convolve(Complex[] x, Complex[] y) {
Complex ZERO = new Complex(0, 0);

Complex[] a = new Complex[2 * x.length];
for (int i = 0; i < x.length; i++) a[i] = x[i];
for (int i = x.length; i < 2 * x.length; i++) a[i] = ZERO;

Complex[] b = new Complex[2 * y.length];
for (int i = 0; i < y.length; i++) b[i] = y[i];
for (int i = y.length; i < 2 * y.length; i++) b[i] = ZERO;

return cconvolve(a, b);
}

// compute the DFT of x[] via brute force (n^2 time)
public static Complex[] dft(Complex[] x) {
int n = x.length;
Complex ZERO = new Complex(0, 0);
Complex[] y = new Complex[n];
for (int k = 0; k < n; k++) {
y[k] = ZERO;
for (int j = 0; j < n; j++) {
int power = (k * j) % n;
double kth = -2 * power * Math.PI / n;
Complex wkj = new Complex(Math.cos(kth), Math.sin(kth));
y[k] = y[k].plus(x[j].times(wkj));
}
}
return y;
}

// display an array of Complex numbers to standard output
public static void show(Complex[] x, String title) {
System.out.println(title);
System.out.println("-------------------");
for (int i = 0; i < x.length; i++) {
System.out.println(x[i]);
}
System.out.println();
}

/***************************************************************************
* Test client and sample execution
*
* % java FFT 4
* x
* -------------------
* -0.03480425839330703
* 0.07910192950176387
* 0.7233322451735928
* 0.1659819820667019
*
* y = fft(x)
* -------------------
* 0.9336118983487516
* -0.7581365035668999 + 0.08688005256493803i
* 0.44344407521182005
* -0.7581365035668999 - 0.08688005256493803i
*
* z = ifft(y)
* -------------------
* -0.03480425839330703
* 0.07910192950176387 + 2.6599344570851287E-18i
* 0.7233322451735928
* 0.1659819820667019 - 2.6599344570851287E-18i
*
* c = cconvolve(x, x)
* -------------------
* 0.5506798633981853
* 0.23461407150576394 - 4.033186818023279E-18i
* -0.016542951108772352
* 0.10288019294318276 + 4.033186818023279E-18i
*
* d = convolve(x, x)
* -------------------
* 0.001211336402308083 - 3.122502256758253E-17i
* -0.005506167987577068 - 5.058885073636224E-17i
* -0.044092969479563274 + 2.1934338938072244E-18i
* 0.10288019294318276 - 3.6147323062478115E-17i
* 0.5494685269958772 + 3.122502256758253E-17i
* 0.240120239493341 + 4.655566391833896E-17i
* 0.02755001837079092 - 2.1934338938072244E-18i
* 4.01805098805014E-17i
*
***************************************************************************/

public static void main(String[] args) {
int n = Integer.parseInt(args[0]);
Complex[] x = new Complex[n];

// original data
for (int i = 0; i < n; i++) {
x[i] = new Complex(i, 0);
}
show(x, "x");

// FFT of original data
Complex[] y = fft(x);
show(y, "y = fft(x)");

// FFT of original data
Complex[] y2 = dft(x);
show(y2, "y2 = dft(x)");

// take inverse FFT
Complex[] z = ifft(y);
show(z, "z = ifft(y)");

// circular convolution of x with itself
Complex[] c = cconvolve(x, x);
show(c, "c = cconvolve(x, x)");

// linear convolution of x with itself
Complex[] d = convolve(x, x);
show(d, "d = convolve(x, x)");
}
}



+0 -327  app/src/main/java/com/example/ueberwachungssystem/Detection/VideoDetector.java

package com.example.ueberwachungssystem.Detection;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.media.Image;
import android.os.CountDownTimer;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;

import com.google.common.util.concurrent.ListenableFuture;

import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.core.Size;

import java.io.File;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.ExecutionException;


/**
* Video Detector inherits some methods from abstract Detector class (more info there)
* USE FROM MAIN ACTIVITY:
* VideoDetector vd = new VideoDetector(this);
* */


@ExperimentalGetImage
public class VideoDetector extends Detector {
// Calling Activity
private final Context context;

// Camera Provider
private ProcessCameraProvider cameraProvider;
private ImageAnalysis imageAnalysis;
private VideoCapture videoCapture;
//private Preview preview;

// Logic
private boolean isDetecting = false;
private boolean isRecording = false;
private boolean allowReportViolation = false;

// Image Processing
private Mat previousImage = null;

// Debugging
private ImageView inputImageView = null;
private ImageView outputImageView = null;

// Recorder
private File outputDir; // Default: in app files directory


// Parameters
private static final float ALARM_THRESHOLD = 0f; // Percent of pixels changed
private static final float AREA_THRESHOLD = 10f;
private static final int DILATE_ITERATIONS = 2;
private static final float START_DELAY = 20000; // milliseconds
private static final android.util.Size IMAGE_RES = new android.util.Size(640, 480);



/** Constructor */
public VideoDetector(Context context) {
super();
this.context = context;
this.imageAnalysis = setupImageAnalysis();
this.videoCapture = setupVideoCapture();
this.outputDir = context.getFilesDir();
//this.preview = new Preview.Builder().build();
}

/** Get States */
public boolean isDetecting() {
return isDetecting;
}
public boolean isRecording(){
return isRecording;
}


/** Starts the Video Detection */
@Override
public void startDetection() {
// Check States
if (isDetecting)
return;
// Configure Image Analysis
imageAnalysis = setupImageAnalysis();
// Open CV startup check
if (!OpenCVLoader.initDebug()) {
Log.e("OpenCV", "Unable to load OpenCV!");
return;
} else
Log.d("OpenCV", "OpenCV loaded Successfully!");
// Get Process Camera Provider and start
final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
isDetecting = true;
bindCameraProvider();
} catch (ExecutionException | InterruptedException e) {}
}, ContextCompat.getMainExecutor(context));
// Disable Violation Calling for Setup Time
startViolationTimer(START_DELAY);
}

/** Starts the Recorder */
@SuppressLint("RestrictedApi")
public void startRecording() {
// Check States
if (isRecording){
return;
}

videoCapture = setupVideoCapture();

final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(context);
cameraProviderFuture.addListener(() -> {
try {
cameraProvider = cameraProviderFuture.get();
isRecording = true;
bindCameraProvider();

File vidFile = new File(outputDir, generateFileName() + ".mp4");
if (ActivityCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
return;
}
videoCapture.startRecording(
new VideoCapture.OutputFileOptions.Builder(vidFile).build(),
context.getMainExecutor(),
new VideoCapture.OnVideoSavedCallback() {
@Override
public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
isRecording = false;
Toast.makeText(context, "video recording saved", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
isRecording = false;
Toast.makeText(context, "video recording failed", Toast.LENGTH_SHORT).show();
}
}
);
} catch (ExecutionException | InterruptedException ignored) {}
}, ContextCompat.getMainExecutor(context));
}

/** Stops the Video Detection */
@Override
public void stopDetection() {
if (!isDetecting || imageAnalysis == null)
return;
cameraProvider.unbind(imageAnalysis);
isDetecting = false;
allowReportViolation = false;
}

/** Stops the Recording */
@SuppressLint("RestrictedApi")
public void stopRecording(){
if(!isRecording)
return;

videoCapture.stopRecording();
cameraProvider.unbind(videoCapture);
isRecording = false;
}

/** Bind Camera Provider */
private void bindCameraProvider() {
// Specify which Camera to use
CameraSelector cameraSelector = new CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
cameraProvider.unbindAll();
cameraProvider.bindToLifecycle((LifecycleOwner) context, cameraSelector, imageAnalysis, videoCapture);
}

/** Setup Use Cases */
private ImageAnalysis setupImageAnalysis() {
// Configure and create Image Analysis
ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
builder.setTargetResolution(IMAGE_RES);
builder.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST);
builder.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_YUV_420_888);
builder.setTargetRotation(Surface.ROTATION_90);
ImageAnalysis imageAnalysis = builder.build();
// Set Analyzer
imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(context), imageProxy -> {
if (imageProxy.getFormat() == ImageFormat.YUV_420_888) {
Image image = imageProxy.getImage();
assert image != null;

// Violation Handling
Mat processed = processImage(imageProxy);

int n = OpenCVHelper.countNonZeroPixels(processed);
int pixelCount = image.getWidth() * image.getHeight();
float percentChanged = (float) n / pixelCount;

// Violation Condition
if (percentChanged * 100 > ALARM_THRESHOLD) {
if (allowReportViolation)
reportViolation("Video", percentChanged);
}
}
imageProxy.close();
});
return imageAnalysis;
}

@SuppressLint("RestrictedApi")
private VideoCapture setupVideoCapture() {
int rotation = getDisplayRotation();
return new VideoCapture.Builder()
.setTargetRotation(rotation)
.build();
}

/** Process Image to be used for Motion Detection */
private Mat processImage(ImageProxy imageProxy){
if (imageProxy == null)
return null;
// Image Transformation
Mat imageMat = OpenCVHelper.extractYChannel(imageProxy);
// Show Input Image
if (inputImageView != null)
OpenCVHelper.debugMat(imageMat, inputImageView);
// Preprocess Image
Mat preprocessed = imageMat;
preprocessed = OpenCVHelper.addGaussianBlur(preprocessed, new Size(21, 21));
preprocessed = OpenCVHelper.addBlur(preprocessed, new Size(3, 3));
// Set Previous Image
if (previousImage == null) {
previousImage = preprocessed;
return null;
}
// Process Image
Mat processed = preprocessed.clone();
processed = OpenCVHelper.thresholdPixels(processed, previousImage, 25);

for(int i = 0; i < DILATE_ITERATIONS; i++)
processed = OpenCVHelper.dilateBinaryMat(processed, new Size(3,3));

processed = OpenCVHelper.thresholdContourArea(processed, AREA_THRESHOLD);
// Output
previousImage = preprocessed.clone();
// Show Output Image
if (outputImageView != null)
OpenCVHelper.debugMat(processed, outputImageView);
return processed;
}


/** Debug input and result of processing */
public void debugProcessing(@NonNull ImageView inputImageView, @NonNull ImageView outputImageView){
this.inputImageView = inputImageView;
this.outputImageView = outputImageView;
}

/**
private void setPreviewView(@NonNull PreviewView previewView) {
// Create Preview
if (this.preview != null)
this.preview.setSurfaceProvider(previewView.getSurfaceProvider());
}
*/


/** Generate File Name */
private String generateFileName(){
// Get the current timestamp
LocalDateTime currentTime = LocalDateTime.now();
// Define the format for the timestamp
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd_HHmmss");
// Return the timestamp as a string
return currentTime.format(formatter);
}


/** Get current Display Rotation */
private int getDisplayRotation() {
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = windowManager.getDefaultDisplay();
return display.getRotation();
}

/** Start delay until Violation Report is allowed */
private void startViolationTimer(float setupTime) {
new CountDownTimer((long) setupTime, 100) {
@Override
public void onTick(long millisUntilFinished) {
}
@Override
public void onFinish() {
allowReportViolation = true;
}
}.start();
}

public void setOutputDir(File outputDir) {
this.outputDir = outputDir;
}
}
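One practical note for callers: startRecording() above returns silently if RECORD_AUDIO has not been granted, and the CameraX use cases also need the CAMERA permission before any frames are delivered. A minimal, illustrative check from the calling Activity (request code is arbitrary, vd is the instance from the usage comment above):

String[] needed = { Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO };
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
        || ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this, needed, 42);
} else {
    vd.startDetection();
}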

+8 -3  app/src/main/java/com/example/ueberwachungssystem/Fragments/Fragment1.java

package com.example.ueberwachungssystem.Fragments;

+ import android.content.Context;
+ import android.net.Uri;
import android.os.Bundle;
+ import android.os.Environment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
+ import android.widget.VideoView;

import androidx.fragment.app.Fragment;

import com.example.ueberwachungssystem.R;

+ import java.io.File;

public class Fragment1 extends Fragment {
private String text;
private final static String KEY_TEXT = "KEY_TEXT";

private void log(String nachricht) {
Log.d(this.getClass().getSimpleName(), nachricht);
}
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle bundle) {
log("onCreateView");
View view = inflater.inflate(R.layout.fragment1, container, false);
- TextView Sensor = (TextView) view.findViewById(R.id.Sensor);
+ TextView Sensor = (TextView) view.findViewById(R.id.Alarm);
Sensor.setText(text);
return view;
}
}
@Override
public void onCreate(Bundle bundle) {
- super .onCreate(bundle);
+ super.onCreate(bundle);
Bundle args = getArguments();
if (args != null ) {
text = args.getString(KEY_TEXT);

+22 -2  app/src/main/java/com/example/ueberwachungssystem/Fragments/Fragment2.java

package com.example.ueberwachungssystem.Fragments;

+ import android.content.Context;
+ import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
+ import android.widget.ArrayAdapter;
+ import android.widget.ListView;
import android.widget.TextView;

import androidx.fragment.app.Fragment;

+ import com.example.ueberwachungssystem.MeinAdapter;
import com.example.ueberwachungssystem.R;

+ import java.io.File;
+ import java.util.Arrays;
+ import java.util.List;

public class Fragment2 extends Fragment {
private String text;
+ private Context c;
private final static String KEY_TEXT = "KEY_TEXT" ;
private void log(String nachricht) {
Log.d(this.getClass().getSimpleName(), nachricht);
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle bundle) {
log( "onCreateView" );
View view = inflater.inflate(R.layout.fragment2, container, false );
- TextView Sensor = (TextView) view.findViewById(R.id.Sensor);
- Sensor.setText(text);
+ // TextView Sensor = (TextView) view.findViewById(R.id.Aufzeichnungen);
+ // Sensor.setText(text);
return view;
}
public static Fragment2 erstellen(String text) {
public void onCreate(Bundle bundle) {
super.onCreate(bundle);
Bundle args = getArguments();
+ c = getContext();
+ ListView listView = new ListView(c);
+ setContentView(R.layout.fragment2);
+ listView.setAdapter(new MeinAdapter(c, getVideoFiles()));
if (args != null) {
text = args.getString(KEY_TEXT);
log("onCreate: text=" + text);
log("onCreate");
}
}
+ public List<File> getVideoFiles(){
+ File directory = c.getFilesDir();
+ File[] files = directory.listFiles();
+ assert files != null;
+ return Arrays.asList(files);
+ }
}

+41 -41  app/src/main/java/com/example/ueberwachungssystem/MainActivity.java

package com.example.ueberwachungssystem; package com.example.ueberwachungssystem;


import androidx.annotation.NonNull;
import androidx.camera.core.ExperimentalGetImage; import androidx.camera.core.ExperimentalGetImage;
import androidx.fragment.app.Fragment; import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentTransaction; import androidx.fragment.app.FragmentTransaction;


import android.os.Bundle; import android.os.Bundle;
import android.util.Log; import android.util.Log;
import android.view.LayoutInflater;
import android.view.View; import android.view.View;
import android.view.ViewGroup;
import android.widget.Button; import android.widget.Button;
import android.widget.EditText;
import android.widget.ScrollView;
import android.widget.TextView; import android.widget.TextView;
import android.widget.ToggleButton; import android.widget.ToggleButton;


import com.example.ueberwachungssystem.Detection.DetectionReport;
import com.example.ueberwachungssystem.Detection.Detector;
import com.example.ueberwachungssystem.Detection.VideoDetector;
import com.example.ueberwachungssystem.Fragments.Fragment1; import com.example.ueberwachungssystem.Fragments.Fragment1;
import com.example.ueberwachungssystem.Fragments.Fragment2; import com.example.ueberwachungssystem.Fragments.Fragment2;
import com.example.ueberwachungssystem.Fragments.Fragment3;

import org.w3c.dom.Text;


@ExperimentalGetImage @ExperimentalGetImage
public class MainActivity extends AppCompatActivity implements View.OnClickListener { public class MainActivity extends AppCompatActivity implements View.OnClickListener {
//Fragmente
private Fragment aktuellesFragment; private Fragment aktuellesFragment;
private Fragment1 fragment1; private Fragment1 fragment1;
private Fragment2 fragment2; private Fragment2 fragment2;
private Fragment3 fragment3;


//Textviews
private TextView Auswahl;
private TextView AoderA;
private String auswahltext = "Wahl des Detektionsmodus";
private String auswahlAoderA = "Wahl von Alarmmeldungen oder Auswahl von Alarmaufzeichnungen";

//Sensoren und Kommunikation
WifiCommunication communication; WifiCommunication communication;
private TextView alarm;
private String text = "Das ist ein Alarm des Sensors";
//Buttons //Buttons
private ToggleButton toggleKamera; private ToggleButton toggleKamera;
private ToggleButton btnAudio;
private ToggleButton btnBewegung;
//Detektoren
VideoDetector vd = new VideoDetector(this);
private ToggleButton toggleAudio;
private ToggleButton toggleBewegung;

//Überprüfungswerte
boolean wahr;
private void log(String nachricht) { private void log(String nachricht) {
Log.d(this.getClass().getSimpleName(), nachricht); Log.d(this.getClass().getSimpleName(), nachricht);
} }
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
setTitle(this.getClass().getSimpleName()); setTitle(this.getClass().getSimpleName());
setContentView(R.layout.activity_main); setContentView(R.layout.activity_main);
alarm = findViewById(R.id.Alarm);
alarm.setText(text);
Auswahl = findViewById(R.id.textAuswahl);
Auswahl.setText(auswahltext);
AoderA = findViewById(R.id.textAoderA);
AoderA.setText(auswahlAoderA);
toggleKamera = findViewById(R.id.toggleKamera); toggleKamera = findViewById(R.id.toggleKamera);
toggleKamera.setOnClickListener(this); toggleKamera.setOnClickListener(this);
vd.setOnDetectionListener(new Detector.OnDetectionListener() {
@Override
public void onDetection(@NonNull DetectionReport detectionReport) {
DetectionReport dr = detectionReport;
String drString = dr.toString();
}
});

//boolean isRunning = vd.isRunning();
toggleAudio = findViewById(R.id.toggleAudio);
toggleAudio.setOnClickListener(this);
toggleBewegung = findViewById(R.id.toggleBewegung);
toggleBewegung.setOnClickListener(this);


}
@Override
public void onClick(View v) {
if (v == toggleKamera) {
if (toggleKamera.isChecked()) {
vd.startDetection();
wahr = true;
} else {
vd.stopDetection();
wahr = false;
}
}
if (v == toggleAudio) {
if (toggleAudio.isChecked()) {
wahr = true;
} else {
wahr = false;
}
}
if (v == toggleBewegung) {
if (toggleBewegung.isChecked()) {
wahr = true;
} else {
wahr = false;
}
}
}
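// Note (assumption): toggleAudio and toggleBewegung currently only flip the flag
// "wahr"; the audio and motion detectors could later be started and stopped here the
// same way vd.startDetection()/vd.stopDetection() handles the camera branch.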

@Override
protected void onPause() {
super.onPause();
@Override
protected void onResume() {
super.onResume();
communication = new WifiCommunication(MainActivity.this, 1234);
communication = new WifiCommunication(1234);
}


public void onClickZeigeFragment1(View view) {
Button button = (Button) view;
log(button.getText() + " ausgewählt");
zeigeFragment(fragment1.erstellen("Fragment 1 wurde angeklickt"));
}
public void onClickZeigeFragment2(View view) {
Button button = (Button) view;
log(button.getText() + " ausgewählt");
zeigeFragment(fragment2.erstellen("Fragment 2 wurde angeklickt"));
zeigeFragment(fragment1.erstellen("Hier stehen dann die Alarme"));
}


public void onClickZeigeFragment3(View view) {
public void onClickZeigeFragment2(View view) {
Button button = (Button) view;
log(button.getText() + " ausgewählt");
zeigeFragment(fragment3.erstellen("Fragment 3 wurde angeklickt"));
zeigeFragment(fragment2.erstellen("Hier stehen dann die Videos"));
}
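// zeigeFragment(...) is called above but not shown in this hunk. A minimal sketch of
// such a helper, assuming the FrameLayout with id "frame" from activity_main.xml is the
// container (assumption, not the committed implementation):
private void zeigeFragment(Fragment fragment) {
    FragmentTransaction transaktion = getSupportFragmentManager().beginTransaction();
    transaktion.replace(R.id.frame, fragment); // swap whatever is currently shown in the container
    transaktion.commit();
    aktuellesFragment = fragment;              // remember the visible fragment for later removal
}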


public void onClickEntferneFragment(View view) {

+ 31
- 0
app/src/main/java/com/example/ueberwachungssystem/MeinAdapter.java View File

package com.example.ueberwachungssystem;

import android.content.Context;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;

import java.io.File;
import java.util.List;

public class MeinAdapter extends ArrayAdapter<File> {
private Context context;
private File[] files;
public MeinAdapter(Context context, List<File> listf) {
super(context, android.R.layout.simple_list_item_1, listf);
this.context = context;
}

@Override
public View getView(final int pos, View cv, ViewGroup vg) {
View v = super.getView(pos, cv, vg);
v.setOnClickListener( new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.d("DerLog","nicht null");
}
});
return v;
}
}
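As a rough usage sketch (not part of this commit), MeinAdapter could be attached to a ListView of recording files; the directory name, ListView id, and fragment context below are assumptions:

// Hypothetical wiring, e.g. inside a fragment's onViewCreated
// (requires android.widget.ListView, java.util.Arrays and java.util.ArrayList imports):
File aufnahmeOrdner = new File(requireContext().getFilesDir(), "aufnahmen");  // assumed directory
File[] dateien = aufnahmeOrdner.listFiles();
List<File> listf = (dateien != null) ? Arrays.asList(dateien) : new ArrayList<File>();
ListView liste = view.findViewById(R.id.listeAlarme);                         // assumed ListView id
liste.setAdapter(new MeinAdapter(requireContext(), listf));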

+ 36
- 0
app/src/main/java/com/example/ueberwachungssystem/VideoListAdapter.java View File

package com.example.ueberwachungssystem;

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;

import java.io.File;
import java.util.List;

public class VideoListAdapter extends ArrayAdapter<File> {

public VideoListAdapter(Context context, List<File> videoList) {
super(context, 0, videoList);
}

@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (convertView == null) {
convertView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_video, parent, false);
}

File videoFile = getItem(position);

ImageView thumbnailImageView = convertView.findViewById(R.id.imageViewThumbnail);
TextView titleTextView = convertView.findViewById(R.id.textViewTitle);

// Bind the file name as the title; the thumbnail keeps its layout default for now
titleTextView.setText(videoFile.getName());

// Set tag to identify the clicked item
convertView.setTag(videoFile.getAbsolutePath());

return convertView;
    }
}
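The absolute path stored as the view tag is what a click handler would read back. A minimal sketch, assuming a ListView backed by this adapter and the VideoView with id AusAuf declared in fragment2.xml (listView and the fragment context are assumptions):

// Hypothetical click handling, not part of this commit
// (requires android.widget.VideoView import):
listView.setOnItemClickListener((parent, clickedView, position, id) -> {
    String pfad = (String) clickedView.getTag();              // path stored in getView()
    VideoView videoView = requireView().findViewById(R.id.AusAuf);
    videoView.setVideoPath(pfad);
    videoView.start();
});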

+ 0
- 5
app/src/main/java/com/example/ueberwachungssystem/WifiCommunication.java View File

package com.example.ueberwachungssystem;
import android.annotation.SuppressLint;
import android.widget.Toast;


import androidx.annotation.NonNull;


import com.example.ueberwachungssystem.Detection.DetectionReport;
import com.example.ueberwachungssystem.Detection.Detector;
import com.example.ueberwachungssystem.Detection.DetectorService;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

+ 5
- 0
app/src/main/res/drawable/toggle_btn.xml View File

<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_checked="false" android:drawable="@color/redbright" />
<item android:state_checked="true" android:drawable="@color/greenbright" />
</selector>

+ 70
- 64
app/src/main/res/layout/activity_main.xml View File

xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@android:color/holo_green_dark"
android:background="#010C49"
android:visibility="visible"
tools:context="com.example.ueberwachungssystem.MainActivity"
tools:visibility="visible">


<TextView
android:id="@+id/textAuswahl"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_alignParentLeft="true"
android:textSize="20sp"
android:textColor="@color/white"/>

<TextView
android:id="@+id/textAoderA"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@+id/toggleAudio"
android:layout_alignParentLeft="true"
android:layout_marginTop="15dp"
android:textSize="20sp"
android:textColor="@color/white"/>

<ToggleButton
android:id="@+id/toggleKamera"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBottom="@id/toggleAudio"
android:layout_marginRight="15dp"
android:layout_toStartOf="@+id/toggleAudio"
android:layout_marginRight="30dp"
android:layout_toLeftOf="@id/toggleAudio"
android:text="Kamera" />
android:textColor="@color/yellow"
android:textOn="Kamera an"
android:textOff="Kamera aus"
android:background="@drawable/toggle_btn"/>


<ToggleButton
android:id="@+id/toggleAudio"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentTop="true"
android:layout_below="@id/textAuswahl"
android:layout_centerHorizontal="true"
android:text="Audio" />
android:layout_marginTop="5dp"
android:textColor="@color/yellow"
android:textOn="Audio an"
android:textOff="Audio aus"
android:background="@drawable/toggle_btn"/>


<ToggleButton
android:id="@+id/toggleBewegung"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBottom="@id/toggleAudio"
android:layout_marginLeft="15dp"
android:layout_marginLeft="30dp"
android:layout_toEndOf="@+id/toggleAudio"
android:layout_toRightOf="@id/toggleAudio"
android:text="Bewegung" />
android:textColor="@color/yellow"
android:textOn="Bewegung an"
android:textOff="Bewegung aus"
android:background="@drawable/toggle_btn"/>


<Button
android:id="@+id/btnAudio"
android:id="@+id/btnAlarme"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBottom="@id/btnKamera"
android:layout_alignBottom="@+id/btnAufnahmen"
android:layout_toLeftOf="@id/btnAufnahmen"
android:layout_marginRight="15dp"
android:layout_toStartOf="@+id/btnKamera"
android:theme="@style/Button.Green"
android:onClick="onClickZeigeFragment1"
android:text="Audio" />
android:text="Alarme" />


<Button
android:id="@+id/btnKamera"
android:id="@+id/btnAufnahmen"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/toggleAudio"
android:layout_below="@+id/textAoderA"
android:layout_centerHorizontal="true"
android:layout_marginTop="15dp"
android:theme="@style/Button.Green"
android:onClick="onClickZeigeFragment2"
android:text="Kamera" />
android:text="Aufnahmen" />


<!--
<Button
android:id="@+id/btnSensorWeg"
android:id="@+id/btnAnzeigeVerb"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_below="@+id/btn1"
android:text="Entferne Sensordarstellung"
android:onClick="onClickEntferneFragment"/>
-->
<Button
android:id="@+id/btnBewegung"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBottom="@id/btnKamera"
android:layout_alignBottom="@+id/btnAufnahmen"
android:layout_toRightOf="@id/btnAufnahmen"
android:layout_marginLeft="15dp"
android:layout_toEndOf="@+id/btnKamera"
android:layout_toRightOf="@id/btnKamera"
android:onClick="onClickZeigeFragment3"
android:text="Bewegung" />

<Button
android:id="@+id/btnAufnahme"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/btnKamera"
android:layout_toLeftOf="@id/btnKamera"
android:layout_marginRight="15dp"
android:onClick="onClickEntferneFragment"
android:text="Aufnahme" />

<Button
android:id="@+id/btnWiedergabe"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/btnKamera"
android:layout_toRightOf="@id/btnKamera"
android:layout_marginLeft="15dp"
android:onClick="onClickEntferneFragment"
android:text="Wiedergabe" />
android:text="Anzeige verbergen"
android:onClick="onClickEntferneFragment"/>


<FrameLayout
android:id="@+id/frame"
android:layout_width="match_parent"
android:layout_height="200dp"
android:layout_below="@+id/btnAufnahme"
android:layout_alignParentStart="true">
android:layout_height="match_parent"
android:layout_below="@+id/btnAufnahmen"
android:layout_marginTop="25dp"
android:layout_alignParentStart="true"
android:background="@color/white">
</FrameLayout>


<!--
<ScrollView
android:id= "@+id/scrollView1"
android:layout_width= "wrap_content"
android:layout_height= "wrap_content"
android:layout_below= "@id/frame">
android:id="@+id/scrollView1"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_below="@id/btnAufnahmen"
android:layout_marginTop="25dp">
<LinearLayout
android:layout_width= "match_parent"
android:layout_height= "match_parent"
android:orientation= "vertical" >
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<TextView
android:id= "@+id/Alarm"
android:layout_width= "wrap_content"
android:layout_height= "wrap_content" />
android:id="@+id/Alarm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:background="@color/red"/>
</LinearLayout>
</ScrollView>
-->


</RelativeLayout>

+ 17
- 13
app/src/main/res/layout/fragment1.xml View File

android:layout_height="match_parent"
android:gravity="center_horizontal"
android:orientation="vertical"
android:background="@android:color/holo_green_light">
android:background="@color/bluedark">


<TextView
android:id="@+id/Sensor"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textAppearance="?android:attr/textAppearanceLarge"/>

<TextView
android:id="@+id/Alarm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/Sensor"
android:textAppearance="?android:attr/textAppearanceLarge"/>
<ScrollView
android:id="@+id/scrollView1"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginTop="25dp">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<TextView
android:id="@+id/Alarm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="@color/yellow"/>
</LinearLayout>
</ScrollView>


</RelativeLayout>

+ 19
- 8
app/src/main/res/layout/fragment2.xml View File

android:orientation="vertical"
android:background="@android:color/holo_blue_light" >


<TextView
android:id="@+id/Sensor"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textAppearance="?android:attr/textAppearanceLarge"/>
<ScrollView
android:id="@+id/scrollView2"
android:layout_width="match_parent"
android:layout_height="250dp"
android:layout_marginTop="25dp">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<TextView
android:id="@+id/Aufzeichnungen"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="@color/yellow"/>
</LinearLayout>
</ScrollView>


<TextView
android:id="@+id/Alarm"
<VideoView
android:id="@+id/AusAuf"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/Sensor"
android:layout_below="@id/scrollView2"
android:textAppearance="?android:attr/textAppearanceLarge"/>


</LinearLayout>

+ 5
- 0
app/src/main/res/values/colors.xml View File

<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
<color name="red">#5C0000</color>
<color name="redbright">#EF3434</color>
<color name="greenbright">#469733</color>
<color name="bluedark">#053C8E</color>
<color name="yellow">#FFEB3B</color>
</resources>

+ 5
- 0
app/src/main/res/values/themes.xml View File

<item name="android:statusBarColor">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>

<style name="Button.Green" parent="ThemeOverlay.AppCompat">
<item name="colorAccent">#0F3E01</item>
</style>

</resources>
