package com.example.greenwatch.alarmrecorder;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Environment;
import android.view.SurfaceHolder;
import android.widget.Toast;

import com.example.greenwatch.alarmrecorder.runnables.AudioRecorder;
import com.example.greenwatch.alarmrecorder.runnables.VideoRecorder;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

public class AlarmRecorder {
    private static AlarmRecorder alarmRecorderInstance;

    private boolean isRecording;
    private String videoPath;
    private String audioPath;
    private MediaFormat videoFormat;
    private MediaFormat audioFormat;
    private MediaExtractor videoExtractor;
    private MediaExtractor audioExtractor;
    private AudioRecorder audioRecorderRunnable;
    private VideoRecorder videoRecorderRunnable;
    private SurfaceHolder previewHolder;
    private MediaRecorder videoRecorder;
    //private MediaRecorder audioRecorder;
    private Thread videoThread;     // video thread kept as an instance field
    private Thread audioThread;     // audio thread kept as an instance field

    private AlarmRecorder() {
        //audioRecorder = new MediaRecorder();
        //videoRecorder = new MediaRecorder();
        videoExtractor = new MediaExtractor();
        audioExtractor = new MediaExtractor();
        audioRecorderRunnable = new AudioRecorder();
        videoRecorderRunnable = new VideoRecorder();
    }

    public static synchronized AlarmRecorder getInstance() {
        if (alarmRecorderInstance == null) {
            alarmRecorderInstance = new AlarmRecorder();
        }
        return alarmRecorderInstance;
    }

    public void setPreviewHolder(SurfaceHolder previewHolder) {
        this.previewHolder = previewHolder;
    }

    public void startRecording() {
        createStoragePaths();   // storage location and file names for the audio and video files
        setAudioPath(audioPath);
        setVideoPath(videoPath);
        setVideoRecorderPreviewHolder(previewHolder);

        //audioThread = new Thread(audioRecorderRunnable);
        videoThread = new Thread(videoRecorderRunnable);

        // start the recording threads
        videoThread.start();
        //audioThread.start();
    }

    public void stopRecording(Context context) {
        try {
            //stopAudioRecording();
            stopVideoRecording();
            Toast.makeText(context, "Video and audio recording stopped", Toast.LENGTH_SHORT).show();
            waitTillThreadsStopped();

            File videoFile = new File(videoPath);   // the recorded video file
            File audioFile = new File(audioPath);   // the recorded audio file
            if (videoFile.exists() && audioFile.exists()) {
                // Once both recordings are stopped and saved, merge them into a single file
                // mergeVideoWithAudio();
                Toast.makeText(context, "Video and audio merged successfully", Toast.LENGTH_SHORT).show();
            } else {
                Toast.makeText(context, "Files not found!", Toast.LENGTH_SHORT).show();
            }
        } catch (RuntimeException stopException) {
            stopException.printStackTrace();
        }
    }

    // Paths for temporarily storing the recorded audio and video files.
    // Note: Environment.getExternalStorageDirectory() is deprecated as of API 29 (scoped storage).
    private void createStoragePaths() {
        String externalStorageDirectory = Environment.getExternalStorageDirectory().getAbsolutePath();
        String dcimDirectory = externalStorageDirectory + "/DCIM";
        videoPath = dcimDirectory + "/video.mp4";
        audioPath = dcimDirectory + "/audio.mp3";
    }

    private void setVideoPath(String videoPath) {
        videoRecorderRunnable.setVideoPath(videoPath);
    }
    private void setVideoRecorderPreviewHolder(SurfaceHolder previewHolder) {
        videoRecorderRunnable.setPreviewHolder(previewHolder);
    }

    private void stopVideoRecording() {
        videoRecorderRunnable.stopVideoRecording();
    }

    private void setAudioPath(String audioPath) {
        audioRecorderRunnable.setAudioPath(audioPath);
    }

    private void stopAudioRecording() {
        audioRecorderRunnable.stopAudioRecording();
    }

    private void waitTillThreadsStopped() {
        try {
            // Null checks: the audio thread is currently never started (see startRecording()),
            // so joining it unconditionally would throw a NullPointerException.
            if (videoThread != null) {
                videoThread.join();
            }
            if (audioThread != null) {
                audioThread.join();
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    private void mergeVideoWithAudio() {
        try {
            setVideoExtractorDataSource();  // extract the previously stored video file
            setAudioExtractorDataSource();  // extract the previously stored audio file

            // Output location of the merged file
            String outputFilePath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath() + "/merged_video.mp4";

            // MediaMuxer combines the audio track and the video track into one MP4 file
            MediaMuxer muxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            int videoTrack = muxer.addTrack(videoFormat);
            int audioTrack = muxer.addTrack(audioFormat);
            muxer.start();

            ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024);
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            videoExtraction(buffer, videoTrack, bufferInfo, muxer);
            audioExtraction(buffer, audioTrack, bufferInfo, muxer);

            muxer.stop();
            muxer.release();

            // Delete the separate video and audio files
            deleteVideoFile();
            deleteAudioFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void setVideoExtractorDataSource() {
        try {
            videoExtractor.setDataSource(videoPath);
            int videoTrackIndex = getTrackIndex(videoExtractor, "video/");
            if (videoTrackIndex < 0) {
                // no video track found
                return;
            }
            videoExtractor.selectTrack(videoTrackIndex);
            videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private void setAudioExtractorDataSource() {
        try {
            audioExtractor.setDataSource(audioPath);
            int audioTrackIndex = getTrackIndex(audioExtractor, "audio/");
            if (audioTrackIndex < 0) {
                // no audio track found
                return;
            }
            audioExtractor.selectTrack(audioTrackIndex);
            audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // Returns the index of the first track whose MIME type starts with the given prefix, or -1 if none is found.
    private int getTrackIndex(MediaExtractor extractor, String mimeType) {
        int trackCount = extractor.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String trackMimeType = format.getString(MediaFormat.KEY_MIME);
            if (trackMimeType != null && trackMimeType.startsWith(mimeType)) {
                return i;
            }
        }
        return -1;
    }

    // Copies all video samples from the extractor into the muxer's video track.
    private void videoExtraction(ByteBuffer buffer, int videoTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        while (true) {
            int sampleSize = videoExtractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                break;
            }
            long presentationTimeUs = videoExtractor.getSampleTime();
            bufferInfo.offset = 0;
            bufferInfo.size = sampleSize;
            // Mark only real sync samples as key frames instead of flagging every sample.
            bufferInfo.flags = (videoExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0
                    ? MediaCodec.BUFFER_FLAG_KEY_FRAME
                    : 0;
            bufferInfo.presentationTimeUs = presentationTimeUs;
            muxer.writeSampleData(videoTrack, buffer, bufferInfo);
            videoExtractor.advance();
        }
    }
    // Copies all audio samples from the extractor into the muxer's audio track.
    private void audioExtraction(ByteBuffer buffer, int audioTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        while (true) {
            int sampleSize = audioExtractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                break;
            }
            long presentationTimeUs = audioExtractor.getSampleTime();
            bufferInfo.offset = 0;
            bufferInfo.size = sampleSize;
            bufferInfo.flags = 0;   // audio samples need no key-frame flag
            bufferInfo.presentationTimeUs = presentationTimeUs;
            muxer.writeSampleData(audioTrack, buffer, bufferInfo);
            audioExtractor.advance();
        }
    }

    private void deleteVideoFile() {
        File videoFile = new File(videoPath);
        if (videoFile.exists()) {
            videoFile.delete();
        }
    }

    private void deleteAudioFile() {
        File audioFile = new File(audioPath);
        if (audioFile.exists()) {
            audioFile.delete();
        }
    }
}
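
// Minimal usage sketch (illustration only, not part of the class): it assumes a host Activity,
// e.g. MainActivity, that owns a SurfaceView for the camera preview. The names used below
// (previewSurfaceView, alarmTriggered) are hypothetical and not defined in this project.
//
//     AlarmRecorder recorder = AlarmRecorder.getInstance();
//     recorder.setPreviewHolder(previewSurfaceView.getHolder());
//     if (alarmTriggered) {
//         recorder.startRecording();
//     } else {
//         recorder.stopRecording(this);   // a Context is needed for the Toast messages
//     }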