Compare commits

...

6 Commits

Author SHA1 Message Date
316e19236f TextureView anstatt SurfaceView 2023-06-21 09:19:03 +02:00
b337d86655 Wenn Permission erteilt, wird die Aufzeichnung direkt von der Funktion onRequestPermissionsResult gestartet.
Fehlerbehebung in Zeile 61: if (requestCode == REQUEST_PERMISSION) {
davor: if (requestCode == REQUEST_PERMISSIONS) {
2023-06-19 21:09:16 +02:00
18eb62b034 Implementierung einer Runnable Class mit Preview in einer SurfaceView in der MainActivity 2023-06-18 17:22:55 +02:00
23ac26d0a4 Aufzeichnung von Video und Ton in MainActivity
ToDo: in Fragment aufrufen
ToDo: Gespeichertes Video um 90° gedreht
2023-06-17 12:15:39 +02:00
518e207032 Runnable test implementation 2023-06-17 10:33:54 +02:00
7ed3408d01 Aufzeichnung von Video und Ton in MainActivity
ToDo: in Fragment aufrufen
ToDo: Gespeichertes Video um 90° gedreht
2023-06-16 20:34:15 +02:00
9 changed files with 410 additions and 21 deletions

View File

@ -2,6 +2,13 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" <manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.greenwatch"> package="com.example.greenwatch">
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_MEDIA_AUDIO" />
<uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.CAMERA" />
<application <application
android:allowBackup="true" android:allowBackup="true"
android:icon="@mipmap/ic_launcher" android:icon="@mipmap/ic_launcher"

View File

@ -0,0 +1,225 @@
package com.example.greenwatch;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Environment;
import android.view.Surface;
import android.view.SurfaceHolder;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Records video and audio on separate worker threads, then merges the two
 * temporary files into a single MP4 via {@link MediaMuxer}.
 *
 * Lifecycle: construct with the preview {@link SurfaceTexture}, call
 * {@link #startRecording()}, later {@link #stopRecording()} which stops both
 * recorders, joins the threads, muxes the tracks and deletes the temp files.
 */
public class AlarmRecorder {
    // Temp-file paths for the separately captured video/audio streams.
    private String videoPath;
    private String audioPath;
    // Track formats read back from the temp files during the merge step.
    private MediaFormat videoFormat;
    private MediaFormat audioFormat;
    private MediaExtractor videoExtractor;
    private MediaExtractor audioExtractor;
    private Runnable audioRecorderRunnable;
    private Runnable videoRecorderRunnable;
    private MediaRecorder videoRecorder;
    private MediaRecorder audioRecorder;
    private Surface surface;
    private Thread videoThread; // video capture thread
    private Thread audioThread; // audio capture thread

    public AlarmRecorder(SurfaceTexture surfaceTexture) {
        audioRecorder = new MediaRecorder();
        videoRecorder = new MediaRecorder();
        videoExtractor = new MediaExtractor();
        audioExtractor = new MediaExtractor();
        this.surface = new Surface(surfaceTexture);
    }

    /** Starts audio and video capture, each on its own thread. */
    public void startRecording() {
        createStoragePaths(); // storage location and names for the audio/video temp files
        audioRecorderRunnable = new AudioRecorder(audioRecorder, audioPath);
        videoRecorderRunnable = new VideoRecorder(videoRecorder, videoPath, surface);
        audioThread = new Thread(audioRecorderRunnable);
        videoThread = new Thread(videoRecorderRunnable);
        // Start both capture threads.
        videoThread.start();
        audioThread.start();
    }

    /** Stops both recorders, waits for the threads, then merges the temp files. */
    public void stopRecording() {
        try {
            stopVideoRecording();
            stopAudioRecording();
            waitTillThreadsStopped();
            File videoFile = new File(videoPath); // recorded video temp file
            File audioFile = new File(audioPath); // recorded audio temp file
            if (videoFile.exists() && audioFile.exists()) {
                // Both streams were stopped and saved; merge them into one file.
                mergeVideoWithAudio();
            }
        } catch (RuntimeException stopException) {
            // MediaRecorder.stop() throws if stop is called before any data was captured.
            stopException.printStackTrace();
        }
    }

    private void createStoragePaths() {
        // Paths for temporarily storing the captured audio and video streams.
        String externalStorageDirectory = Environment.getExternalStorageDirectory().getAbsolutePath();
        String dcimDirectory = externalStorageDirectory + "/DCIM";
        videoPath = dcimDirectory + "/video.mp4";
        // NOTE(review): the container written by AudioRecorder is MPEG-4/AAC,
        // so ".mp3" is a misleading extension — kept for compatibility with
        // any external consumer of this path.
        audioPath = dcimDirectory + "/audio.mp3";
    }

    private void stopVideoRecording() {
        if (videoRecorder != null) {
            videoRecorder.stop();
            videoRecorder.release();
            videoRecorder = null;
        }
    }

    private void stopAudioRecording() {
        if (audioRecorder != null) {
            audioRecorder.stop();
            audioRecorder.release();
            audioRecorder = null;
        }
    }

    /** Joins both capture threads; restores the interrupt flag if interrupted. */
    private void waitTillThreadsStopped() {
        try {
            videoThread.join();
            audioThread.join();
        } catch (InterruptedException e) {
            // Preserve interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }

    /** Muxes the separate video and audio temp files into one MP4, then deletes them. */
    private void mergeVideoWithAudio() {
        try {
            setVideoExtractorDataSource(); // open the previously stored video temp file
            setAudioExtractorDataSource(); // open the previously stored audio temp file
            if (videoFormat == null || audioFormat == null) {
                // A track was not found; addTrack(null) would throw, so bail out.
                return;
            }
            // Destination of the merged output file.
            String outputFilePath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath() + "/merged_video.mp4";
            // MediaMuxer combines the audio and the video track into one container.
            MediaMuxer muxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            try {
                int videoTrack = muxer.addTrack(videoFormat);
                int audioTrack = muxer.addTrack(audioFormat);
                muxer.start();
                ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024);
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                videoExtraction(buffer, videoTrack, bufferInfo, muxer);
                audioExtraction(buffer, audioTrack, bufferInfo, muxer);
                muxer.stop();
            } finally {
                muxer.release(); // always free muxer resources, even on failure
            }
            // Remove the now-merged separate video and audio temp files.
            deleteVideoFile();
            deleteAudioFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void setVideoExtractorDataSource() {
        try {
            videoExtractor.setDataSource(videoPath);
            int videoTrackIndex = getTrackIndex(videoExtractor, "video/");
            if (videoTrackIndex < 0) {
                // Video track not found; videoFormat stays null.
                return;
            }
            videoExtractor.selectTrack(videoTrackIndex);
            videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private void setAudioExtractorDataSource() {
        try {
            audioExtractor.setDataSource(audioPath);
            int audioTrackIndex = getTrackIndex(audioExtractor, "audio/");
            if (audioTrackIndex < 0) {
                // Audio track not found; audioFormat stays null.
                return;
            }
            audioExtractor.selectTrack(audioTrackIndex);
            audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Returns the index of the first track whose MIME type starts with {@code mimeType}, or -1. */
    private int getTrackIndex(MediaExtractor extractor, String mimeType) {
        int trackCount = extractor.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String trackMimeType = format.getString(MediaFormat.KEY_MIME);
            if (trackMimeType.startsWith(mimeType)) {
                return i;
            }
        }
        return -1;
    }

    /** Copies every video sample from the extractor into the muxer's video track. */
    private void videoExtraction(ByteBuffer buffer, int videoTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        while (true) {
            int sampleSize = videoExtractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                break; // end of stream
            }
            long presentationTimeUs = videoExtractor.getSampleTime();
            bufferInfo.offset = 0;
            bufferInfo.size = sampleSize;
            // Use the real per-sample flags; previously every sample was stamped
            // BUFFER_FLAG_KEY_FRAME, mislabelling non-key frames in the output.
            // (MediaExtractor.SAMPLE_FLAG_SYNC matches MediaCodec.BUFFER_FLAG_KEY_FRAME.)
            bufferInfo.flags = videoExtractor.getSampleFlags();
            bufferInfo.presentationTimeUs = presentationTimeUs;
            muxer.writeSampleData(videoTrack, buffer, bufferInfo);
            videoExtractor.advance();
        }
    }

    /** Copies every audio sample from the extractor into the muxer's audio track. */
    private void audioExtraction(ByteBuffer buffer, int audioTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        while (true) {
            int sampleSize = audioExtractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                break; // end of stream
            }
            long presentationTimeUs = audioExtractor.getSampleTime();
            bufferInfo.offset = 0;
            bufferInfo.size = sampleSize;
            bufferInfo.flags = audioExtractor.getSampleFlags(); // per-sample flags, not a constant
            bufferInfo.presentationTimeUs = presentationTimeUs;
            muxer.writeSampleData(audioTrack, buffer, bufferInfo);
            audioExtractor.advance();
        }
    }

    private void deleteVideoFile() {
        File videoFile = new File(videoPath);
        if (videoFile.exists()) {
            videoFile.delete();
        }
    }

    private void deleteAudioFile() {
        File audioFile = new File(audioPath);
        if (audioFile.exists()) {
            audioFile.delete();
        }
    }
}

View File

@ -0,0 +1,30 @@
package com.example.greenwatch;
import android.media.MediaRecorder;
import java.io.IOException;
/**
 * Runnable that configures a shared {@link MediaRecorder} for audio-only
 * capture (MPEG-4 container, AAC encoder) and starts it.
 *
 * The recorder instance is owned by the caller (AlarmRecorder), which is also
 * responsible for stopping and releasing it.
 */
public class AudioRecorder implements Runnable {
    private final MediaRecorder recorder;   // recorder owned by the caller
    private final String outputPath;        // destination file for the audio capture

    public AudioRecorder(MediaRecorder audioRecorder, String audioPath) {
        this.recorder = audioRecorder;
        this.outputPath = audioPath;
    }

    @Override
    public void run() {
        // Configuration order required by MediaRecorder: source, then output
        // format, then encoder, then output file.
        recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
        recorder.setOutputFile(outputPath);
        try {
            recorder.prepare();
            recorder.start();
        } catch (IOException e) {
            // prepare() failed (e.g. output path not writable); capture never starts.
            e.printStackTrace();
        }
    }
}

View File

@ -1,4 +0,0 @@
package com.example.greenwatch;
// Empty placeholder for the acceleration-sensor component; no behavior implemented.
public class Beschleunigungssensor {
}

View File

@ -1,4 +0,0 @@
package com.example.greenwatch;
// Empty placeholder for the camera component; no behavior implemented.
public class Kamera {
}

View File

@ -1,14 +1,101 @@
package com.example.greenwatch; package com.example.greenwatch;
import androidx.appcompat.app.AppCompatActivity; import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.os.Bundle; import android.os.Bundle;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity { import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener{
private boolean isRecording = false;
private static final int REQUEST_PERMISSION = 200;
private Button button;
private TextureView textureView;
private AlarmRecorder alarmRecorder;
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main); setContentView(R.layout.activity_main);
button = findViewById(R.id.button);
textureView = findViewById(R.id.textureView);
textureView.setSurfaceTextureListener(this);
button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (!isRecording) {
if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED
|| ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED
|| ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO},
REQUEST_PERMISSION);
} else {
isRecording = true;
alarmRecorder.startRecording();
}
} else {
isRecording = false;
alarmRecorder.stopRecording();
}
}
});
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
alarmRecorder = new AlarmRecorder(surfaceTexture);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Die Größe der SurfaceTexture hat sich geändert
// Hier können entsprechende Anpassungen vorgenommen werden
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
// SurfaceTexture wurde zerstört
// Hier können entsprechende Bereinigungen durchgeführt werden
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// SurfaceTexture wurde aktualisiert
// Hier können entsprechende Aktionen ausgeführt werden, wenn gewünscht
}
private SurfaceTexture getSurfaceTexture() {
return textureView.getSurfaceTexture();
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_PERMISSION) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// Berechtigungen wurden gewährt
Toast.makeText(this, "Berechtigungen gewährt", Toast.LENGTH_SHORT).show();
isRecording = true;
alarmRecorder.startRecording();
} else {
// Berechtigungen wurden nicht gewährt
Toast.makeText(this, "Berechtigungen nicht gewährt", Toast.LENGTH_SHORT).show();
}
}
} }
} }

View File

@ -1,4 +0,0 @@
package com.example.greenwatch;
// Empty placeholder for the microphone component; no behavior implemented.
public class Mikrofon {
}

View File

@ -0,0 +1,39 @@
package com.example.greenwatch;
import android.media.MediaRecorder;
import android.view.Surface;
import android.view.SurfaceHolder;
import java.io.IOException;
/**
 * Runnable that configures a shared {@link MediaRecorder} for video-only
 * capture (camera source, MPEG-4 container, default encoder) and starts it.
 *
 * The recorder instance is owned by the caller (AlarmRecorder), which is also
 * responsible for stopping and releasing it.
 */
public class VideoRecorder implements Runnable {
    private final MediaRecorder videoRecorder; // recorder owned by the caller
    private final String videoPath;            // destination file for the video capture
    private final Surface surface;             // preview surface backing the TextureView

    public VideoRecorder(MediaRecorder videoRecorder, String videoPath, Surface surface) {
        this.videoRecorder = videoRecorder;
        this.videoPath = videoPath;
        this.surface = surface;
    }

    @Override
    public void run() {
        // Configuration order required by MediaRecorder: source, then output
        // format, then encoder, then output file.
        videoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        videoRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        videoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
        videoRecorder.setOutputFile(videoPath);
        // Hint so players rotate the stored video upright (portrait capture).
        videoRecorder.setOrientationHint(90);
        videoRecorder.setPreviewDisplay(surface);
        try {
            videoRecorder.prepare();
            videoRecorder.start();
        } catch (IOException e) {
            // prepare() failed (e.g. camera busy or path not writable); capture never starts.
            e.printStackTrace();
        }
    }
}

View File

@ -6,13 +6,26 @@
android:layout_height="match_parent" android:layout_height="match_parent"
tools:context=".MainActivity"> tools:context=".MainActivity">
<TextView <TextureView
android:id="@+id/textureView"
android:layout_width="1sp"
android:layout_height="1sp"
android:visibility="visible"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="1.0"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/button"
app:layout_constraintVertical_bias="0.0" />
<Button
android:id="@+id/button"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text="Hello World!" android:layout_marginEnd="339dp"
app:layout_constraintBottom_toBottomOf="parent" android:text="Record Video"
app:layout_constraintLeft_toLeftOf="parent" app:layout_constraintHorizontal_bias="0.5"
app:layout_constraintRight_toRightOf="parent" app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"/> app:layout_constraintTop_toTopOf="parent"/>
</androidx.constraintlayout.widget.ConstraintLayout> </androidx.constraintlayout.widget.ConstraintLayout>