
Implementation of a Runnable class with preview in a SurfaceView in the MainActivity

Aufzeichnung
Maria Nutz, 1 year ago · commit 18eb62b034

app/src/main/AndroidManifest.xml (+3 / -1)

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.greenwatch">

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_MEDIA_AUDIO" />
    <uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.CAMERA" />



app/src/main/java/com/example/greenwatch/AlarmRecorder.java (+222 / -0)

package com.example.greenwatch;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Environment;
import android.view.SurfaceHolder;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

public class AlarmRecorder {
private String videoPath;
private String audioPath;
private MediaFormat videoFormat;
private MediaFormat audioFormat;
private MediaExtractor videoExtractor;
private MediaExtractor audioExtractor;
private Runnable audioRecorderRunnable;
private Runnable videoRecorderRunnable;
private MediaRecorder videoRecorder;
private MediaRecorder audioRecorder;
private SurfaceHolder previewHolder;
private Thread videoThread; // video thread kept as an instance field
private Thread audioThread; // audio thread kept as an instance field

public AlarmRecorder(SurfaceHolder previewHolder) {
audioRecorder = new MediaRecorder();
videoRecorder = new MediaRecorder();
videoExtractor = new MediaExtractor();
audioExtractor = new MediaExtractor();
this.previewHolder = previewHolder;
}

public void startRecording() {
createStoragePaths(); //storage location and file names for the audio and video file

audioRecorderRunnable = new AudioRecorder(audioRecorder, audioPath);
videoRecorderRunnable = new VideoRecorder(videoRecorder, videoPath, previewHolder);
audioThread = new Thread(audioRecorderRunnable);
videoThread = new Thread(videoRecorderRunnable);

//start both threads
videoThread.start();
audioThread.start();
}

public void stopRecording() {
try {
stopVideoRecording();
stopAudioRecording();

waitTillThreadsStopped();
File videoFile = new File(videoPath); //the recorded video file
File audioFile = new File(audioPath); //the recorded audio file

if (videoFile.exists() && audioFile.exists()) {
//once both recordings have been stopped and saved, merge them into a single file
mergeVideoWithAudio();
}
} catch (RuntimeException stopException) {
stopException.printStackTrace();
}
}

private void createStoragePaths(){
//paths for temporarily caching the recorded audio and video files
String externalStorageDirectory = Environment.getExternalStorageDirectory().getAbsolutePath();
String dcimDirectory = externalStorageDirectory + "/DCIM";
videoPath = dcimDirectory + "/video.mp4";
audioPath = dcimDirectory + "/audio.mp3"; //note: holds MPEG-4/AAC data despite the .mp3 extension
}

private void stopVideoRecording(){
if (videoRecorder != null) {
videoRecorder.stop();
videoRecorder.release();
videoRecorder = null;
}
}

private void stopAudioRecording(){
if (audioRecorder != null) {
audioRecorder.stop();
audioRecorder.release();
audioRecorder = null;
}
}

private void waitTillThreadsStopped(){
try {
videoThread.join();
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}

private void mergeVideoWithAudio() {
try {
setVideoExtractorDataSource(); //extract the previously cached video file
setAudioExtractorDataSource(); //extract the previously cached audio file

//storage location of the merged output file
String outputFilePath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath() + "/merged_video.mp4";
//MediaMuxer merges the audio and the video file into one MP4
MediaMuxer muxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int videoTrack = muxer.addTrack(videoFormat);
int audioTrack = muxer.addTrack(audioFormat);
muxer.start();

ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024);
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

videoExtraction(buffer, videoTrack, bufferInfo, muxer);
audioExtraction(buffer, audioTrack, bufferInfo, muxer);

muxer.stop();
muxer.release();

// delete the separate video and audio files
deleteVideoFile();
deleteAudioFile();
} catch (IOException e) {
e.printStackTrace();
}
}

private void setVideoExtractorDataSource() {
try {
videoExtractor.setDataSource(videoPath);
int videoTrackIndex = getTrackIndex(videoExtractor, "video/");
if (videoTrackIndex < 0) {
// no video track found
return;
}
videoExtractor.selectTrack(videoTrackIndex);
videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
} catch (IOException e) {
throw new RuntimeException(e);
}
}

private void setAudioExtractorDataSource() {
try {
audioExtractor.setDataSource(audioPath);
int audioTrackIndex = getTrackIndex(audioExtractor, "audio/");
if (audioTrackIndex < 0) {
// no audio track found
return;
}
audioExtractor.selectTrack(audioTrackIndex);
audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);

} catch (IOException e) {
throw new RuntimeException(e);
}
}

private int getTrackIndex(MediaExtractor extractor, String mimeType) {
int trackCount = extractor.getTrackCount();
for (int i = 0; i < trackCount; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String trackMimeType = format.getString(MediaFormat.KEY_MIME);
if (trackMimeType.startsWith(mimeType)) {
return i;
}
}
return -1;
}

private void videoExtraction(ByteBuffer buffer, int videoTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
while (true) {
int sampleSize = videoExtractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
break;
}
long presentationTimeUs = videoExtractor.getSampleTime();
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
//only mark samples that really are sync samples as key frames
bufferInfo.flags = (videoExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0 ? MediaCodec.BUFFER_FLAG_KEY_FRAME : 0;
bufferInfo.presentationTimeUs = presentationTimeUs;
muxer.writeSampleData(videoTrack, buffer, bufferInfo);
videoExtractor.advance();
}
}

private void audioExtraction(ByteBuffer buffer, int audioTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
while (true) {
int sampleSize = audioExtractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
break;
}
long presentationTimeUs = audioExtractor.getSampleTime();
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.flags = 0; // or MediaCodec.BUFFER_FLAG_KEY_FRAME
bufferInfo.presentationTimeUs = presentationTimeUs;
muxer.writeSampleData(audioTrack, buffer, bufferInfo);
audioExtractor.advance();
}
}

private void deleteVideoFile(){
File videoFile = new File(videoPath);
if (videoFile.exists()) {
videoFile.delete();
}
}

private void deleteAudioFile(){
File audioFile = new File(audioPath);
if (audioFile.exists()) {
audioFile.delete();
}
}
}
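A minimal usage sketch of AlarmRecorder, mirroring the wiring done in MainActivity further down in this commit (the surfaceView id comes from activity_main.xml; the surrounding Activity code is only implied, not shown here):

// Inside an Activity whose layout contains the surfaceView from this commit:
SurfaceView surfaceView = findViewById(R.id.surfaceView);
AlarmRecorder alarmRecorder = new AlarmRecorder(surfaceView.getHolder());

// start: spawns the audio and the video recording thread
alarmRecorder.startRecording();

// stop: stops both MediaRecorders, joins the threads and, if both cached files
// exist, merges them into Movies/merged_video.mp4 and deletes the cached files
alarmRecorder.stopRecording();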

app/src/main/java/com/example/greenwatch/AudioRecorder.java (+30 / -0)

package com.example.greenwatch;

import android.media.MediaRecorder;

import java.io.IOException;

public class AudioRecorder implements Runnable {

private final MediaRecorder audioRecorder;
private final String audioPath;
public AudioRecorder(MediaRecorder audioRecorder, String audioPath) {
this.audioRecorder = audioRecorder;
this.audioPath = audioPath;
}
@Override
public void run() {
audioRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
audioRecorder.setOutputFile(audioPath);

try {
audioRecorder.prepare();
audioRecorder.start();
} catch (IOException e) {
e.printStackTrace();
}
}

}

app/src/main/java/com/example/greenwatch/Beschleunigungssensor.java (+0 / -4)

package com.example.greenwatch;

public class Beschleunigungssensor {
}

app/src/main/java/com/example/greenwatch/Kamera.java (+0 / -4)

package com.example.greenwatch;

public class Kamera {
}

app/src/main/java/com/example/greenwatch/MainActivity.java (+18 / -347)



import android.Manifest;
import android.content.pm.PackageManager;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import android.widget.VideoView;

import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;

public class MainActivity extends AppCompatActivity {


private boolean alarm_on = false; //ToDo: set this flag from the alarm variable (true) instead of via a button
private boolean alarm_off = false; //ToDo: set this flag from the alarm variable (false) instead of via a button
private boolean isRecording = false;
private static final int REQUEST_PERMISSION = 200;
private Button button;
private VideoView videoView;
private MediaRecorder mediaRecorder;
private MediaRecorder audioRecorder;
private MediaFormat videoFormat;
private MediaFormat audioFormat;
private MediaExtractor videoExtractor;
private MediaExtractor audioExtractor;
private String videoPath;
private String audioPath;
private boolean isRecording = false;

private Thread videoThread; // video thread kept as an instance field
private Thread audioThread; // audio thread kept as an instance field

private SurfaceView surfaceView;
private AlarmRecorder alarmRecorder;
private static final int REQUEST_PERMISSIONS = 123;


@Override
setContentView(R.layout.activity_main);


button = findViewById(R.id.button);
videoView = findViewById(R.id.videoView);
surfaceView = findViewById(R.id.surfaceView);
alarmRecorder = new AlarmRecorder(surfaceView.getHolder());


button.setOnClickListener(new View.OnClickListener() {
@Override
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO},
REQUEST_PERMISSION);


} else {
alarm_on = true;
onAlarmStateChanged();
isRecording = true;
alarmRecorder.startRecording();
}
} else {
alarm_off = true;
onAlarmStateChanged();
isRecording = false;
alarmRecorder.stopRecording();
}
}
});
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_PERMISSION) {
boolean allPermissionsGranted = true;
for (int grantResult : grantResults) {
if (grantResult != PackageManager.PERMISSION_GRANTED) {
allPermissionsGranted = false;
break;
}
}
if (allPermissionsGranted) {
startRecording(); // start recording, since all permissions were granted
if (requestCode == REQUEST_PERMISSIONS) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permissions were granted
Toast.makeText(this, "Berechtigungen gewährt", Toast.LENGTH_SHORT).show();
isRecording = true;
alarmRecorder.startRecording();
} else {
Toast.makeText(this, "Berechtigung verweigert.", Toast.LENGTH_SHORT).show();
}
}
}


public void onAlarmStateChanged() {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (alarm_on) {
alarm_on = false; //reset the flag
startRecording();
} else if (alarm_off) {
alarm_off = false; //reset the flag
stopRecording();
}
}
});
}

private void cache(){
//paths for temporarily caching the recorded audio and video files
isRecording = true;
String externalStorageDirectory = Environment.getExternalStorageDirectory().getAbsolutePath();
String dcimDirectory = externalStorageDirectory + "/DCIM";
videoPath = dcimDirectory + "/video.mp4";
audioPath = dcimDirectory + "/audio.mp3";
}

private void VideoThread(){
videoThread = new Thread(new Runnable() {
@Override
public void run() {
mediaRecorder = new MediaRecorder();
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
mediaRecorder.setOutputFile(videoPath);
mediaRecorder.setOrientationHint(90);
mediaRecorder.setPreviewDisplay(videoView.getHolder().getSurface());

try {
mediaRecorder.prepare();
mediaRecorder.start();
/*runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Videoaufzeichnung gestartet", Toast.LENGTH_SHORT).show();
}
});*/
} catch (IOException e) {
e.printStackTrace();
}
}
});
}

public class AudioTask implements Runnable {

@Override
public void run() {
audioRecorder = new MediaRecorder();
audioRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
audioRecorder.setOutputFile(audioPath);

try {
audioRecorder.prepare();
audioRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Audioaufzeichnung gestartet", Toast.LENGTH_SHORT).show();
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
}

/*private void AudioThread(){
audioThread = new Thread(new Runnable() {
@Override
public void run() {
audioRecorder = new MediaRecorder();
audioRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
audioRecorder.setOutputFile(audioPath);

try {
audioRecorder.prepare();
audioRecorder.start();
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Audioaufzeichnung gestartet", Toast.LENGTH_SHORT).show();
}
});
} catch (IOException e) {
e.printStackTrace();
}
}
});
}*/

private void startRecording() {
cache(); //storage location and file names for the audio and video file

VideoThread(); //create the video recording thread
Runnable audiotask = new AudioTask();
audioThread = new Thread(audiotask);
//AudioThread(); //create the audio recording thread

//start both threads
videoThread.start();
audioThread.start();
}

private void stopVideoRecording(){
if (mediaRecorder != null) {
mediaRecorder.stop();
mediaRecorder.release();
mediaRecorder = null;
}
}

private void stopAudioRecording(){
if (audioRecorder != null) {
audioRecorder.stop();
audioRecorder.release();
audioRecorder = null;
}
}

private void waitTillThreadsStopped(){
try {
videoThread.join();
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}

private void stopRecording() {
isRecording = false;
try {
stopVideoRecording();
stopAudioRecording();

runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Video- und Audioaufzeichnung beendet", Toast.LENGTH_SHORT).show();
}
});

waitTillThreadsStopped();
boolean test = videoThread.isAlive();
File videoFile = new File(videoPath); //the recorded video file
File audioFile = new File(audioPath); //the recorded audio file

if (videoFile.exists() && audioFile.exists()) {
//once both recordings have been stopped and saved, merge them into a single file
Toast.makeText(MainActivity.this, String.valueOf(test) , Toast.LENGTH_SHORT).show();
mergeVideoWithAudio();
} else {
Toast.makeText(MainActivity.this, "Dateien wurden nicht gefunden!", Toast.LENGTH_SHORT).show();
}
} catch (RuntimeException stopException) {
stopException.printStackTrace();
}
}

private void newMediaExtractor() {
videoExtractor = new MediaExtractor();
try {
videoExtractor.setDataSource(videoPath);
int videoTrackIndex = getTrackIndex(videoExtractor, "video/");
if (videoTrackIndex < 0) {
// no video track found
return;
}
videoExtractor.selectTrack(videoTrackIndex);
videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
} catch (IOException e) {
throw new RuntimeException(e);
}
}

private void newAudioExtractor() {
audioExtractor = new MediaExtractor();
try {
audioExtractor.setDataSource(audioPath);
int audioTrackIndex = getTrackIndex(audioExtractor, "audio/");
if (audioTrackIndex < 0) {
// no audio track found
return;
}
audioExtractor.selectTrack(audioTrackIndex);
audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);

} catch (IOException e) {
throw new RuntimeException(e);
}
}

private void mediaExtraction(ByteBuffer buffer, int videoTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
while (true) {
int sampleSize = videoExtractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
break;
}
long presentationTimeUs = videoExtractor.getSampleTime();
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
bufferInfo.presentationTimeUs = presentationTimeUs;
muxer.writeSampleData(videoTrack, buffer, bufferInfo);
videoExtractor.advance();
}
}

private void audioExtraction(ByteBuffer buffer, int audioTrack, MediaCodec.BufferInfo bufferInfo, MediaMuxer muxer) {
audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
while (true) {
int sampleSize = audioExtractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
break;
}
long presentationTimeUs = audioExtractor.getSampleTime();
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.flags = 0; // or MediaCodec.BUFFER_FLAG_KEY_FRAME
bufferInfo.presentationTimeUs = presentationTimeUs;
muxer.writeSampleData(audioTrack, buffer, bufferInfo);
audioExtractor.advance();
}
}

private void mergeVideoWithAudio() {
try {
newMediaExtractor(); //extract the previously cached video file
newAudioExtractor(); //extract the previously cached audio file

//storage location of the merged output file
String outputFilePath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath() + "/merged_video.mp4";
//MediaMuxer merges the audio and the video file into one MP4
MediaMuxer muxer = new MediaMuxer(outputFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int videoTrack = muxer.addTrack(videoFormat);
int audioTrack = muxer.addTrack(audioFormat);
muxer.start();

ByteBuffer buffer = ByteBuffer.allocate(1024 * 1024);
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

mediaExtraction(buffer, videoTrack, bufferInfo, muxer);
audioExtraction(buffer, audioTrack, bufferInfo, muxer);

muxer.stop();
muxer.release();

runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Video und Audio erfolgreich zusammengeführt", Toast.LENGTH_SHORT).show();
}
});

// delete the separate video and audio files
deleteVideoFile();
deleteAudioFile();
} catch (IOException e) {
e.printStackTrace();
}
}

private void deleteVideoFile(){
File videoFile = new File(videoPath);
if (videoFile.exists()) {
videoFile.delete();
}
}

private void deleteAudioFile(){
File audioFile = new File(audioPath);
if (audioFile.exists()) {
audioFile.delete();
}
}

private int getTrackIndex(MediaExtractor extractor, String mimeType) {
int trackCount = extractor.getTrackCount();
for (int i = 0; i < trackCount; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String trackMimeType = format.getString(MediaFormat.KEY_MIME);
if (trackMimeType.startsWith(mimeType)) {
return i;
// permissions were not granted
Toast.makeText(this, "Berechtigungen nicht gewährt", Toast.LENGTH_SHORT).show();
}
}
return -1;
}
}
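For readability, the new permission handling added by this commit, pieced together from the added lines above (a sketch of the REQUEST_PERMISSIONS branch, not a verbatim copy of the file):

@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_PERMISSIONS) {
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // permissions granted: start the alarm recording
            Toast.makeText(this, "Berechtigungen gewährt", Toast.LENGTH_SHORT).show();
            isRecording = true;
            alarmRecorder.startRecording();
        } else {
            // permissions denied
            Toast.makeText(this, "Berechtigung verweigert.", Toast.LENGTH_SHORT).show();
        }
    }
}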

app/src/main/java/com/example/greenwatch/Mikrofon.java (+0 / -4)

package com.example.greenwatch;

public class Mikrofon {
}

app/src/main/java/com/example/greenwatch/VideoRecorder.java (+36 / -0)

package com.example.greenwatch;

import android.media.MediaRecorder;
import android.view.SurfaceHolder;

import java.io.IOException;

public class VideoRecorder implements Runnable{
private final MediaRecorder videoRecorder;
private final String videoPath;
private SurfaceHolder previewHolder;

public VideoRecorder(MediaRecorder videoRecorder, String videoPath, SurfaceHolder previewHolder) {
this.videoRecorder = videoRecorder;
this.videoPath = videoPath;
this.previewHolder = previewHolder;
}
@Override
public void run() {
videoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
videoRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
videoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
videoRecorder.setOutputFile(videoPath);
videoRecorder.setOrientationHint(90);
videoRecorder.setPreviewDisplay(previewHolder.getSurface());

try {
videoRecorder.prepare();
videoRecorder.start();

} catch (IOException e) {
e.printStackTrace();
}
}
}
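One caveat this commit does not address: MediaRecorder.setPreviewDisplay() requires the SurfaceView's surface to already exist when run() executes. A common way to guarantee that is to defer the start until the standard SurfaceHolder.Callback fires; a sketch under that assumption (the callback wiring is illustrative, not part of this commit):

surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // the preview surface is now valid; it is safe to create the
        // AlarmRecorder with this holder and to start recording here
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // no-op for this use case
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // recording should be stopped before the surface goes away
    }
});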


app/src/main/res/layout/activity_main.xml (+2 / -2)

android:layout_height="match_parent" android:layout_height="match_parent"
tools:context=".MainActivity"> tools:context=".MainActivity">


<VideoView
android:id="@+id/videoView"
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="1dp" android:layout_width="1dp"
android:layout_height="1dp" android:layout_height="1dp"
android:visibility="visible" android:visibility="visible"
