// Prepares (but does not start) the recording pipeline: allocates the YUV image
// used to hand preview frames to FFmpeg, configures the FFmpegFrameRecorder for
// an FLV stream, and creates the audio capture thread.
// Call order matters: must run before startRecording().
private void initRecorder() {
    Log.w(LOG_TAG,"init recorder");
    // Lazily allocate the frame buffer image; 2 channels at 8 bits each matches
    // the camera preview format being fed in from onPreviewFrame.
    if (yuvIplimage == null) {
        yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
    }
    Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
    // Last argument = 1 audio channel (mono), matching the AudioRecord setup below.
    recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
    recorder.setFormat("flv");
    recorder.setSampleRate(sampleAudioRateInHz);
    // Set in the surface changed method
    recorder.setFrameRate(frameRate);
    Log.i(LOG_TAG, "recorder initialize success");
    // Audio thread is created here but only started by startRecording();
    // runAudioThread is the loop flag its run() polls.
    audioRecordRunnable = new AudioRecordRunnable();
    audioThread = new Thread(audioRecordRunnable);
    runAudioThread = true;
}
// Ring buffer holding the most recent SECS_TO_BUFFER seconds of A/V frames.
// Pre-filled with MediaFrame instances: the original bare array left every
// element null, so the first write (mediaFrames[i].timestamp = ...) threw an NPE.
// NOTE(review): frameRate is read at field-initialization time, but a comment in
// initRecorder() says the frame rate is set in surfaceChanged — confirm frameRate
// already holds its final value when this initializer runs.
private MediaFrame[] mediaFrames = newMediaFrameBuffer(SECS_TO_BUFFER * frameRate);
// Index of the next slot to write; readers/writers take it modulo mediaFrames.length.
private int currentMediaFrame = 0;

// Builds the ring pre-populated with empty MediaFrame slots so writers
// (onPreviewFrame and the audio thread) never dereference a null element.
private MediaFrame[] newMediaFrameBuffer(int size) {
    MediaFrame[] buffer = new MediaFrame[size];
    for (int i = 0; i < buffer.length; i++) {
        buffer[i] = new MediaFrame();
    }
    return buffer;
}
// One buffered capture slot pairing a video frame with the audio samples
// captured near it.
class MediaFrame {
    // Capture time in microseconds (written as 1000 * System.currentTimeMillis()
    // by onPreviewFrame, matching FFmpegFrameRecorder's microsecond clock).
    long timestamp;
    // Raw preview bytes as delivered by onPreviewFrame; null until first written.
    byte[] videoFrame;
    // 16-bit PCM samples stored by the audio thread; null if no audio was
    // captured for this slot. Pairing with the video frame is best-effort.
    short[] audioFrame;
}
// For now called from another button, but will be called from onCreate.
// Flips the recording flag (observed by onPreviewFrame and the audio loop)
// and starts the audio capture thread prepared by initRecorder().
public void startRecording() {
    // Guard against a double start: Thread.start() throws
    // IllegalThreadStateException if the audio thread is already running.
    if (recording) {
        return;
    }
    recording = true;
    audioThread.start();
}
// Called from the stop button: encodes only what is inside the ring buffer
// (the last SECS_TO_BUFFER seconds), then releases the mic and the recorder.
//
// Fixes over the original:
//  * audio was indexed with currentMediaFrame%len instead of (currentMediaFrame+f)%len,
//    so every iteration fetched the same slot's audio;
//  * the drain loop called audioRecord.read() again, overwriting the buffered
//    samples with live microphone data instead of encoding what was buffered;
//  * InterruptedException now re-interrupts the thread instead of being swallowed;
//  * null ring slots are skipped instead of throwing NPE.
public void stopRecording() {
    // Signal the audio capture loop to exit and wait for the thread to finish.
    runAudioThread = false;
    try {
        audioThread.join();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status
        e.printStackTrace();
    }
    audioRecordRunnable = null;
    audioThread = null;

    if (recorder != null && recording) {
        recording = false;
        Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
        try {
            recorder.start();

            // Assuming we recorded at least a full buffer of frames, the slot at
            // currentMediaFrame % length is the oldest one; its timestamp anchors
            // the output's time base.
            MediaFrame oldest = mediaFrames[currentMediaFrame % mediaFrames.length];
            startTime = (oldest != null) ? oldest.timestamp : 0;

            for (int f = 0; f < mediaFrames.length; f++) {
                int idx = (currentMediaFrame + f) % mediaFrames.length;
                MediaFrame frame = mediaFrames[idx];
                if (frame == null || frame.videoFrame == null) {
                    continue; // slot was never written
                }

                // Keep the recorder clock aligned with the buffered capture times
                // (timestamps are in microseconds, same unit as getTimestamp()).
                long t = frame.timestamp - startTime;
                if (t > recorder.getTimestamp()) {
                    recorder.setTimestamp(t);
                }

                // Video
                Log.v(LOG_TAG,"Adding in frame: " + idx);
                yuvIplimage.getByteBuffer().put(frame.videoFrame);
                recorder.record(yuvIplimage);

                // Audio: encode the samples buffered alongside this frame.
                short[] audioData = frame.audioFrame;
                if (audioData != null && audioData.length > 0) {
                    recorder.record(ShortBuffer.wrap(audioData, 0, audioData.length));
                }
            }

            Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
            /* encoding finish, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG,"audioRecord released");
            }

            recorder.stop();
            recorder.release();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
        recorder = null;
    }
}
////AUDIO THREAD
//---------------------------------------------
// audio thread: captures microphone audio and stashes it into the ring buffer
// while `recording` is set; loops until runAudioThread is cleared.
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
    @Override
    public void run() {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

        // NOTE(review): getMinBufferSize returns a size in BYTES; using it as a
        // short[] length over-allocates by 2x. Harmless, but worth confirming
        // the intent before shrinking it.
        int bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
        short[] audioData = new short[bufferSize];

        Log.d(LOG_TAG, "audioRecord.startRecording()");
        audioRecord.startRecording();

        /* capture loop: read mic samples and store them in the current ring slot */
        while (runAudioThread) {
            int bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
            if (bufferReadResult > 0 && recording) {
                int idx = currentMediaFrame % mediaFrames.length;
                if (mediaFrames[idx] == null) {
                    mediaFrames[idx] = new MediaFrame(); // guard: slot may not be pre-allocated
                }
                // Copy only the samples actually read: the original copied the whole
                // buffer, storing stale tail samples when read() returned fewer.
                short[] samples = new short[bufferReadResult];
                System.arraycopy(audioData, 0, samples, 0, bufferReadResult);
                mediaFrames[idx].audioFrame = samples;
                // NOTE(review): a past experiment also overwrote the slot timestamp
                // and advanced currentMediaFrame here, which desynced the video.
                // Timestamping and index advancement are owned by onPreviewFrame.
            }
        }
    }
}
The following method is in class CameraView, which extends SurfaceView and implements SurfaceHolder.Callback and PreviewCallback:
@Override
// Camera preview callback: while recording, stamps the current ring slot with
// the capture time and a copy of the frame bytes, then advances the write index.
public void onPreviewFrame(byte[] data, Camera camera) {
    if (recording) {
        int idx = currentMediaFrame % mediaFrames.length;
        if (mediaFrames[idx] == null) {
            mediaFrames[idx] = new MediaFrame(); // guard: slot may not be pre-allocated
        }
        // Microseconds, matching FFmpegFrameRecorder's timestamp unit.
        mediaFrames[idx].timestamp = 1000 * System.currentTimeMillis();
        // Defensive copy: the camera can recycle `data` for the next preview
        // frame; storing the reference would let later frames silently
        // overwrite every buffered slot's pixels.
        mediaFrames[idx].videoFrame = data.clone();
        Log.v(LOG_TAG,"Buffered " + currentMediaFrame + " " + idx);
        currentMediaFrame++;
    }
}
}
Sorry for the formatting, and thank you very much.
...