package com.opencameravideo;
import static org.bytedeco.javacpp.opencv_core.IPL_DEPTH_8U;
import java.io.File;
import java.io.IOException;
import java.nio.ShortBuffer;
import java.util.List;
import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Toast;
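/**
 * Demo activity that previews camera frames through OpenCV and pipes them,
 * together with microphone audio, into JavaCV's FFmpegFrameRecorder to
 * produce an MP4 file. Frames arrive either from the OpenCV camera view
 * (onCameraFrame) or from a local broadcast carrying a raw byte array.
 */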
public class VideoMainActivity extends Activity implements
CvCameraViewListener2 {
private JavaOpenCvCameraView javaOpenCvCameraView;
private Mat edgesMat;
private final Scalar greenScalar = new Scalar(0, 255, 0); // used by the disabled edge-overlay demo in onCameraFrame
private int resolutionIndex = 0;
Button startVideo, stopVideo;
private IplImage videoImage = null;
volatile boolean recording = false; // read from the audio thread, so it must be volatile
private volatile FFmpegFrameRecorder recorder;
// Recording parameters; the frame size is updated again in changeResolution().
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 60;
// Broadcast action for incoming frame byte arrays; must match the sender's action string.
private String RECEIVE_BYTE_BUFFER = "";
private Thread audioThread;
volatile boolean runAudioThread = true;
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private String ffmpeg_link;
long startTime = 0;
private static final String LOG_TAG = "VideoTest";
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.i("VideoTest", "OpenCV loaded successfully");
javaOpenCvCameraView.enableView();
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
startVideo = (Button) findViewById(R.id.startVideo);
stopVideo = (Button) findViewById(R.id.stopVideo);
javaOpenCvCameraView = (JavaOpenCvCameraView) findViewById(R.id.surface_view);
javaOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
javaOpenCvCameraView.setCvCameraViewListener(this);
LocalBroadcastManager.getInstance(VideoMainActivity.this)
.registerReceiver(receiverByteBuffer,
new IntentFilter(RECEIVE_BYTE_BUFFER));
startVideo.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
startVideo(startVideo);
}
});
stopVideo.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
stopRecording();
}
});
initRecorder();
}
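/**
 * Prepares the output file and the FFmpegFrameRecorder, and creates (but
 * does not start) the audio capture thread; the thread is started together
 * with the recorder in startVideo().
 */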
private void initRecorder() {
Log.w(LOG_TAG, "initRecorder");
// int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
int channels = 4;
// if (yuvIplimage == null) {
// Recreated after frame size is set in surface change method
// videoImage = IplImage.create(imageWidth, imageHeight, depth,
// channels);
// yuvIplimage = IplImage
// .create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
videoImage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
Log.v(LOG_TAG, "IplImage.create");
// }
File videoFile = new File("/mnt/sdcard",
"VideoTest/images/video.mp4");
boolean mk = videoFile.getParentFile().mkdirs();
Log.v(LOG_TAG, "Mkdir: " + mk);
boolean del = videoFile.delete();
Log.v(LOG_TAG, "del: " + del);
try {
boolean created = videoFile.createNewFile();
Log.v(LOG_TAG, "Created: " + created);
} catch (IOException e) {
Log.e(LOG_TAG, "Could not create output file", e);
}
ffmpeg_link = videoFile.getAbsolutePath();
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth,
imageHeight, 1);
Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: "
+ imageWidth + " imageHeight " + imageHeight);
recorder.setFormat("mp4");
Log.v(LOG_TAG, "recorder.setFormat(\"mp4\")");
recorder.setSampleRate(sampleAudioRateInHz);
Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
// re-set in the surface changed method as well
recorder.setFrameRate(frameRate);
Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
// Create audio recording thread
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
@Override
public void onPause() {
super.onPause();
if (javaOpenCvCameraView != null) {
javaOpenCvCameraView.disableView();
}
}
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
mLoaderCallback);
}
@Override
public void onDestroy() {
super.onDestroy();
if (javaOpenCvCameraView != null)
javaOpenCvCameraView.disableView();
if (receiverByteBuffer != null) {
LocalBroadcastManager.getInstance(VideoMainActivity.this)
.unregisterReceiver(receiverByteBuffer);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
edgesMat = new Mat();
}
@Override
public void onCameraViewStopped() {
if (edgesMat != null)
edgesMat.release();
edgesMat = null;
}
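/**
 * Called for every preview frame. While recording, the RGBA pixels are
 * copied out of the Mat and handed to onFrame() for encoding.
 */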
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Log.e("", "onCameraFrame is call");
Mat rgba = inputFrame.rgba();
// A Canny edge-overlay demo (green edges drawn into a centered
// sub-window of the frame) used to live here; it is omitted to keep the
// recording path fast. edgesMat and greenScalar are retained for it.
if (recording) {
// Copy the RGBA pixels out of the Mat into a plain byte array.
byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
rgba.get(0, 0, byteFrame);
onFrame(byteFrame);
}
return rgba;
}
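/**
 * Stops the audio thread, finalizes the MP4 via recorder.stop()/release(),
 * and asks the media scanner to index the finished file.
 */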
public void stopRecording() {
// This should stop the audio thread from running
runAudioThread = false;
if (recorder != null) {
Log.v(LOG_TAG,
"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
Toast.makeText(VideoMainActivity.this,
"saved ffmpeg_link::" + ffmpeg_link, Toast.LENGTH_SHORT)
.show();
recorder = null;
recording = false;
}
MediaScannerConnection.scanFile(VideoMainActivity.this,
new String[] { ffmpeg_link }, null, null);
}
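/**
 * Cycles through the camera's supported preview sizes and re-initializes
 * the recorder so its frame size matches the new resolution.
 */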
public void changeResolution(View v) {
List<android.hardware.Camera.Size> cameraResolutionList = javaOpenCvCameraView
.getResolutionList();
resolutionIndex++;
if (resolutionIndex >= cameraResolutionList.size()) {
resolutionIndex = 0;
}
Camera.Size resolution = cameraResolutionList
.get(resolutionIndex);
javaOpenCvCameraView.setResolution(resolution.width, resolution.height);
resolution = javaOpenCvCameraView.getResolution();
String caption = resolution.width + "x" + resolution.height;
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
imageWidth = resolution.width;
imageHeight = resolution.height;
// frameRate = cameraView.getFrameRate();
initRecorder();
}
int frames = 0;
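/**
 * Encodes one RGBA frame. The timestamp handed to FFmpegFrameRecorder is
 * in microseconds, hence the *1000 on the elapsed wall-clock millis.
 */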
private void onFrame(byte[] data) {
Log.e("", "data frame::" + data.length);
if (videoImage != null && recording) {
long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
// Copy the RGBA bytes straight into the preallocated IplImage. This
// assumes the incoming frame matches imageWidth x imageHeight; a larger
// frame would overflow the buffer.
videoImage.getByteBuffer().put(data);
try {
if (recorder != null) {
// Stamp the frame with the elapsed time (microseconds) and encode it.
recorder.setTimestamp(videoTimestamp);
recorder.record(videoImage);
frames++;
Log.i(LOG_TAG, "Wrote frame " + frames);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.e(LOG_TAG, "Failed to record video frame", e);
}
}
}
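/**
 * Toggles recording. On start it starts both the FFmpeg recorder and the
 * audio capture thread; on the next press it delegates to stopRecording().
 */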
public void startVideo(View v) {
recording = !recording;
Log.i(LOG_TAG, "Recording: " + recording);
if (recording) {
// stopRecording() nulls out the recorder, so rebuild it before a second take.
if (recorder == null) {
initRecorder();
}
startTime = System.currentTimeMillis();
try {
recorder.start();
// initRecorder() only creates the audio thread; it must be started
// here, otherwise no audio is ever captured.
runAudioThread = true;
audioThread.start();
Log.i(LOG_TAG, "STARTED RECORDING.");
} catch (Exception e) {
Log.e(LOG_TAG, "Failed to start recorder", e);
}
} else {
stopRecording();
}
}
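/**
 * Captures PCM audio from the microphone and feeds it to the recorder.
 * The loop exits when stopRecording() clears runAudioThread.
 */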
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
// Set the thread priority
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
// Audio Capture/Encoding Loop
while (runAudioThread) {
// Read from audioRecord
bufferReadResult = audioRecord.read(audioData, 0,
audioData.length);
if (bufferReadResult > 0) {
// Log.v(LOG_TAG,"audioRecord bufferReadResult: " +
// bufferReadResult);
// Changes in this variable may not be picked up despite it
// being "volatile"
if (recording) {
try {
// Write to FFmpegFrameRecorder
recorder.record(ShortBuffer.wrap(audioData, 0,
bufferReadResult));
} catch (FFmpegFrameRecorder.Exception e) {
Log.e(LOG_TAG, "Failed to record audio samples", e);
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished");
/* Capture/Encoding finished, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
MediaScannerConnection.scanFile(VideoMainActivity.this,
new String[] { ffmpeg_link }, null, null);
Log.v(LOG_TAG, "audioRecord released");
}
}
}
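/**
 * Receives raw frames broadcast by other components (keyed by the
 * "byte_data_arrays" extra) and routes them into the encoder.
 */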
BroadcastReceiver receiverByteBuffer = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(LOG_TAG, "receiverByteBuffer invoked");
if (intent != null && intent.getExtras() != null
&& intent.getExtras().containsKey("byte_data_arrays")) {
byte[] data = intent.getExtras().getByteArray(
"byte_data_arrays");
Log.e("", "data size::" + data.length);
onFrame(data);
}
}
};
}