package com.objectdetectionpoc;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.KeyPoint;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.DMatch;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.SurfaceHolder;
import android.util.Log;
/**
 * Camera-preview view demonstrating several OpenCV processing modes
 * (gray, RGBA, Canny edges, Hough circles, Hough lines, feature detection),
 * selected at runtime via {@code AndroidObjectDetectionPOCActivity.viewMode}.
 *
 * <p>Uses the OpenCV 2.3-era Android Java API ({@code FeatureDetector},
 * {@code DescriptorExtractor}, {@code Core.putText}). All Mats are allocated
 * in {@link #surfaceChanged} and explicitly released in {@link #run()} because
 * OpenCV Mats wrap native buffers the Java GC does not reclaim promptly.
 */
class Sample1View extends SampleViewBase {
    /** Full NV21 camera frame: height + height/2 rows of width bytes. */
    private Mat mYuv;
    /** RGBA output frame converted into the Bitmap returned by processFrame. */
    private Mat mRgba;
    /** View onto the Y (luma) plane of {@link #mYuv}; shares data, no copy. */
    private Mat mGraySubmat;
    /** Scratch Mats reused across frames (edge map / Hough results). */
    private Mat mIntermediateMat;
    private Mat mIntermediateMat2;
    /** Reference image loaded from R.drawable.wings; null if loading failed. */
    private Mat img1;
    /** ORB descriptors computed once for {@link #img1} in the constructor. */
    private Mat descriptors;
    /** FAST keypoints detected on {@link #img1}. */
    private List<KeyPoint> keypoints;
    private FeatureDetector detector;
    private DescriptorExtractor descriptor;
    private DescriptorMatcher matcher;
    /** One degree in radians — angular resolution passed to HoughLinesP. */
    private static final double THETA = Math.PI / 180;
    private static final Scalar GREEN = new Scalar(0, 255, 0);
    private static final Scalar RED = new Scalar(255, 0, 0);
    private static final Scalar BLUE = new Scalar(0, 0, 255);
    private static final List<Byte> MATCH_MASK = new ArrayList<Byte>();

    /**
     * Loads the reference image and precomputes its FAST keypoints and ORB
     * descriptors so per-frame matching only has to process the live frame.
     */
    public Sample1View(Context context) {
        super(context);
        try {
            img1 = Utils.loadResource(getContext(), R.drawable.wings);
        } catch (IOException e) {
            Log.w("Activity::LoadResource","Unable to load resource R.drawable.wings");
            e.printStackTrace();
        }
        descriptors = new Mat();
        keypoints = new ArrayList<KeyPoint>();
        detector = FeatureDetector.create(FeatureDetector.FAST);
        descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        // FIX: the original called detect/compute unconditionally, so a failed
        // resource load was logged and then immediately followed by an NPE on
        // the null img1. Skip precomputation when the image is unavailable;
        // keypoints/descriptors stay empty and the DETECT mode degrades safely.
        if (img1 != null) {
            detector.detect(img1, keypoints);
            descriptor.compute(img1, keypoints, descriptors);
        }
        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    }

    /**
     * (Re)allocates the working Mats for the new preview size. Synchronized
     * against {@link #run()} so allocation and release cannot interleave.
     */
    @Override
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        super.surfaceChanged(_holder, format, width, height);
        synchronized (this) {
            // NV21 layout: Y plane (height rows) followed by interleaved
            // VU plane (height/2 rows), each row getFrameWidth() bytes wide.
            mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
            // Gray view is just the Y plane — a submat, so it shares mYuv's data.
            mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            mIntermediateMat2 = new Mat();
        }
    }

    /**
     * Converts one NV21 preview frame according to the current view mode and
     * returns it as a Bitmap, or {@code null} if the conversion to Bitmap
     * failed (e.g. mRgba size/type no longer matches the frame Bitmap).
     *
     * @param data raw NV21 bytes from the camera preview callback
     * @return the rendered frame, or {@code null} on conversion failure
     */
    @Override
    protected Bitmap processFrame(byte[] data) {
        mYuv.put(0, 0, data);
        switch (AndroidObjectDetectionPOCActivity.viewMode) {
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_GRAY:
            Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_RGBA:
            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
            Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
            break;
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_CANNY:
            Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
            break;
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_CIRCLE:
            Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
            // NOTE(review): HoughCircles normally runs its own internal Canny on
            // a grayscale image; feeding it a pre-computed edge map is unusual —
            // confirm this is intentional.
            Imgproc.HoughCircles(mIntermediateMat, mIntermediateMat2, Imgproc.CV_HOUGH_GRADIENT, 16, 300);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
            Log.i("CircleDraw::mIntermediateMat", mIntermediateMat2.cols()+" "+mIntermediateMat2.rows());
            // Result is a 1-row Mat; each column is one circle (x, y, radius).
            for (int i = 0; i < mIntermediateMat2.cols(); i++) {
                double vCircle[] = mIntermediateMat2.get(0, i);
                Log.i("CircleDraw::vCircle0", vCircle[0]+"");
                Log.i("CircleDraw::vCircle1", vCircle[1]+"");
                Log.i("CircleDraw::vCircle2", vCircle[2]+"");
                int radius = (int) vCircle[2];
                Core.circle(mRgba, new Point(vCircle[0], vCircle[1]), radius, GREEN);
            }
            break;
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_LINE:
            Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100, 3);
            Imgproc.HoughLinesP(mIntermediateMat, mIntermediateMat2, 1, THETA, 100, 500, 10);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
            Log.i("LineDraw::mIntermediateMat", mIntermediateMat2.cols()+" "+mIntermediateMat2.rows());
            // Result is a 1-row Mat; each column is one segment (x1, y1, x2, y2).
            for (int i = 0; i < mIntermediateMat2.cols(); i++) {
                double vec[] = mIntermediateMat2.get(0, i);
                Log.i("LineDraw::vec0", vec[0]+"");
                Log.i("LineDraw::vec1", vec[1]+"");
                Log.i("LineDraw::vec2", vec[2]+"");
                Log.i("LineDraw::vec3", vec[3]+"");
                Point start = new Point(vec[0], vec[1]);
                Point end = new Point(vec[2], vec[3]);
                Core.line(mRgba, start, end, RED, 3);
            }
            break;
        case AndroidObjectDetectionPOCActivity.VIEW_MODE_DETECT:
            // Matching pipeline is currently disabled; this branch only shows
            // the reference image (debug placeholder).
            // List<KeyPoint> mKeyPoints = new ArrayList<KeyPoint>();
            // List<DMatch> matches = new ArrayList<DMatch>();
            // detector.detect(mGraySubmat, mKeyPoints);
            // descriptor.compute(mGraySubmat, mKeyPoints, mIntermediateMat);
            // matcher.match(mIntermediateMat, descriptors, matches);
            // Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
            // Features2d.drawMatches(img1, keypoints, mGraySubmat, mKeyPoints, matches, mRgba, GREEN, RED, MATCH_MASK, 0);
            Bitmap bm1 = Bitmap.createBitmap(img1.cols(), img1.rows(), Bitmap.Config.RGB_565);
            Utils.matToBitmap(img1, bm1);
            Bitmap bm2 = bm1.copy(Bitmap.Config.ARGB_8888, false);
            // FIX: release the old mRgba before replacing it; the original
            // dropped the reference without release(), leaking a native buffer
            // on every frame in this mode. NOTE(review): the replacement Mat has
            // img1's size, so the matToBitmap below likely fails (frame-sized
            // Bitmap) and processFrame returns null — confirm intended.
            Mat converted = Utils.bitmapToMat(bm2);
            mRgba.release();
            mRgba = converted;
            //Imgproc.resize(img1, mRgba, mRgba.size());
            break;
        }
        Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
        if (Utils.matToBitmap(mRgba, bmp))
            return bmp;
        // Conversion failed (size/type mismatch) — free the Bitmap immediately.
        bmp.recycle();
        return null;
    }

    /**
     * Runs the processing loop, then explicitly releases all working Mats once
     * the loop exits. Synchronized against {@link #surfaceChanged}.
     */
    @Override
    public void run() {
        super.run();
        synchronized (this) {
            // Explicitly deallocate native Mat buffers.
            if (mYuv != null)
                mYuv.release();
            if (mRgba != null)
                mRgba.release();
            if (mGraySubmat != null)
                mGraySubmat.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();
            // FIX: mIntermediateMat2 is allocated in surfaceChanged alongside
            // the others but was never released here — native memory leak.
            if (mIntermediateMat2 != null)
                mIntermediateMat2.release();
            mYuv = null;
            mRgba = null;
            mGraySubmat = null;
            mIntermediateMat = null;
            mIntermediateMat2 = null;
        }
    }
}