Thanks a lot for your response. It turns out that removing buf.unmap() stops the video streaming as well, although the error does go away. With buf.unmap() in place, streaming works (but the error appears). Please see the full code below to better understand the problem.
/*
* Used jars: jna-4.4.0.jar; opencv-330.jar; gst1-java-core-0.9.3.jar
* Gstreamer version - 1.12.4
*
*/
import java.nio.ByteBuffer;
import java.util.LinkedList;
import org.freedesktop.gstreamer.Buffer;
import org.freedesktop.gstreamer.Bus;
import org.freedesktop.gstreamer.Caps;
import org.freedesktop.gstreamer.ClockTime;
import org.freedesktop.gstreamer.Element;
import org.freedesktop.gstreamer.ElementFactory;
import org.freedesktop.gstreamer.Format;
import org.freedesktop.gstreamer.Gst;
import org.freedesktop.gstreamer.Message;
import org.freedesktop.gstreamer.Pipeline;
import org.freedesktop.gstreamer.elements.AppSrc;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;
import javafx.application.Application;
import javafx.stage.Stage;
public class AppSrcTest3 extends Application {
static {
// Load the OpenCV native library before any OpenCV class is touched.
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
}
// GStreamer pipeline elements (only appSrc, queue, convert and sink are
// actually added to the pipeline in initIfNeeded()).
private Element convert;
private Element videoRate;
private Element sink;
private Element videoFilter;
private Bus bus;
private Pipeline pipe;
////////// Add AppSrc to replace autoSrc of autovideosrc type
private AppSrc appSrc;
private Element queue;
// OpenCV state: matImage receives frames grabbed from the webcam capture.
private Mat matImage;
private VideoCapture capture;
// FIFO of raw frame bytes staged between capture and appsrc push.
private LinkedList<byte[]> preQueue;
private int sourceWidth;
private int sourceHeight;
// Target capture frame rate in frames per second.
private int fps = 30;
// Guards initIfNeeded() so the pipeline is only built once.
private boolean isInited = false;
private Caps videoCaps;
private Caps videoCaps2;
// Max number of frames the appsrc internal queue may hold (used to size
// setMaxBytes; 4 bytes/pixel assumes BGRA frames — see videoCaps).
private int SRC_QUEUE_SIZE = 30;
// Builds the capture device and GStreamer pipeline on construction.
// NOTE(review): this calls a public, overridable method from the
// constructor — fragile if the class is ever subclassed; initIfNeeded()
// is idempotent, which mitigates but does not remove the risk.
public AppSrcTest3() {
initIfNeeded();
}
/**
 * Builds the OpenCV capture device and the GStreamer pipeline exactly once.
 * Pipeline topology: appsrc -> queue -> autovideoconvert -> autovideosink.
 * Safe to call repeatedly; subsequent calls are no-ops.
 */
public void initIfNeeded() {
    if (isInited) {
        return;
    }

    sourceWidth = 640;
    sourceHeight = 480;

    // OpenCV VideoCapture supplies input frames for appsrc; device 0 is the
    // default webcam.
    capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, sourceWidth);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, sourceHeight);
    // BUG FIX: the original set CAP_PROP_FRAME_COUNT (the read-only frame
    // count of a video file) to the desired rate; CAP_PROP_FPS is the
    // writable capture frame-rate property.
    capture.set(Videoio.CAP_PROP_FPS, fps);
    capture.set(Videoio.CAP_PROP_FORMAT, Videoio.CAP_MODE_BGR);
    matImage = new Mat();

    preQueue = new LinkedList<byte[]>();

    // NOTE(review): the caps advertise BGRA (4 bytes/pixel) while the capture
    // is configured for BGR (3 bytes/pixel); getImageBytes() presumably
    // converts BGR -> BGRA (Imgproc is imported) — confirm, otherwise pushed
    // buffers will not match width*height*4.
    videoCaps = Caps.fromString(
            "video/x-raw,format=BGRA,width=" + sourceWidth + ", height=" + sourceHeight);

    appSrc = (AppSrc) ElementFactory.make("appsrc", "appSrc");
    appSrc.setLive(true);
    appSrc.setFormat(Format.BUFFERS);
    appSrc.setSize(-1); // stream length unknown
    // Bound appsrc's internal queue to SRC_QUEUE_SIZE BGRA frames.
    appSrc.setMaxBytes(SRC_QUEUE_SIZE * sourceWidth * sourceHeight * 4);
    appSrc.setCaps(videoCaps);

    queue = ElementFactory.make("queue", "queue");
    // BUG FIX: the original created the "filter" capsfilter twice in a row,
    // leaking the first native element instance.
    videoFilter = ElementFactory.make("capsfilter", "filter");
    videoRate = ElementFactory.make("videorate", "videoRate");
    convert = ElementFactory.make("autovideoconvert", "convert");
    sink = ElementFactory.make("autovideosink", "sink");

    videoCaps2 = Caps.fromString(
            "video/x-raw,format=BGRA,width=" + sourceWidth + ", height=" + sourceHeight);
    videoFilter.setCaps(videoCaps2);
    videoRate.setCaps(videoCaps);
    convert.setCaps(videoCaps);

    // NOTE(review): videoFilter and videoRate are configured above but never
    // added to or linked into the pipeline — either insert them into the
    // chain below or drop them entirely.
    pipe = new Pipeline();
    pipe.addMany(appSrc, queue, convert, sink);
    Pipeline.linkMany(appSrc, queue, convert, sink);

    // Log every bus message (errors, warnings, state changes) to stdout.
    bus = pipe.getBus();
    bus.connect(new Bus.MESSAGE() {
        @Override
        public void busMessage(Bus msgBus, Message message) {
            System.out.println(message.getStructure());
        }
    });

    appSrc.connect(new AppSrc.NEED_DATA() {
        @Override
        public void needData(AppSrc src, int size) {
            // BUG FIX: push exactly one buffer per NEED_DATA callback. The
            // original looped `while (true)` here, which monopolizes the
            // GStreamer streaming thread and stalls the pipeline; GStreamer
            // re-invokes NEED_DATA whenever it wants more data.
            input(getImageBytes());
        }
    });

    isInited = true;
}
public void input(byte[] imageBytes) {
preQueue.add(imageBytes);
byte[] imgBytes = preQueue.removeFirst();