fade=t=in:st=2.5:d=1
Thanks. I am not using ffmpeg on the command line; rather I am using the javacv library in an Android app. However, below is the command that produces the
desired effect for a video.
ffmpeg -i slide.mp4 -y -vf fade=t=in:st=2.5:d=1 slide_fade_in.mp4
1. I want to know what exactly is the usage of FFmpegFrameFilter ?
2. Does it process the video as a whole or frame by frame?
// Concatenates a list of slide videos (paths) with a list of recordings,
// applying a fade-in/fade-out filter to each slide, and writes the result
// to a single MP4 via FFmpegFrameRecorder.
// NOTE(review): this is a pasted fragment — the enclosing method and the
// catch clause for this try are not shown.
try {
// grabber1 reads the current slide video, grabber2 the matching recording.
FrameGrabber grabber1 = new FFmpegFrameGrabber(paths.get(0));
FrameGrabber grabber2 = new FFmpegFrameGrabber(recordings.get(0));
grabber1.start();
grabber2.start();
// Output: 320x240 MP4, MPEG-4 video at 10 fps, audio channel count taken
// from the recording stream.
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(video, 320,
240, grabber2.getAudioChannels());
recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
recorder.setFrameRate(10);
recorder.setVideoBitrate(10 * 1024 * 1024);
recorder.setFormat("mp4");
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.start();
Frame frame1, frame2;
for (int i = 0; i < paths.size(); i++) {
frame1 = grabber1 == null ? null : grabber1.grabFrame();
frame2 = grabber2 == null ? null : grabber2.grabFrame();
// getLengthInTime() is in microseconds; duration/1000000 converts to seconds
// for the fade-out start time below.
long duration = grabber2.getLengthInTime();
// Fade in over 1s starting at 0.5s, fade out over 1s ending at the clip end.
FFmpegFrameFilter fFmpegFrameFilter = new FFmpegFrameFilter("setpts=N,fade=t=in:st=0.5:d=1,fade=t=out:st="+((duration/1000000)-1)+":d=1"
,grabber1.getImageWidth(),grabber1.getImageHeight());
fFmpegFrameFilter.start();
long startTime = System.currentTimeMillis();
boolean first = true;
recorder.setTimestamp(1000 * startTime);
// NOTE(review): loop is bounded by wall-clock time vs. media length, not by
// frames remaining — timing of this Java loop and media time are unrelated,
// so the number of frames written is nondeterministic. Consider looping on
// grabFrame() returning null instead.
while((System.currentTimeMillis() - startTime) < (grabber2.getLengthInTime()/1000)){
// NOTE(review): frame1 is overwritten by pull() below and then re-pushed on
// the next iteration — the same (filtered) frame is fed back into the
// filter rather than grabbing the next frame from grabber1. Looks like a
// bug; verify against intended behavior.
fFmpegFrameFilter.push(frame1);
// while((frame1 = fFmpegFrameFilter.pull()) != null){
frame1 = fFmpegFrameFilter.pull();
Log.d("frame1", "" +grabber2.getLengthInTime()+" " + (System.currentTimeMillis() - startTime));
recorder.record(frame1);
}
System.out.println(recorder.getTimestamp());
// Copy the recording's frames (video + audio) through unfiltered.
while (frame2 != null) {
if (first) {
// Rewind the recorder timestamp once so the recording overlays from the
// same start time as the slide segment.
recorder.setTimestamp(1000 * startTime);
}
recorder.record(frame2);
System.out.println(recorder.getTimestamp());
frame2 = grabber2.grabFrame();
first = false;
}
// Advance both grabbers to the next slide/recording pair, if any.
if (i < paths.size() - 1) {
if (paths.get(i + 1) != null) {
grabber1.stop();
grabber1 = new FFmpegFrameGrabber(paths.get(i + 1));
grabber1.start();
} else
grabber1 = null;
if (recordings.get(i + 1) != null) {
grabber2.stop();
grabber2 = new FFmpegFrameGrabber(recordings.get(i + 1));
grabber2.start();
} else
grabber2 = null;
}
}
// NOTE(review): stop() calls will NPE if a grabber was set to null above;
// they are also skipped entirely if an exception is thrown earlier — a
// finally block (or try-with-resources) would be safer.
recorder.stop();
grabber1.stop();
grabber2.stop();
}
// apply filter
// Re-reads the intermediate file (temp) and re-encodes it with a fade-in
// filter applied, writing the final output to `video`.
FrameGrabber grabber = new FFmpegFrameGrabber(temp.getAbsolutePath());
grabber.start();
FFmpegFrameRecorder recorder1 = new FFmpegFrameRecorder(video, 320,
240, grabber.getAudioChannels());
Frame frame;
recorder1.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
recorder1.setFrameRate(10);
recorder1.setVideoBitrate(10 * 1024 * 1024);
recorder1.setFormat("mp4");
recorder1.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder1.start();
// Filter graph is hard-coded to the recorder's 320x240 output size.
// NOTE(review): if the input's real dimensions differ (or report 0 — see the
// discussion below), the filter cannot allocate its buffers.
FFmpegFrameFilter fFmpegFrameFilter = new FFmpegFrameFilter("setpts=N,fade=t=in:st=2.5:d=4",320, 240);
//"setpts=N,fade=t=in:st=0.5:d=1,fade=t=out:st="+((duration/1000000)-1)+":d=1"
fFmpegFrameFilter.setFrameRate(10);
// NOTE(review): redundant — the same filter string was already passed to the
// constructor above.
fFmpegFrameFilter.setFilters("setpts=N,fade=t=in:st=2.5:d=4");
fFmpegFrameFilter.start();
while((frame = grabber.grabFrame()) != null){
// NOTE(review): grabFrame() also returns audio-only frames whose image
// buffer is null; pushing those into a video filter is a likely source of
// the NullPointerException on Buffer.limit(int) reported below. Consider
// skipping frames with frame.image == null (record them directly instead).
fFmpegFrameFilter.push(frame, grabber.getPixelFormat());
Frame frame3;
// Drain every frame the filter has ready before grabbing the next input.
while ((frame3 = fFmpegFrameFilter.pull()) != null) {
recorder1.record(frame3, grabber.getPixelFormat());
}
}
recorder1.stop();
grabber.stop();
while((frame = grabber.grabFrame()) != null)
The error is: java.lang.NullPointerException: Attempt to invoke virtual method 'java.nio.Buffer java.nio.Buffer.limit(int)' on a null object reference.
The grabber is not null (I have checked it) and the path for the video is correct, but even grabber.getImageWidth()/getImageHeight() is returning 0, which was
causing a problem with the initialisation of the FFmpegFrameFilter before.
That's about all there is when it comes to "format"... Interested?
Now I am getting an error on this line:
while((frame = grabber.grabFrame()) != null) — the error is: java.lang.NullPointerException: Attempt to invoke virtual method 'java.nio.Buffer java.nio.Buffer.limit(int)' on a null object reference. The grabber is not null (I have checked it) and the path for the video is correct, but even grabber.getImageWidth()/getImageHeight() is returning 0, which was causing a problem with the initialisation of the FFmpegFrameFilter before.
Well, it can't allocate an image of width and height 0, so if your stream doesn't provide that info, try to call setImageWidth() and setImageHeight() before start().
Samuel
--
---
You received this message because you are subscribed to the Google Groups "javacv" group.
To unsubscribe from this group and stop receiving emails from it, send an email to javacv+un...@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
// Configure the grabber explicitly because the stream does not report its
// dimensions (getImageWidth/Height returned 0 — see discussion above), then
// open a matching 320x240 recorder for the intermediate file.
// FIX: width and height were swapped (setImageWidth(240)/setImageHeight(320));
// every recorder in this thread is 320 wide x 240 high, so the grabber must
// match or the filter/recorder will be fed transposed dimensions.
grabber.setImageWidth(320);
grabber.setImageHeight(240);
grabber.setFormat("mp4");
grabber.setFrameRate(10);
grabber.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
grabber.setVideoBitrate(10 * 1024 * 1024);
grabber.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
// Dimensions must be set BEFORE start() so the decoder can allocate buffers.
grabber.start();
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(temp, 320,
240, grabber2.getAudioChannels());
recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
recorder.setFrameRate(10);
recorder.setVideoBitrate(10 * 1024 * 1024);
recorder.setFormat("mp4");
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.start();
ffprobe version N-77455-g4707497 Copyright (c) 2007-2015 the FFmpeg developers
built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04)
configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libdcadec --enable-libfreetype --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvo-aacenc --enable-libvidstab
libavutil 55. 11.100 / 55. 11.100
libavcodec 57. 20.100 / 57. 20.100
libavformat 57. 20.100 / 57. 20.100
libavdevice 57. 0.100 / 57. 0.100
libavfilter 6. 21.101 / 6. 21.101
libavresample 3. 0. 0 / 3. 0. 0
libswscale 4. 0.100 / 4. 0.100
libswresample 2. 0.101 / 2. 0.101
libpostproc 54. 0.100 / 54. 0.100
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x3697c00] Could not find codec parameters for stream 0 (Video: mpeg4 (mp4v / 0x7634706D), none, 11 kb/s): unspecified size
Consider increasing the value for the 'analyzeduration' and 'probesize' options
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'temp.mp4':
Metadata:
major_brand : isom
minor_version : 512
compatible_brands: isomiso2mp41
encoder : Lavf56.40.101
Duration: 00:00:18.60, start: 0.023242, bitrate: 50 kb/s
Stream #0:0(und): Video: mpeg4 (mp4v / 0x7634706D), none, 11 kb/s, 1.45 fps, 10 tbr, 10240 tbn, 10240 tbc (default)
Metadata:
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 40 kb/s (default)
Metadata:
handler_name : SoundHandler
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x3697c00] Could not find codec
parameters for stream 0 (Video: mpeg4 (mp4v / 0x7634706D), none, 11 kb/s): unspecified
size
Consider increasing
the value for the 'analyzeduration' and 'probesize' options
BTW, with what player are you able to playback that file??
--
// H.264/AAC recorder configuration for the intermediate file: 960x540 video
// at 30 fps, mono-or-stereo AAC at 44.1 kHz depending on the source.
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(temp, 960,
540, grabber2.getAudioChannels());
recorder.setInterleaved(true);
// x264 options tuned for speed over compression efficiency.
recorder.setVideoOption("tune", "zerolatency");
recorder.setVideoOption("preset", "ultrafast");
// 68 KB/s expressed in bits per second.
recorder.setVideoBitrate(68*1024*8);
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
recorder.setFormat("mp4");
recorder.setFrameRate(30);
// 125 KB/s expressed in bits per second.
recorder.setAudioBitrate(125*1024*8);
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
recorder.setSampleRate(44100);
recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.start();
Could not find codec parameters for stream 0 (Video: h264 (avc1 / 0x31637661), none, 368x240, 25 kb/s): unspecified pixel format
Consider increasing the value for the 'analyzeduration' and 'probesize' options
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'temp.mp4':
Metadata:
major_brand : isom
minor_version : 512
compatible_brands: isomiso2avc1mp41
encoder : Lavf56.40.101
Duration: 00:00:18.47, start: 0.023242, bitrate: 61 kb/s
Stream #0:0(und): Video: h264 (avc1 / 0x31637661), none, 368x240, 25 kb/s, 1.35 fps, 30 tbr, 15360 tbn, 30720 tbc (default)
Metadata:
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 37 kb/s (default)
Metadata:
handler_name : SoundHandler
Are you calling recorder.record() and recorder.stop() somewhere in your
code?
I have tried changing those png files from rgb format to yuv, but either I am not doing it the right way or the problem is something else.

ffprobe -analyzeduration 100000000 temp.mp4
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'test.mp4':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: isommp42
creation_time : 2016-01-08 22:19:59
Duration: 00:00:38.20, start: 0.000000, bitrate: 3416 kb/s
Stream #0:0(eng): Video: h264 (Baseline) (avc1 / 0x31637661), yuv420p, 1280x720, 3293 kb/s, SAR 1:1 DAR 16:9, 30 fps, 30 tbr, 90k tbn, 180k tbc (default)
Metadata:
creation_time : 2016-01-08 22:19:59
handler_name : VideoHandle
Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 128 kb/s (default)
Metadata:
creation_time : 2016-01-08 22:19:59
handler_name : SoundHandle
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'temp.mp4':
Metadata:
major_brand : isom
minor_version : 512
compatible_brands: isomiso2avc1mp41
encoder : Lavf56.40.101
Duration: 00:00:17.14, start: 0.023220, bitrate: 72 kb/s
Stream #0:0(und): Video: h264 (Constrained Baseline) (avc1 / 0x31637661), yuv420p, 960x540, 586 kb/s, 30 fps, 30 tbr, 15360 tbn, 60 tbc (default)
Metadata:
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 37 kb/s (default)
Metadata:
handler_name : SoundHandler
Enter code here...