From 89d952c80b4171ab0e4a047849cd23aaeda57957 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Fri, 4 Jun 2021 15:41:04 +0800 Subject: [PATCH 01/15] add a realtimeStreaming example --- samples/FFmpegPushStreamingRealTime.java | 55 ++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 samples/FFmpegPushStreamingRealTime.java diff --git a/samples/FFmpegPushStreamingRealTime.java b/samples/FFmpegPushStreamingRealTime.java new file mode 100644 index 00000000..4dbcdb0f --- /dev/null +++ b/samples/FFmpegPushStreamingRealTime.java @@ -0,0 +1,55 @@ +import org.bytedeco.ffmpeg.global.avcodec; +import org.bytedeco.javacv.FFmpegFrameGrabber; +import org.bytedeco.javacv.FFmpegFrameRecorder; +import org.bytedeco.javacv.Frame; + + +/** + * @author wangxi <346461036@qq.com> + * This is an example that simulates the ffmpeg param "-re": + * Read input at native frame rate. + * Mainly used to simulate a grab device, or live input stream (e.g. when reading from a file). + * Should not be used with actual grab devices or live input streams (where it can cause packet loss). 
+ */ +public class FFmpegPushStreamingRealTime { + + public static final String RTMP_SERVER_URL = ""; + + public static final String LOCAL_FILE = ""; + + public static void main(String[] args) throws Exception { + FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(RTMP_SERVER_URL); + grabber.start(); + FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(LOCAL_FILE, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); + try { + recorder.setFormat("flv"); + recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); + recorder.setFrameRate(grabber.getFrameRate()); + recorder.setTimestamp(grabber.getTimestamp()); + recorder.start(); + + /** + * grab() may take some time,so we should record by real timestamp + */ + long begin = System.currentTimeMillis(); + Frame frame = null; + while ((frame = grabber.grab()) != null) { + long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - begin); + /** + * If the streaming is too fast, we sleep for a period of time according to the delay + */ + if (delay > 0) { + Thread.sleep(delay); + } + recorder.record(frame); + } + } finally { + recorder.stop(); + recorder.release(); + grabber.stop(); + grabber.release(); + } + + } + +} \ No newline at end of file From 9b6e4cea2bbf162f6db7eadd344cfccd41627e42 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Fri, 4 Jun 2021 17:36:09 +0800 Subject: [PATCH 02/15] add atFrame Option to grabber --- .../bytedeco/javacv/FFmpegFrameGrabber.java | 338 ++++++++++-------- 1 file changed, 180 insertions(+), 158 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index 5dafb479..8df206b0 100644 --- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -388,7 +388,7 @@ static class SeekCallback extends Seek_Pointer_long_int { private int samples_channels, samples_format, samples_rate; private boolean frameGrabbed; 
private Frame frame; - + private long startTime; private volatile boolean started = false; public boolean isCloseInputStream() { @@ -1291,165 +1291,187 @@ public Frame grabKeyFrame() throws Exception { public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames) throws Exception { return grabFrame(doAudio, doVideo, doProcessing, keyFrames, true); } - public synchronized Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { - try (PointerScope scope = new PointerScope()) { - - if (oc == null || oc.isNull()) { - throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)"); - } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null)) { - return null; - } - if (!started) { - throw new Exception("start() was not called successfully!"); - } - - boolean videoFrameGrabbed = frameGrabbed && frame.image != null; - boolean audioFrameGrabbed = frameGrabbed && frame.samples != null; - frameGrabbed = false; - frame.keyFrame = false; - frame.imageWidth = 0; - frame.imageHeight = 0; - frame.imageDepth = 0; - frame.imageChannels = 0; - frame.imageStride = 0; - frame.image = null; - frame.sampleRate = 0; - frame.audioChannels = 0; - frame.samples = null; - frame.data = null; - frame.opaque = null; - if (doVideo && videoFrameGrabbed) { - if (doProcessing) { - processImage(); - } - frame.keyFrame = picture.key_frame() != 0; - return frame; - } else if (doAudio && audioFrameGrabbed) { - if (doProcessing) { - processSamples(); - } - frame.keyFrame = samples_frame.key_frame() != 0; - return frame; - } - boolean done = false; - boolean readPacket = pkt.stream_index() == -1; - while (!done) { - int ret = 0; - if (readPacket) { - if (pkt.stream_index() != -1) { - // Free the packet that was allocated by av_read_frame - av_packet_unref(pkt); - } - if ((ret = av_read_frame(oc, pkt)) < 0) { - if (doVideo && video_st != null) { - // The video 
codec may have buffered some frames - pkt.stream_index(video_st.index()); - pkt.flags(AV_PKT_FLAG_KEY); - pkt.data(null); - pkt.size(0); - } else { - pkt.stream_index(-1); - return null; - } - } - } - - frame.streamIndex = pkt.stream_index(); - - // Is this a packet from the video stream? - if (doVideo && video_st != null && pkt.stream_index() == video_st.index() - && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) { - // Decode video frame - if (readPacket) { - ret = avcodec_send_packet(video_c, pkt); - if (pkt.data() == null && pkt.size() == 0) { - pkt.stream_index(-1); - if (ret < 0) { - return null; - } - } - if (ret < 0) { - throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding."); - } - } - - // Did we get a video frame? - got_frame[0] = 0; - while (ret >= 0 && !done) { - ret = avcodec_receive_frame(video_c, picture); - if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { - readPacket = true; - break; - } else if (ret < 0) { - throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding."); - } - got_frame[0] = 1; - - if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) { - long pts = picture.best_effort_timestamp(); - AVRational time_base = video_st.time_base(); - timestamp = 1000000L * pts * time_base.num() / time_base.den(); - // best guess, AVCodecContext.frame_number = number of decoded frames... 
- frameNumber = (int)Math.round(timestamp * getFrameRate() / 1000000L); - frame.image = image_buf; - if (doProcessing) { - processImage(); - } + public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { + return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, false); + } + public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData,boolean atFrameRate) throws Exception { + return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, atFrameRate); + } + + private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData ,boolean atFrameRate)throws Exception{ + try (PointerScope scope = new PointerScope()) { + + if (oc == null || oc.isNull()) { + throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)"); + } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null)) { + return null; + } + if (!started) { + throw new Exception("start() was not called successfully!"); + } + + boolean videoFrameGrabbed = frameGrabbed && frame.image != null; + boolean audioFrameGrabbed = frameGrabbed && frame.samples != null; + frameGrabbed = false; + frame.keyFrame = false; + frame.imageWidth = 0; + frame.imageHeight = 0; + frame.imageDepth = 0; + frame.imageChannels = 0; + frame.imageStride = 0; + frame.image = null; + frame.sampleRate = 0; + frame.audioChannels = 0; + frame.samples = null; + frame.data = null; + frame.opaque = null; + if (doVideo && videoFrameGrabbed) { + if (doProcessing) { + processImage(); + } + frame.keyFrame = picture.key_frame() != 0; + return frame; + } else if (doAudio && audioFrameGrabbed) { + if (doProcessing) { + processSamples(); + } + frame.keyFrame = samples_frame.key_frame() != 0; + return frame; + } + boolean done = false; + boolean readPacket = pkt.stream_index() == -1; + while (!done) { + int ret = 
0; + if (readPacket) { + if (pkt.stream_index() != -1) { + // Free the packet that was allocated by av_read_frame + av_packet_unref(pkt); + } + if ((ret = av_read_frame(oc, pkt)) < 0) { + if (doVideo && video_st != null) { + // The video codec may have buffered some frames + pkt.stream_index(video_st.index()); + pkt.flags(AV_PKT_FLAG_KEY); + pkt.data(null); + pkt.size(0); + } else { + pkt.stream_index(-1); + return null; + } + } + } + + frame.streamIndex = pkt.stream_index(); + + // Is this a packet from the video stream? + if (doVideo && video_st != null && pkt.stream_index() == video_st.index() + && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) { + // Decode video frame + if (readPacket) { + ret = avcodec_send_packet(video_c, pkt); + if (pkt.data() == null && pkt.size() == 0) { + pkt.stream_index(-1); + if (ret < 0) { + return null; + } + } + if (ret < 0) { + throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding."); + } + } + + // Did we get a video frame? + got_frame[0] = 0; + while (ret >= 0 && !done) { + ret = avcodec_receive_frame(video_c, picture); + if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { + readPacket = true; + break; + } else if (ret < 0) { + throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding."); + } + got_frame[0] = 1; + + if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) { + long pts = picture.best_effort_timestamp(); + AVRational time_base = video_st.time_base(); + timestamp = 1000000L * pts * time_base.num() / time_base.den(); + // best guess, AVCodecContext.frame_number = number of decoded frames... + frameNumber = (int)Math.round(timestamp * getFrameRate() / 1000000L); + frame.image = image_buf; + if (doProcessing) { + processImage(); + } /* the picture is allocated by the decoder. 
no need to free it */ - done = true; - frame.timestamp = timestamp; - frame.keyFrame = picture.key_frame() != 0; - } - } - } else if (doAudio && audio_st != null && pkt.stream_index() == audio_st.index()) { - // Decode audio frame - if (readPacket) { - ret = avcodec_send_packet(audio_c, pkt); - if (ret < 0) { - throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding."); - } - } - - // Did we get an audio frame? - got_frame[0] = 0; - while (ret >= 0 && !done) { - ret = avcodec_receive_frame(audio_c, samples_frame); - if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { - readPacket = true; - break; - } else if (ret < 0) { - throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding."); - } - got_frame[0] = 1; - - long pts = samples_frame.best_effort_timestamp(); - AVRational time_base = audio_st.time_base(); - timestamp = 1000000L * pts * time_base.num() / time_base.den(); - frame.samples = samples_buf; - /* if a frame has been decoded, output it */ - if (doProcessing) { - processSamples(); - } - done = true; - frame.timestamp = timestamp; - frame.keyFrame = samples_frame.key_frame() != 0; - } - } else if (doData) { - if (!readPacket) { - readPacket = true; - continue; - } - // Export the stream byte data for non audio / video frames - frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer(); - done = true; - } - } - return frame; - - } - } - + done = true; + frame.timestamp = timestamp; + frame.keyFrame = picture.key_frame() != 0; + } + } + } else if (doAudio && audio_st != null && pkt.stream_index() == audio_st.index()) { + // Decode audio frame + if (readPacket) { + ret = avcodec_send_packet(audio_c, pkt); + if (ret < 0) { + throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding."); + } + } + + // Did we get an audio frame? 
+ got_frame[0] = 0; + while (ret >= 0 && !done) { + ret = avcodec_receive_frame(audio_c, samples_frame); + if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { + readPacket = true; + break; + } else if (ret < 0) { + throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding."); + } + got_frame[0] = 1; + + long pts = samples_frame.best_effort_timestamp(); + AVRational time_base = audio_st.time_base(); + timestamp = 1000000L * pts * time_base.num() / time_base.den(); + frame.samples = samples_buf; + /* if a frame has been decoded, output it */ + if (doProcessing) { + processSamples(); + } + done = true; + frame.timestamp = timestamp; + frame.keyFrame = samples_frame.key_frame() != 0; + } + } else if (doData) { + if (!readPacket) { + readPacket = true; + continue; + } + // Export the stream byte data for non audio / video frames + frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer(); + done = true; + } + } + + // Simulate the "-re" parameter in ffmpeg + if (atFrameRate) { + if (startTime == 0) { + startTime = System.currentTimeMillis(); + } else { + long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - startTime); + if (delay > 0) { + Thread.sleep(delay); + } + } + } + return frame; + + } catch (InterruptedException e) { + throw new RuntimeException(e.getMessage()); + } + + } + public synchronized AVPacket grabPacket() throws Exception { if (oc == null || oc.isNull()) { throw new Exception("Could not grab: No AVFormatContext. 
(Has start() been called?)"); From 6838c854d7e340befcae20d144c2138f68799ec2 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Fri, 4 Jun 2021 17:48:59 +0800 Subject: [PATCH 03/15] use "re" option add global real time grab like ffmpeg --- .../org/bytedeco/javacv/FFmpegFrameGrabber.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index 8df206b0..e55b1781 100644 --- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -56,10 +56,7 @@ import java.io.InputStream; import java.nio.Buffer; import java.nio.ByteBuffer; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; import org.bytedeco.javacpp.BytePointer; import org.bytedeco.javacpp.DoublePointer; @@ -389,6 +386,7 @@ static class SeekCallback extends Seek_Pointer_long_int { private boolean frameGrabbed; private Frame frame; private long startTime; + private boolean globalAtFrameRate; private volatile boolean started = false; public boolean isCloseInputStream() { @@ -850,6 +848,11 @@ public void startUnsafe() throws Exception { startUnsafe(true); } public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { + String reValue = this.getOption("re"); + if (Objects.equals(reValue,"true")) { + this.globalAtFrameRate = true; + } + try (PointerScope scope = new PointerScope()) { if (oc != null && !oc.isNull()) { @@ -1052,7 +1055,7 @@ public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { samples_buf = new Buffer[] { null }; } started = true; - + } } @@ -1454,7 +1457,7 @@ private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean } // Simulate the "-re" parameter in ffmpeg - if (atFrameRate) { + if (atFrameRate||globalAtFrameRate) { if 
(startTime == 0) { startTime = System.currentTimeMillis(); } else { From c93de18fd2a04157d619bffd8d9c7ff5fdcffd80 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Fri, 4 Jun 2021 18:42:31 +0800 Subject: [PATCH 04/15] add two unit test for realtime change --- .../javacv/FrameGrabberRealTimeTest.java | 58 +++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java new file mode 100644 index 00000000..3083f418 --- /dev/null +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java @@ -0,0 +1,58 @@ +package org.bytedeco.javacv; + +import org.bytedeco.ffmpeg.global.avcodec; +import org.bytedeco.javacpp.Loader; +import org.junit.Test; + +import java.io.File; + +public class FrameGrabberRealTimeTest { + public static final String url = ""; + public static final String file = ""; + + public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { + FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); + grabber.start(); + FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); + try { + recorder.setFormat("flv"); + recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); + recorder.setFrameRate(grabber.getFrameRate()); + recorder.setTimestamp(grabber.getTimestamp()); + recorder.start(); + Frame frame = null; + while ((frame = grabber.grabFrame(true,true,true,false,true,true)) != null) { + recorder.record(frame); + } + } finally { + recorder.stop(); + recorder.release(); + grabber.stop(); + grabber.release(); + } + + } + public void testWithGlobalConfig() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { + FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); + 
grabber.setOption("re","true"); + grabber.start(); + FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); + try { + recorder.setFormat("flv"); + recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); + recorder.setFrameRate(grabber.getFrameRate()); + recorder.setTimestamp(grabber.getTimestamp()); + recorder.start(); + Frame frame = null; + while ((frame = grabber.grabFrame(true,true,true,false,true,true)) != null) { + recorder.record(frame); + } + } finally { + recorder.stop(); + recorder.release(); + grabber.stop(); + grabber.release(); + } + + } +} From e3e33a7d686f068d55549e44f08d34e60a6b2865 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Fri, 4 Jun 2021 19:09:17 +0800 Subject: [PATCH 05/15] fix the test bug --- .../java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java index 3083f418..741709a6 100644 --- a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java @@ -10,6 +10,7 @@ public class FrameGrabberRealTimeTest { public static final String url = ""; public static final String file = ""; + @Test public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); grabber.start(); @@ -32,6 +33,7 @@ public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFram } } + @Test public void testWithGlobalConfig() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); grabber.setOption("re","true"); @@ -44,7 +46,7 @@ public void testWithGlobalConfig() throws FFmpegFrameGrabber.Exception, FFmpegFr 
recorder.setTimestamp(grabber.getTimestamp()); recorder.start(); Frame frame = null; - while ((frame = grabber.grabFrame(true,true,true,false,true,true)) != null) { + while ((frame = grabber.grab()) != null) { recorder.record(frame); } } finally { From 415f84f5346565c93feb98ab392690008785a051 Mon Sep 17 00:00:00 2001 From: wangxi Date: Fri, 4 Jun 2021 21:03:17 +0800 Subject: [PATCH 06/15] remove atFrameRate from grab() remove "re" check from startUnsafe() add a atFrameRate field --- .../javacv/FrameGrabberRealTimeTest.java | 29 ++++--------------- .../bytedeco/javacv/FFmpegFrameGrabber.java | 25 ++++++++-------- 2 files changed, 18 insertions(+), 36 deletions(-) diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java index 741709a6..26603309 100644 --- a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java @@ -2,6 +2,7 @@ import org.bytedeco.ffmpeg.global.avcodec; import org.bytedeco.javacpp.Loader; +import org.junit.Assert; import org.junit.Test; import java.io.File; @@ -10,33 +11,10 @@ public class FrameGrabberRealTimeTest { public static final String url = ""; public static final String file = ""; - @Test - public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { - FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); - grabber.start(); - FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); - try { - recorder.setFormat("flv"); - recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); - recorder.setFrameRate(grabber.getFrameRate()); - recorder.setTimestamp(grabber.getTimestamp()); - recorder.start(); - Frame frame = null; - while ((frame = grabber.grabFrame(true,true,true,false,true,true)) != null) { - recorder.record(frame); - } - } 
finally { - recorder.stop(); - recorder.release(); - grabber.stop(); - grabber.release(); - } - - } @Test public void testWithGlobalConfig() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); - grabber.setOption("re","true"); + grabber.setAtFrameRate(true); grabber.start(); FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); try { @@ -46,7 +24,10 @@ public void testWithGlobalConfig() throws FFmpegFrameGrabber.Exception, FFmpegFr recorder.setTimestamp(grabber.getTimestamp()); recorder.start(); Frame frame = null; + long startTime=System.currentTimeMillis(); while ((frame = grabber.grab()) != null) { + //delay < 10ms + Assert.assertTrue(frame.timestamp / 1000-(System.currentTimeMillis()-startTime)<10000); recorder.record(frame); } } finally { diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index e55b1781..913fd8d1 100644 --- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -386,7 +386,7 @@ static class SeekCallback extends Seek_Pointer_long_int { private boolean frameGrabbed; private Frame frame; private long startTime; - private boolean globalAtFrameRate; + private boolean atFrameRate=false; private volatile boolean started = false; public boolean isCloseInputStream() { @@ -421,6 +421,14 @@ public boolean hasAudio() { } } + public boolean isAtFrameRate() { + return atFrameRate; + } + + public void setAtFrameRate(boolean atFrameRate) { + this.atFrameRate = atFrameRate; + } + @Override public String getFormat() { if (oc == null) { return super.getFormat(); @@ -848,11 +856,6 @@ public void startUnsafe() throws Exception { startUnsafe(true); } public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { - String reValue = 
this.getOption("re"); - if (Objects.equals(reValue,"true")) { - this.globalAtFrameRate = true; - } - try (PointerScope scope = new PointerScope()) { if (oc != null && !oc.isNull()) { @@ -1295,13 +1298,11 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b return grabFrame(doAudio, doVideo, doProcessing, keyFrames, true); } public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { - return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, false); + return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData); } - public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData,boolean atFrameRate) throws Exception { - return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, atFrameRate); - } + - private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData ,boolean atFrameRate)throws Exception{ + private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData)throws Exception{ try (PointerScope scope = new PointerScope()) { if (oc == null || oc.isNull()) { @@ -1457,7 +1458,7 @@ private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean } // Simulate the "-re" parameter in ffmpeg - if (atFrameRate||globalAtFrameRate) { + if (atFrameRate) { if (startTime == 0) { startTime = System.currentTimeMillis(); } else { From 2038bcc5fd8c348b342c4c3a11d0b19a36fc5e25 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:14:57 +0800 Subject: [PATCH 07/15] add grabAtFrameRate to FrameGrabber.java --- .../javacv/FrameGrabberRealTimeTest.java | 35 +++++++++++++++++++ .../org/bytedeco/javacv/FrameGrabber.java | 16 +++++++++ 2 files changed, 51 insertions(+) create mode 100644 
platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java new file mode 100644 index 00000000..54b85e31 --- /dev/null +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java @@ -0,0 +1,35 @@ +package org.bytedeco.javacv; + +import org.bytedeco.ffmpeg.global.avcodec; +import org.junit.Test; + +public class FrameGrabberRealTimeTest { + public static final String url = "rtmp://10.69.180.26:21935/stream/sister"; + public static final String file = "E:\\CloudMusic\\MV\\好妹妹 - 云野.mp4"; + + @Test + public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { + FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); + grabber.start(); + FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); + try { + recorder.setFormat("flv"); + recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); + recorder.setFrameRate(grabber.getFrameRate()); + recorder.setTimestamp(grabber.getTimestamp()); + recorder.start(); + Frame frame = null; + while ((frame = grabber.grabAtFrameRate()) != null) { + recorder.record(frame); + } + } catch (InterruptedException | FrameGrabber.Exception e) { + e.printStackTrace(); + } finally { + recorder.stop(); + recorder.release(); + grabber.stop(); + grabber.release(); + } + + } +} diff --git a/src/main/java/org/bytedeco/javacv/FrameGrabber.java b/src/main/java/org/bytedeco/javacv/FrameGrabber.java index b15f34fb..5e10cf32 100644 --- a/src/main/java/org/bytedeco/javacv/FrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FrameGrabber.java @@ -22,6 +22,8 @@ package org.bytedeco.javacv; +import org.bytedeco.librealsense.frame; + import java.beans.PropertyEditorSupport; import java.io.Closeable; import java.io.File; @@ -201,6 +203,7 @@ public 
static enum SampleMode { protected int frameNumber = 0; protected long timestamp = 0; protected int maxDelay = -1; + protected long startTime = 0; public int getVideoStream() { return videoStream; @@ -724,4 +727,17 @@ public void release() throws Exception { public Array createArray(FrameGrabber[] frameGrabbers) { return new Array(frameGrabbers); } + + public Frame grabAtFrameRate() throws Exception, InterruptedException { + Frame frame = grab(); + if (startTime == 0) { + startTime = System.currentTimeMillis(); + } else { + long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - startTime); + if (delay > 0) { + Thread.sleep(delay); + } + } + return frame; + } } From 9fe083686212c146ecf583fbc33b34273c18107f Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:16:41 +0800 Subject: [PATCH 08/15] remove unused import --- src/main/java/org/bytedeco/javacv/FrameGrabber.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FrameGrabber.java b/src/main/java/org/bytedeco/javacv/FrameGrabber.java index 5e10cf32..cdd59c93 100644 --- a/src/main/java/org/bytedeco/javacv/FrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FrameGrabber.java @@ -22,8 +22,6 @@ package org.bytedeco.javacv; -import org.bytedeco.librealsense.frame; - import java.beans.PropertyEditorSupport; import java.io.Closeable; import java.io.File; From b74706d1dbc502ba4e526a9a656df9d8ef26557e Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:19:10 +0800 Subject: [PATCH 09/15] Revert "remove atFrameRate from grab() remove "re" check from startUnsafe() add a atFrameRate field" This reverts commit 415f84f5 --- .../bytedeco/javacv/FFmpegFrameGrabber.java | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index 913fd8d1..e55b1781 100644 --- 
a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -386,7 +386,7 @@ static class SeekCallback extends Seek_Pointer_long_int { private boolean frameGrabbed; private Frame frame; private long startTime; - private boolean atFrameRate=false; + private boolean globalAtFrameRate; private volatile boolean started = false; public boolean isCloseInputStream() { @@ -421,14 +421,6 @@ public boolean hasAudio() { } } - public boolean isAtFrameRate() { - return atFrameRate; - } - - public void setAtFrameRate(boolean atFrameRate) { - this.atFrameRate = atFrameRate; - } - @Override public String getFormat() { if (oc == null) { return super.getFormat(); @@ -856,6 +848,11 @@ public void startUnsafe() throws Exception { startUnsafe(true); } public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { + String reValue = this.getOption("re"); + if (Objects.equals(reValue,"true")) { + this.globalAtFrameRate = true; + } + try (PointerScope scope = new PointerScope()) { if (oc != null && !oc.isNull()) { @@ -1298,11 +1295,13 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b return grabFrame(doAudio, doVideo, doProcessing, keyFrames, true); } public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { - return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData); + return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, false); } - + public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData,boolean atFrameRate) throws Exception { + return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, atFrameRate); + } - private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData)throws Exception{ + private synchronized Frame _grabFrame(boolean 
doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData ,boolean atFrameRate)throws Exception{ try (PointerScope scope = new PointerScope()) { if (oc == null || oc.isNull()) { @@ -1458,7 +1457,7 @@ private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean } // Simulate the "-re" parameter in ffmpeg - if (atFrameRate) { + if (atFrameRate||globalAtFrameRate) { if (startTime == 0) { startTime = System.currentTimeMillis(); } else { From b97799ce6eb72fc417c0440aa5aedee2d04ba3b3 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:19:37 +0800 Subject: [PATCH 10/15] Revert "use "re" option add global real time grab like ffmpeg" This reverts commit 6838c854 --- .../org/bytedeco/javacv/FFmpegFrameGrabber.java | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index e55b1781..8df206b0 100644 --- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -56,7 +56,10 @@ import java.io.InputStream; import java.nio.Buffer; import java.nio.ByteBuffer; -import java.util.*; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; import java.util.Map.Entry; import org.bytedeco.javacpp.BytePointer; import org.bytedeco.javacpp.DoublePointer; @@ -386,7 +389,6 @@ static class SeekCallback extends Seek_Pointer_long_int { private boolean frameGrabbed; private Frame frame; private long startTime; - private boolean globalAtFrameRate; private volatile boolean started = false; public boolean isCloseInputStream() { @@ -848,11 +850,6 @@ public void startUnsafe() throws Exception { startUnsafe(true); } public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { - String reValue = this.getOption("re"); - if (Objects.equals(reValue,"true")) { - 
this.globalAtFrameRate = true; - } - try (PointerScope scope = new PointerScope()) { if (oc != null && !oc.isNull()) { @@ -1055,7 +1052,7 @@ public synchronized void startUnsafe(boolean findStreamInfo) throws Exception { samples_buf = new Buffer[] { null }; } started = true; - + } } @@ -1457,7 +1454,7 @@ private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean } // Simulate the "-re" parameter in ffmpeg - if (atFrameRate||globalAtFrameRate) { + if (atFrameRate) { if (startTime == 0) { startTime = System.currentTimeMillis(); } else { From 81223eef90788eb0996cd18878d769bbba55f22e Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:19:48 +0800 Subject: [PATCH 11/15] Revert "add atFrame Option to grabber" This reverts commit 9b6e4cea --- .../bytedeco/javacv/FFmpegFrameGrabber.java | 338 ++++++++---------- 1 file changed, 158 insertions(+), 180 deletions(-) diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java index 8df206b0..5dafb479 100644 --- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java @@ -388,7 +388,7 @@ static class SeekCallback extends Seek_Pointer_long_int { private int samples_channels, samples_format, samples_rate; private boolean frameGrabbed; private Frame frame; - private long startTime; + private volatile boolean started = false; public boolean isCloseInputStream() { @@ -1291,187 +1291,165 @@ public Frame grabKeyFrame() throws Exception { public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames) throws Exception { return grabFrame(doAudio, doVideo, doProcessing, keyFrames, true); } - public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { - return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, false); - } - public Frame grabFrame(boolean 
doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData,boolean atFrameRate) throws Exception { - return _grabFrame(doAudio, doVideo, doProcessing, keyFrames, doData, atFrameRate); - } - - private synchronized Frame _grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData ,boolean atFrameRate)throws Exception{ - try (PointerScope scope = new PointerScope()) { - - if (oc == null || oc.isNull()) { - throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)"); - } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null)) { - return null; - } - if (!started) { - throw new Exception("start() was not called successfully!"); - } - - boolean videoFrameGrabbed = frameGrabbed && frame.image != null; - boolean audioFrameGrabbed = frameGrabbed && frame.samples != null; - frameGrabbed = false; - frame.keyFrame = false; - frame.imageWidth = 0; - frame.imageHeight = 0; - frame.imageDepth = 0; - frame.imageChannels = 0; - frame.imageStride = 0; - frame.image = null; - frame.sampleRate = 0; - frame.audioChannels = 0; - frame.samples = null; - frame.data = null; - frame.opaque = null; - if (doVideo && videoFrameGrabbed) { - if (doProcessing) { - processImage(); - } - frame.keyFrame = picture.key_frame() != 0; - return frame; - } else if (doAudio && audioFrameGrabbed) { - if (doProcessing) { - processSamples(); - } - frame.keyFrame = samples_frame.key_frame() != 0; - return frame; - } - boolean done = false; - boolean readPacket = pkt.stream_index() == -1; - while (!done) { - int ret = 0; - if (readPacket) { - if (pkt.stream_index() != -1) { - // Free the packet that was allocated by av_read_frame - av_packet_unref(pkt); - } - if ((ret = av_read_frame(oc, pkt)) < 0) { - if (doVideo && video_st != null) { - // The video codec may have buffered some frames - pkt.stream_index(video_st.index()); - pkt.flags(AV_PKT_FLAG_KEY); - pkt.data(null); - pkt.size(0); - } else { - 
pkt.stream_index(-1); - return null; - } - } - } - - frame.streamIndex = pkt.stream_index(); - - // Is this a packet from the video stream? - if (doVideo && video_st != null && pkt.stream_index() == video_st.index() - && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) { - // Decode video frame - if (readPacket) { - ret = avcodec_send_packet(video_c, pkt); - if (pkt.data() == null && pkt.size() == 0) { - pkt.stream_index(-1); - if (ret < 0) { - return null; - } - } - if (ret < 0) { - throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding."); - } - } - - // Did we get a video frame? - got_frame[0] = 0; - while (ret >= 0 && !done) { - ret = avcodec_receive_frame(video_c, picture); - if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { - readPacket = true; - break; - } else if (ret < 0) { - throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding."); - } - got_frame[0] = 1; - - if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) { - long pts = picture.best_effort_timestamp(); - AVRational time_base = video_st.time_base(); - timestamp = 1000000L * pts * time_base.num() / time_base.den(); - // best guess, AVCodecContext.frame_number = number of decoded frames... - frameNumber = (int)Math.round(timestamp * getFrameRate() / 1000000L); - frame.image = image_buf; - if (doProcessing) { - processImage(); - } + public synchronized Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, boolean keyFrames, boolean doData) throws Exception { + try (PointerScope scope = new PointerScope()) { + + if (oc == null || oc.isNull()) { + throw new Exception("Could not grab: No AVFormatContext. 
(Has start() been called?)"); + } else if ((!doVideo || video_st == null) && (!doAudio || audio_st == null)) { + return null; + } + if (!started) { + throw new Exception("start() was not called successfully!"); + } + + boolean videoFrameGrabbed = frameGrabbed && frame.image != null; + boolean audioFrameGrabbed = frameGrabbed && frame.samples != null; + frameGrabbed = false; + frame.keyFrame = false; + frame.imageWidth = 0; + frame.imageHeight = 0; + frame.imageDepth = 0; + frame.imageChannels = 0; + frame.imageStride = 0; + frame.image = null; + frame.sampleRate = 0; + frame.audioChannels = 0; + frame.samples = null; + frame.data = null; + frame.opaque = null; + if (doVideo && videoFrameGrabbed) { + if (doProcessing) { + processImage(); + } + frame.keyFrame = picture.key_frame() != 0; + return frame; + } else if (doAudio && audioFrameGrabbed) { + if (doProcessing) { + processSamples(); + } + frame.keyFrame = samples_frame.key_frame() != 0; + return frame; + } + boolean done = false; + boolean readPacket = pkt.stream_index() == -1; + while (!done) { + int ret = 0; + if (readPacket) { + if (pkt.stream_index() != -1) { + // Free the packet that was allocated by av_read_frame + av_packet_unref(pkt); + } + if ((ret = av_read_frame(oc, pkt)) < 0) { + if (doVideo && video_st != null) { + // The video codec may have buffered some frames + pkt.stream_index(video_st.index()); + pkt.flags(AV_PKT_FLAG_KEY); + pkt.data(null); + pkt.size(0); + } else { + pkt.stream_index(-1); + return null; + } + } + } + + frame.streamIndex = pkt.stream_index(); + + // Is this a packet from the video stream? 
+ if (doVideo && video_st != null && pkt.stream_index() == video_st.index() + && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) { + // Decode video frame + if (readPacket) { + ret = avcodec_send_packet(video_c, pkt); + if (pkt.data() == null && pkt.size() == 0) { + pkt.stream_index(-1); + if (ret < 0) { + return null; + } + } + if (ret < 0) { + throw new Exception("avcodec_send_packet() error " + ret + ": Error sending a video packet for decoding."); + } + } + + // Did we get a video frame? + got_frame[0] = 0; + while (ret >= 0 && !done) { + ret = avcodec_receive_frame(video_c, picture); + if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { + readPacket = true; + break; + } else if (ret < 0) { + throw new Exception("avcodec_receive_frame() error " + ret + ": Error during video decoding."); + } + got_frame[0] = 1; + + if (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I) { + long pts = picture.best_effort_timestamp(); + AVRational time_base = video_st.time_base(); + timestamp = 1000000L * pts * time_base.num() / time_base.den(); + // best guess, AVCodecContext.frame_number = number of decoded frames... + frameNumber = (int)Math.round(timestamp * getFrameRate() / 1000000L); + frame.image = image_buf; + if (doProcessing) { + processImage(); + } /* the picture is allocated by the decoder. no need to free it */ - done = true; - frame.timestamp = timestamp; - frame.keyFrame = picture.key_frame() != 0; - } - } - } else if (doAudio && audio_st != null && pkt.stream_index() == audio_st.index()) { - // Decode audio frame - if (readPacket) { - ret = avcodec_send_packet(audio_c, pkt); - if (ret < 0) { - throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding."); - } - } - - // Did we get an audio frame? 
- got_frame[0] = 0; - while (ret >= 0 && !done) { - ret = avcodec_receive_frame(audio_c, samples_frame); - if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { - readPacket = true; - break; - } else if (ret < 0) { - throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding."); - } - got_frame[0] = 1; - - long pts = samples_frame.best_effort_timestamp(); - AVRational time_base = audio_st.time_base(); - timestamp = 1000000L * pts * time_base.num() / time_base.den(); - frame.samples = samples_buf; - /* if a frame has been decoded, output it */ - if (doProcessing) { - processSamples(); - } - done = true; - frame.timestamp = timestamp; - frame.keyFrame = samples_frame.key_frame() != 0; - } - } else if (doData) { - if (!readPacket) { - readPacket = true; - continue; - } - // Export the stream byte data for non audio / video frames - frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer(); - done = true; - } - } - - // Simulate the "-re" parameter in ffmpeg - if (atFrameRate) { - if (startTime == 0) { - startTime = System.currentTimeMillis(); - } else { - long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - startTime); - if (delay > 0) { - Thread.sleep(delay); - } - } - } - return frame; - - } catch (InterruptedException e) { - throw new RuntimeException(e.getMessage()); - } - - } - + done = true; + frame.timestamp = timestamp; + frame.keyFrame = picture.key_frame() != 0; + } + } + } else if (doAudio && audio_st != null && pkt.stream_index() == audio_st.index()) { + // Decode audio frame + if (readPacket) { + ret = avcodec_send_packet(audio_c, pkt); + if (ret < 0) { + throw new Exception("avcodec_send_packet() error " + ret + ": Error sending an audio packet for decoding."); + } + } + + // Did we get an audio frame? 
+ got_frame[0] = 0; + while (ret >= 0 && !done) { + ret = avcodec_receive_frame(audio_c, samples_frame); + if (ret == AVERROR_EAGAIN() || ret == AVERROR_EOF()) { + readPacket = true; + break; + } else if (ret < 0) { + throw new Exception("avcodec_receive_frame() error " + ret + ": Error during audio decoding."); + } + got_frame[0] = 1; + + long pts = samples_frame.best_effort_timestamp(); + AVRational time_base = audio_st.time_base(); + timestamp = 1000000L * pts * time_base.num() / time_base.den(); + frame.samples = samples_buf; + /* if a frame has been decoded, output it */ + if (doProcessing) { + processSamples(); + } + done = true; + frame.timestamp = timestamp; + frame.keyFrame = samples_frame.key_frame() != 0; + } + } else if (doData) { + if (!readPacket) { + readPacket = true; + continue; + } + // Export the stream byte data for non audio / video frames + frame.data = pkt.data().position(0).capacity(pkt.size()).asByteBuffer(); + done = true; + } + } + return frame; + + } + } + public synchronized AVPacket grabPacket() throws Exception { if (oc == null || oc.isNull()) { throw new Exception("Could not grab: No AVFormatContext. 
(Has start() been called?)"); From 40406b52822b3fab860a38b360d658142f5f6933 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:19:58 +0800 Subject: [PATCH 12/15] Revert "add a realtimeStreaming example" This reverts commit 89d952c8 --- samples/FFmpegPushStreamingRealTime.java | 55 ------------------------ 1 file changed, 55 deletions(-) delete mode 100644 samples/FFmpegPushStreamingRealTime.java diff --git a/samples/FFmpegPushStreamingRealTime.java b/samples/FFmpegPushStreamingRealTime.java deleted file mode 100644 index 4dbcdb0f..00000000 --- a/samples/FFmpegPushStreamingRealTime.java +++ /dev/null @@ -1,55 +0,0 @@ -import org.bytedeco.ffmpeg.global.avcodec; -import org.bytedeco.javacv.FFmpegFrameGrabber; -import org.bytedeco.javacv.FFmpegFrameRecorder; -import org.bytedeco.javacv.Frame; - - -/** - * @author wangxi <346461036@qq.com> - * it is a example for simulate ffmpeg param "-re" - * Read input at native frame rate. - * Mainly used to simulate a grab device, or live input stream (e.g. when reading from a file). - * Should not be used with actual grab devices or live input streams (where it can cause packet loss). 
- */ -public class FFmpegPushStreamingRealTime { - - public static final String RTMP_SERVER_URL = ""; - - public static final String LOCAL_FILE = ""; - - public static void main(String[] args) throws Exception { - FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(RTMP_SERVER_URL); - grabber.start(); - FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(LOCAL_FILE, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); - try { - recorder.setFormat("flv"); - recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); - recorder.setFrameRate(grabber.getFrameRate()); - recorder.setTimestamp(grabber.getTimestamp()); - recorder.start(); - - /** - * grab() may take some time,so we should record by real timestamp - */ - long begin = System.currentTimeMillis(); - Frame frame = null; - while ((frame = grabber.grab()) != null) { - long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - begin); - /** - * If the streaming is too fast, we sleep for a period of time according to the delay - */ - if (delay > 0) { - Thread.sleep(delay); - } - recorder.record(frame); - } - } finally { - recorder.stop(); - recorder.release(); - grabber.stop(); - grabber.release(); - } - - } - -} \ No newline at end of file From 49024a22c8c84ff56951af2be6f0920f93479bd8 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:21:30 +0800 Subject: [PATCH 13/15] remove something --- .../javacv/FrameGrabberRealTimeTest.java | 4 +- .../javacv/FlyCaptureFrameGrabber.java | 480 ------------------ 2 files changed, 2 insertions(+), 482 deletions(-) delete mode 100644 src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java index 54b85e31..4aa9efb1 100644 --- a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java 
@@ -4,8 +4,8 @@ import org.junit.Test; public class FrameGrabberRealTimeTest { - public static final String url = "rtmp://10.69.180.26:21935/stream/sister"; - public static final String file = "E:\\CloudMusic\\MV\\好妹妹 - 云野.mp4"; + public static final String url = ""; + public static final String file = ""; @Test public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, FFmpegFrameRecorder.Exception { diff --git a/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java deleted file mode 100644 index 01501227..00000000 --- a/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright (C) 2009-2012 Samuel Audet - * - * Licensed either under the Apache License, Version 2.0, or (at your option) - * under the terms of the GNU General Public License as published by - * the Free Software Foundation (subject to the "Classpath" exception), - * either version 2, or any later version (collectively, the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * http://www.gnu.org/licenses/ - * http://www.gnu.org/software/classpath/license.html - * - * or as provided in the LICENSE.txt file that accompanied this code. - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.bytedeco.javacv; - -import java.io.File; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.ShortBuffer; -import org.bytedeco.javacpp.BytePointer; -import org.bytedeco.javacpp.Loader; - -import org.bytedeco.flycapture.PGRFlyCapture.*; -import org.bytedeco.opencv.opencv_core.*; -import org.bytedeco.opencv.opencv_imgproc.*; -import static org.bytedeco.flycapture.global.PGRFlyCapture.*; -import static org.bytedeco.opencv.global.opencv_core.*; -import static org.bytedeco.opencv.global.opencv_imgproc.*; - -/** - * - * @author Samuel Audet - */ -public class FlyCaptureFrameGrabber extends FrameGrabber { - public static String[] getDeviceDescriptions() throws Exception { - tryLoad(); - - int[] count = new int[1]; - int error = flycaptureBusCameraCount(count); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureBusCameraCount() Error " + error); - } - int c = count[0]; - String[] descriptions = new String[c]; - - if (c > 0) { - FlyCaptureInfoEx info = new FlyCaptureInfoEx(c); - error = flycaptureBusEnumerateCamerasEx(info, count); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureBusEnumerateCamerasEx() Error " + error); - } - - for (int i = 0; i < descriptions.length; i++) { - info.position(i); - descriptions[i] = info.pszVendorName() + " " + - info.pszModelName() + " " + info.SerialNumber(); - } - } - - return descriptions; - } - - public static FlyCaptureFrameGrabber createDefault(File deviceFile) throws Exception { throw new Exception(FlyCaptureFrameGrabber.class + " does not support device files."); } - public static FlyCaptureFrameGrabber createDefault(String devicePath) throws Exception { throw new Exception(FlyCaptureFrameGrabber.class + " does not support device paths."); } - public static FlyCaptureFrameGrabber createDefault(int deviceNumber) throws Exception { return new FlyCaptureFrameGrabber(deviceNumber); } - - private static Exception loadingException = null; - public static void 
tryLoad() throws Exception { - if (loadingException != null) { - throw loadingException; - } else { - try { - Loader.load(org.bytedeco.javacpp.PGRFlyCapture.class); - } catch (Throwable t) { - throw loadingException = new Exception("Failed to load " + FlyCaptureFrameGrabber.class, t); - } - } - } - - public FlyCaptureFrameGrabber(int deviceNumber) throws Exception { - int error = flycaptureCreateContext(context); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureCreateContext() Error " + error); - } - error = flycaptureInitializePlus(context, deviceNumber, numBuffers, (BytePointer)null); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureInitialize() Error " + error); - } - } - public void release() throws Exception { - if (context != null) { - stop(); - int error = flycaptureDestroyContext(context); - context = null; - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureDestroyContext() Error " + error); - } - } - } - @Override protected void finalize() throws Throwable { - super.finalize(); - release(); - } - - public static final int - INITIALIZE = 0x000, - TRIGGER_INQ = 0x530, - IS_CAMERA_POWER = 0x400, - CAMERA_POWER = 0x610, - SOFTWARE_TRIGGER = 0x62C, - SOFT_ASYNC_TRIGGER = 0x102C, - IMAGE_DATA_FORMAT = 0x1048; - - private FlyCaptureContext context = new FlyCaptureContext(null); - private FlyCaptureImage raw_image = new FlyCaptureImage(); - private FlyCaptureImage conv_image = new FlyCaptureImage(); - private IplImage temp_image, return_image = null; - private FrameConverter converter = new OpenCVFrameConverter.ToIplImage(); - private final int[] regOut = new int[1]; - private final float[] outFloat = new float[1]; - private final float[] gammaOut = new float[1]; - - @Override public double getGamma() { - return Float.isNaN(gammaOut[0]) || Float.isInfinite(gammaOut[0]) || gammaOut[0] == 0.0f ? 2.2 : gammaOut[0]; - } - - @Override public int getImageWidth() { - return return_image == null ? 
super.getImageWidth() : return_image.width(); - } - - @Override public int getImageHeight() { - return return_image == null ? super.getImageHeight() : return_image.height(); - } - - @Override public double getFrameRate() { - if (context == null || context.isNull()) { - return super.getFrameRate(); - } else { - flycaptureGetCameraAbsProperty(context, FLYCAPTURE_FRAME_RATE, outFloat); - return outFloat[0]; - } - } - - @Override public void setImageMode(ImageMode imageMode) { - if (imageMode != this.imageMode) { - temp_image = null; - return_image = null; - } - super.setImageMode(imageMode); - } - - public void start() throws Exception { - int f = FLYCAPTURE_FRAMERATE_ANY; - if (frameRate <= 0) { - f = FLYCAPTURE_FRAMERATE_ANY; - } else if (frameRate <= 1.876) { - f = FLYCAPTURE_FRAMERATE_1_875; - } else if (frameRate <= 3.76) { - f = FLYCAPTURE_FRAMERATE_3_75; - } else if (frameRate <= 7.51) { - f = FLYCAPTURE_FRAMERATE_7_5; - } else if (frameRate <= 15.01) { - f = FLYCAPTURE_FRAMERATE_15; - } else if (frameRate <= 30.01) { - f = FLYCAPTURE_FRAMERATE_30; - } else if (frameRate <= 60.01) { - f = FLYCAPTURE_FRAMERATE_60; - } else if (frameRate <= 120.01) { - f = FLYCAPTURE_FRAMERATE_120; - } else if (frameRate <= 240.01) { - f = FLYCAPTURE_FRAMERATE_240; - } - - int c = FLYCAPTURE_VIDEOMODE_ANY; - if (imageMode == ImageMode.COLOR || imageMode == ImageMode.RAW) { - if (imageWidth <= 0 || imageHeight <= 0) { - c = FLYCAPTURE_VIDEOMODE_ANY; - } else if (imageWidth <= 640 && imageHeight <= 480) { - c = FLYCAPTURE_VIDEOMODE_640x480RGB; - } else if (imageWidth <= 800 && imageHeight <= 600) { - c = FLYCAPTURE_VIDEOMODE_800x600RGB; - } else if (imageWidth <= 1024 && imageHeight <= 768) { - c = FLYCAPTURE_VIDEOMODE_1024x768RGB; - } else if (imageWidth <= 1280 && imageHeight <= 960) { - c = FLYCAPTURE_VIDEOMODE_1280x960RGB; - } else if (imageWidth <= 1600 && imageHeight <= 1200) { - c = FLYCAPTURE_VIDEOMODE_1600x1200RGB; - } - } else if (imageMode == ImageMode.GRAY) { - if 
(imageWidth <= 0 || imageHeight <= 0) { - c = FLYCAPTURE_VIDEOMODE_ANY; - } else if (imageWidth <= 640 && imageHeight <= 480) { - c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_640x480Y16 : FLYCAPTURE_VIDEOMODE_640x480Y8; - } else if (imageWidth <= 800 && imageHeight <= 600) { - c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_800x600Y16 : FLYCAPTURE_VIDEOMODE_800x600Y8; - } else if (imageWidth <= 1024 && imageHeight <= 768) { - c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1024x768Y16 : FLYCAPTURE_VIDEOMODE_1024x768Y8; - } else if (imageWidth <= 1280 && imageHeight <= 960) { - c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1280x960Y16 : FLYCAPTURE_VIDEOMODE_1280x960Y8; - } else if (imageWidth <= 1600 && imageHeight <= 1200) { - c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1600x1200Y16 : FLYCAPTURE_VIDEOMODE_1600x1200Y8; - } - } - - // set or reset trigger mode - int[] iPolarity = new int[1]; - int[] iSource = new int[1]; - int[] iRawValue = new int[1]; - int[] iMode = new int[1]; - int error = flycaptureGetTrigger(context, (boolean[])null, iPolarity, iSource, iRawValue, iMode, null); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureGetTrigger() Error " + error); - } - error = flycaptureSetTrigger(context, triggerMode, iPolarity[0], 7, 14, 0); - if (error != FLYCAPTURE_OK) { - // try with trigger mode 0 instead - error = flycaptureSetTrigger(context, true, iPolarity[0], 7, 0, 0); - } - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureSetTrigger() Error " + error); - } - if (triggerMode) { - waitForTriggerReady(); - } - - // try to match the endianness to our platform - error = flycaptureGetCameraRegister(context, IMAGE_DATA_FORMAT, regOut); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureGetCameraRegister() Error " + error); - } - int reg; - if (ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN)) { - reg = regOut[0] | 0x1; - } else { - reg = regOut[0] & ~0x1; - } - error = flycaptureSetCameraRegister(context, IMAGE_DATA_FORMAT, reg); - if (error != FLYCAPTURE_OK) { - throw 
new Exception("flycaptureSetCameraRegister() Error " + error); - } - - error = flycaptureSetBusSpeed(context, FLYCAPTURE_S_FASTEST, FLYCAPTURE_S_FASTEST); - if (error != FLYCAPTURE_OK) { - error = flycaptureSetBusSpeed(context, - FLYCAPTURE_ANY, FLYCAPTURE_ANY); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureSetBusSpeed() Error " + error); - } - } - - if (gamma != 0.0) { - error = flycaptureSetCameraAbsProperty(context, FLYCAPTURE_GAMMA, (float)gamma); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureSetCameraAbsProperty() Error " + error + ": Could not set gamma."); - } - } - error = flycaptureGetCameraAbsProperty(context, FLYCAPTURE_GAMMA, gammaOut); - if (error != FLYCAPTURE_OK) { - gammaOut[0] = 2.2f; - } - - error = flycaptureStart(context, c, f); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureStart() Error " + error); - } - error = flycaptureSetGrabTimeoutEx(context, timeout); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureSetGrabTimeoutEx() Error " + error); - } - } - - private void waitForTriggerReady() throws Exception { - // wait for trigger to be ready... 
- long time = System.currentTimeMillis(); - do { - int error = flycaptureGetCameraRegister(context, SOFTWARE_TRIGGER, regOut); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureGetCameraRegister() Error " + error); - } - if (System.currentTimeMillis() - time > timeout) { - break; - //throw new Exception("waitForTriggerReady() Error: Timeout occured."); - } - } while((regOut[0] >>> 31) != 0); - } - - public void stop() throws Exception { - int error = flycaptureStop(context); - if (error != FLYCAPTURE_OK && error != FLYCAPTURE_FAILED) { - throw new Exception("flycaptureStop() Error " + error); - } - temp_image = null; - return_image = null; - timestamp = 0; - frameNumber = 0; - } - - public void trigger() throws Exception { - waitForTriggerReady(); - int error = flycaptureSetCameraRegister(context, SOFT_ASYNC_TRIGGER, 0x80000000); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureSetCameraRegister() Error " + error); - } - } - - private int getNumChannels(int pixelFormat) { - switch (pixelFormat) { - case FLYCAPTURE_BGR: - case FLYCAPTURE_RGB8: - case FLYCAPTURE_RGB16: - case FLYCAPTURE_S_RGB16: - return 3; - - case FLYCAPTURE_MONO8: - case FLYCAPTURE_MONO16: - case FLYCAPTURE_RAW8: - case FLYCAPTURE_RAW16: - case FLYCAPTURE_S_MONO16: - return 1; - - case FLYCAPTURE_BGRU: - return 4; - - case FLYCAPTURE_411YUV8: - case FLYCAPTURE_422YUV8: - case FLYCAPTURE_444YUV8: - default: - return -1; - } - } - private int getDepth(int pixelFormat) { - switch (pixelFormat) { - case FLYCAPTURE_BGR: - case FLYCAPTURE_RGB8: - case FLYCAPTURE_MONO8: - case FLYCAPTURE_RAW8: - case FLYCAPTURE_BGRU: - return IPL_DEPTH_8U; - - case FLYCAPTURE_MONO16: - case FLYCAPTURE_RAW16: - case FLYCAPTURE_RGB16: - return IPL_DEPTH_16U; - - case FLYCAPTURE_S_MONO16: - case FLYCAPTURE_S_RGB16: - return IPL_DEPTH_16S; - - case FLYCAPTURE_411YUV8: - case FLYCAPTURE_422YUV8: - case FLYCAPTURE_444YUV8: - default: - return IPL_DEPTH_8U; - } - } - - public Frame grab() throws 
Exception { - int error = flycaptureGrabImage2(context, raw_image); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)"); - } - - int w = raw_image.iCols(); - int h = raw_image.iRows(); - int format = raw_image.pixelFormat(); - int depth = getDepth(format); - int stride = raw_image.iRowInc(); - int size = h*stride; - int numChannels = getNumChannels(format); - error = flycaptureGetCameraRegister(context, IMAGE_DATA_FORMAT, regOut); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureGetCameraRegister() Error " + error); - } - ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? - ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; - boolean alreadySwapped = false; - boolean colorbayer = raw_image.bStippled(); - boolean colorrgb = format == FLYCAPTURE_RGB8 || format == FLYCAPTURE_RGB16 || - format == FLYCAPTURE_BGR || format == FLYCAPTURE_BGRU; - boolean coloryuv = format == FLYCAPTURE_411YUV8 || format == FLYCAPTURE_422YUV8 || - format == FLYCAPTURE_444YUV8; - BytePointer imageData = raw_image.pData(); - - if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder())) && - (imageMode == ImageMode.RAW || (imageMode == ImageMode.COLOR && numChannels == 3) || - (imageMode == ImageMode.GRAY && numChannels == 1 && !colorbayer))) { - if (return_image == null) { - return_image = IplImage.createHeader(w, h, depth, numChannels); - } - return_image.widthStep(stride); - return_image.imageSize(size); - return_image.imageData(imageData); - } else { - if (return_image == null) { - return_image = IplImage.create(w, h, depth, imageMode == ImageMode.COLOR ? 
3 : 1); - } - if (temp_image == null) { - if (imageMode == ImageMode.COLOR && - (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) { - temp_image = IplImage.create(w, h, depth, numChannels); - } else if (imageMode == ImageMode.GRAY && colorbayer) { - temp_image = IplImage.create(w, h, depth, 3); - } else if (imageMode == ImageMode.GRAY && colorrgb) { - temp_image = IplImage.createHeader(w, h, depth, 3); - } else if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) { - temp_image = IplImage.createHeader(w, h, depth, 1); - } else { - temp_image = return_image; - } - } - conv_image.iRowInc(temp_image.widthStep()); - conv_image.pData(temp_image.imageData()); - if (depth == IPL_DEPTH_8U) { - conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW8 : - temp_image.nChannels() == 1 ? FLYCAPTURE_MONO8 : FLYCAPTURE_BGR); - } else { - conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW16 : - temp_image.nChannels() == 1 ? FLYCAPTURE_MONO16 : FLYCAPTURE_RGB16); - } - - if (depth != IPL_DEPTH_8U && conv_image.pixelFormat() == format && conv_image.iRowInc() == stride) { - // we just need a copy to swap bytes.. 
- ShortBuffer in = raw_image.getByteBuffer().order(frameEndian).asShortBuffer(); - ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer(); - out.put(in); - alreadySwapped = true; - } else if ((imageMode == ImageMode.GRAY && colorrgb) || - (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) { - temp_image.widthStep(stride); - temp_image.imageSize(size); - temp_image.imageData(imageData); - } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) { - error = flycaptureConvertImage(context, raw_image, conv_image); - if (error != FLYCAPTURE_OK) { - throw new Exception("flycaptureConvertImage() Error " + error); - } - } - - if (!alreadySwapped && depth != IPL_DEPTH_8U && - !frameEndian.equals(ByteOrder.nativeOrder())) { - // ack, the camera's endianness doesn't correspond to our machine ... - // swap bytes of 16-bit images - ByteBuffer bb = temp_image.getByteBuffer(); - ShortBuffer in = bb.order(frameEndian).asShortBuffer(); - ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer(); - out.put(in); - } - - if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) { - cvCvtColor(temp_image, return_image, CV_GRAY2BGR); - } else if (imageMode == ImageMode.GRAY && (colorbayer || colorrgb)) { - cvCvtColor(temp_image, return_image, CV_BGR2GRAY); - } - } - - error = flycaptureGetColorTileFormat(context, regOut); - if (error != FLYCAPTURE_OK) { - sensorPattern = -1L; - } else switch (regOut[0]) { - case FLYCAPTURE_STIPPLEDFORMAT_BGGR: sensorPattern = SENSOR_PATTERN_BGGR; break; - case FLYCAPTURE_STIPPLEDFORMAT_GBRG: sensorPattern = SENSOR_PATTERN_GBRG; break; - case FLYCAPTURE_STIPPLEDFORMAT_GRBG: sensorPattern = SENSOR_PATTERN_GRBG; break; - case FLYCAPTURE_STIPPLEDFORMAT_RGGB: sensorPattern = SENSOR_PATTERN_RGGB; break; - default: sensorPattern = -1L; - } - - FlyCaptureTimestamp timeStamp = raw_image.timeStamp(); - timestamp = timeStamp.ulSeconds() * 1000000L + 
timeStamp.ulMicroSeconds(); - return converter.convert(return_image); - } -} From 5f62a4f27f85df314b48a26c708f4e68f56e8cf4 Mon Sep 17 00:00:00 2001 From: wangxi3 Date: Wed, 9 Jun 2021 12:23:05 +0800 Subject: [PATCH 14/15] 1 --- .../javacv/FlyCaptureFrameGrabber.java | 480 ++++++++++++++++++ 1 file changed, 480 insertions(+) create mode 100644 src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java diff --git a/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java new file mode 100644 index 00000000..01501227 --- /dev/null +++ b/src/main/java/org/bytedeco/javacv/FlyCaptureFrameGrabber.java @@ -0,0 +1,480 @@ +/* + * Copyright (C) 2009-2012 Samuel Audet + * + * Licensed either under the Apache License, Version 2.0, or (at your option) + * under the terms of the GNU General Public License as published by + * the Free Software Foundation (subject to the "Classpath" exception), + * either version 2, or any later version (collectively, the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.gnu.org/licenses/ + * http://www.gnu.org/software/classpath/license.html + * + * or as provided in the LICENSE.txt file that accompanied this code. + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.bytedeco.javacv; + +import java.io.File; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.ShortBuffer; +import org.bytedeco.javacpp.BytePointer; +import org.bytedeco.javacpp.Loader; + +import org.bytedeco.flycapture.PGRFlyCapture.*; +import org.bytedeco.opencv.opencv_core.*; +import org.bytedeco.opencv.opencv_imgproc.*; +import static org.bytedeco.flycapture.global.PGRFlyCapture.*; +import static org.bytedeco.opencv.global.opencv_core.*; +import static org.bytedeco.opencv.global.opencv_imgproc.*; + +/** + * + * @author Samuel Audet + */ +public class FlyCaptureFrameGrabber extends FrameGrabber { + public static String[] getDeviceDescriptions() throws Exception { + tryLoad(); + + int[] count = new int[1]; + int error = flycaptureBusCameraCount(count); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureBusCameraCount() Error " + error); + } + int c = count[0]; + String[] descriptions = new String[c]; + + if (c > 0) { + FlyCaptureInfoEx info = new FlyCaptureInfoEx(c); + error = flycaptureBusEnumerateCamerasEx(info, count); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureBusEnumerateCamerasEx() Error " + error); + } + + for (int i = 0; i < descriptions.length; i++) { + info.position(i); + descriptions[i] = info.pszVendorName() + " " + + info.pszModelName() + " " + info.SerialNumber(); + } + } + + return descriptions; + } + + public static FlyCaptureFrameGrabber createDefault(File deviceFile) throws Exception { throw new Exception(FlyCaptureFrameGrabber.class + " does not support device files."); } + public static FlyCaptureFrameGrabber createDefault(String devicePath) throws Exception { throw new Exception(FlyCaptureFrameGrabber.class + " does not support device paths."); } + public static FlyCaptureFrameGrabber createDefault(int deviceNumber) throws Exception { return new FlyCaptureFrameGrabber(deviceNumber); } + + private static Exception loadingException = null; + public static void 
tryLoad() throws Exception { + if (loadingException != null) { + throw loadingException; + } else { + try { + Loader.load(org.bytedeco.javacpp.PGRFlyCapture.class); + } catch (Throwable t) { + throw loadingException = new Exception("Failed to load " + FlyCaptureFrameGrabber.class, t); + } + } + } + + public FlyCaptureFrameGrabber(int deviceNumber) throws Exception { + int error = flycaptureCreateContext(context); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureCreateContext() Error " + error); + } + error = flycaptureInitializePlus(context, deviceNumber, numBuffers, (BytePointer)null); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureInitialize() Error " + error); + } + } + public void release() throws Exception { + if (context != null) { + stop(); + int error = flycaptureDestroyContext(context); + context = null; + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureDestroyContext() Error " + error); + } + } + } + @Override protected void finalize() throws Throwable { + super.finalize(); + release(); + } + + public static final int + INITIALIZE = 0x000, + TRIGGER_INQ = 0x530, + IS_CAMERA_POWER = 0x400, + CAMERA_POWER = 0x610, + SOFTWARE_TRIGGER = 0x62C, + SOFT_ASYNC_TRIGGER = 0x102C, + IMAGE_DATA_FORMAT = 0x1048; + + private FlyCaptureContext context = new FlyCaptureContext(null); + private FlyCaptureImage raw_image = new FlyCaptureImage(); + private FlyCaptureImage conv_image = new FlyCaptureImage(); + private IplImage temp_image, return_image = null; + private FrameConverter converter = new OpenCVFrameConverter.ToIplImage(); + private final int[] regOut = new int[1]; + private final float[] outFloat = new float[1]; + private final float[] gammaOut = new float[1]; + + @Override public double getGamma() { + return Float.isNaN(gammaOut[0]) || Float.isInfinite(gammaOut[0]) || gammaOut[0] == 0.0f ? 2.2 : gammaOut[0]; + } + + @Override public int getImageWidth() { + return return_image == null ? 
super.getImageWidth() : return_image.width(); + } + + @Override public int getImageHeight() { + return return_image == null ? super.getImageHeight() : return_image.height(); + } + + @Override public double getFrameRate() { + if (context == null || context.isNull()) { + return super.getFrameRate(); + } else { + flycaptureGetCameraAbsProperty(context, FLYCAPTURE_FRAME_RATE, outFloat); + return outFloat[0]; + } + } + + @Override public void setImageMode(ImageMode imageMode) { + if (imageMode != this.imageMode) { + temp_image = null; + return_image = null; + } + super.setImageMode(imageMode); + } + + public void start() throws Exception { + int f = FLYCAPTURE_FRAMERATE_ANY; + if (frameRate <= 0) { + f = FLYCAPTURE_FRAMERATE_ANY; + } else if (frameRate <= 1.876) { + f = FLYCAPTURE_FRAMERATE_1_875; + } else if (frameRate <= 3.76) { + f = FLYCAPTURE_FRAMERATE_3_75; + } else if (frameRate <= 7.51) { + f = FLYCAPTURE_FRAMERATE_7_5; + } else if (frameRate <= 15.01) { + f = FLYCAPTURE_FRAMERATE_15; + } else if (frameRate <= 30.01) { + f = FLYCAPTURE_FRAMERATE_30; + } else if (frameRate <= 60.01) { + f = FLYCAPTURE_FRAMERATE_60; + } else if (frameRate <= 120.01) { + f = FLYCAPTURE_FRAMERATE_120; + } else if (frameRate <= 240.01) { + f = FLYCAPTURE_FRAMERATE_240; + } + + int c = FLYCAPTURE_VIDEOMODE_ANY; + if (imageMode == ImageMode.COLOR || imageMode == ImageMode.RAW) { + if (imageWidth <= 0 || imageHeight <= 0) { + c = FLYCAPTURE_VIDEOMODE_ANY; + } else if (imageWidth <= 640 && imageHeight <= 480) { + c = FLYCAPTURE_VIDEOMODE_640x480RGB; + } else if (imageWidth <= 800 && imageHeight <= 600) { + c = FLYCAPTURE_VIDEOMODE_800x600RGB; + } else if (imageWidth <= 1024 && imageHeight <= 768) { + c = FLYCAPTURE_VIDEOMODE_1024x768RGB; + } else if (imageWidth <= 1280 && imageHeight <= 960) { + c = FLYCAPTURE_VIDEOMODE_1280x960RGB; + } else if (imageWidth <= 1600 && imageHeight <= 1200) { + c = FLYCAPTURE_VIDEOMODE_1600x1200RGB; + } + } else if (imageMode == ImageMode.GRAY) { + if 
(imageWidth <= 0 || imageHeight <= 0) { + c = FLYCAPTURE_VIDEOMODE_ANY; + } else if (imageWidth <= 640 && imageHeight <= 480) { + c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_640x480Y16 : FLYCAPTURE_VIDEOMODE_640x480Y8; + } else if (imageWidth <= 800 && imageHeight <= 600) { + c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_800x600Y16 : FLYCAPTURE_VIDEOMODE_800x600Y8; + } else if (imageWidth <= 1024 && imageHeight <= 768) { + c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1024x768Y16 : FLYCAPTURE_VIDEOMODE_1024x768Y8; + } else if (imageWidth <= 1280 && imageHeight <= 960) { + c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1280x960Y16 : FLYCAPTURE_VIDEOMODE_1280x960Y8; + } else if (imageWidth <= 1600 && imageHeight <= 1200) { + c = bpp > 8 ? FLYCAPTURE_VIDEOMODE_1600x1200Y16 : FLYCAPTURE_VIDEOMODE_1600x1200Y8; + } + } + + // set or reset trigger mode + int[] iPolarity = new int[1]; + int[] iSource = new int[1]; + int[] iRawValue = new int[1]; + int[] iMode = new int[1]; + int error = flycaptureGetTrigger(context, (boolean[])null, iPolarity, iSource, iRawValue, iMode, null); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureGetTrigger() Error " + error); + } + error = flycaptureSetTrigger(context, triggerMode, iPolarity[0], 7, 14, 0); + if (error != FLYCAPTURE_OK) { + // try with trigger mode 0 instead + error = flycaptureSetTrigger(context, true, iPolarity[0], 7, 0, 0); + } + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureSetTrigger() Error " + error); + } + if (triggerMode) { + waitForTriggerReady(); + } + + // try to match the endianness to our platform + error = flycaptureGetCameraRegister(context, IMAGE_DATA_FORMAT, regOut); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureGetCameraRegister() Error " + error); + } + int reg; + if (ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN)) { + reg = regOut[0] | 0x1; + } else { + reg = regOut[0] & ~0x1; + } + error = flycaptureSetCameraRegister(context, IMAGE_DATA_FORMAT, reg); + if (error != FLYCAPTURE_OK) { + throw 
new Exception("flycaptureSetCameraRegister() Error " + error); + } + + error = flycaptureSetBusSpeed(context, FLYCAPTURE_S_FASTEST, FLYCAPTURE_S_FASTEST); + if (error != FLYCAPTURE_OK) { + error = flycaptureSetBusSpeed(context, + FLYCAPTURE_ANY, FLYCAPTURE_ANY); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureSetBusSpeed() Error " + error); + } + } + + if (gamma != 0.0) { + error = flycaptureSetCameraAbsProperty(context, FLYCAPTURE_GAMMA, (float)gamma); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureSetCameraAbsProperty() Error " + error + ": Could not set gamma."); + } + } + error = flycaptureGetCameraAbsProperty(context, FLYCAPTURE_GAMMA, gammaOut); + if (error != FLYCAPTURE_OK) { + gammaOut[0] = 2.2f; + } + + error = flycaptureStart(context, c, f); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureStart() Error " + error); + } + error = flycaptureSetGrabTimeoutEx(context, timeout); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureSetGrabTimeoutEx() Error " + error); + } + } + + private void waitForTriggerReady() throws Exception { + // wait for trigger to be ready... 
+ long time = System.currentTimeMillis(); + do { + int error = flycaptureGetCameraRegister(context, SOFTWARE_TRIGGER, regOut); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureGetCameraRegister() Error " + error); + } + if (System.currentTimeMillis() - time > timeout) { + break; + //throw new Exception("waitForTriggerReady() Error: Timeout occured."); + } + } while((regOut[0] >>> 31) != 0); + } + + public void stop() throws Exception { + int error = flycaptureStop(context); + if (error != FLYCAPTURE_OK && error != FLYCAPTURE_FAILED) { + throw new Exception("flycaptureStop() Error " + error); + } + temp_image = null; + return_image = null; + timestamp = 0; + frameNumber = 0; + } + + public void trigger() throws Exception { + waitForTriggerReady(); + int error = flycaptureSetCameraRegister(context, SOFT_ASYNC_TRIGGER, 0x80000000); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureSetCameraRegister() Error " + error); + } + } + + private int getNumChannels(int pixelFormat) { + switch (pixelFormat) { + case FLYCAPTURE_BGR: + case FLYCAPTURE_RGB8: + case FLYCAPTURE_RGB16: + case FLYCAPTURE_S_RGB16: + return 3; + + case FLYCAPTURE_MONO8: + case FLYCAPTURE_MONO16: + case FLYCAPTURE_RAW8: + case FLYCAPTURE_RAW16: + case FLYCAPTURE_S_MONO16: + return 1; + + case FLYCAPTURE_BGRU: + return 4; + + case FLYCAPTURE_411YUV8: + case FLYCAPTURE_422YUV8: + case FLYCAPTURE_444YUV8: + default: + return -1; + } + } + private int getDepth(int pixelFormat) { + switch (pixelFormat) { + case FLYCAPTURE_BGR: + case FLYCAPTURE_RGB8: + case FLYCAPTURE_MONO8: + case FLYCAPTURE_RAW8: + case FLYCAPTURE_BGRU: + return IPL_DEPTH_8U; + + case FLYCAPTURE_MONO16: + case FLYCAPTURE_RAW16: + case FLYCAPTURE_RGB16: + return IPL_DEPTH_16U; + + case FLYCAPTURE_S_MONO16: + case FLYCAPTURE_S_RGB16: + return IPL_DEPTH_16S; + + case FLYCAPTURE_411YUV8: + case FLYCAPTURE_422YUV8: + case FLYCAPTURE_444YUV8: + default: + return IPL_DEPTH_8U; + } + } + + public Frame grab() throws 
Exception { + int error = flycaptureGrabImage2(context, raw_image); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureGrabImage2() Error " + error + " (Has start() been called?)"); + } + + int w = raw_image.iCols(); + int h = raw_image.iRows(); + int format = raw_image.pixelFormat(); + int depth = getDepth(format); + int stride = raw_image.iRowInc(); + int size = h*stride; + int numChannels = getNumChannels(format); + error = flycaptureGetCameraRegister(context, IMAGE_DATA_FORMAT, regOut); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureGetCameraRegister() Error " + error); + } + ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? + ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; + boolean alreadySwapped = false; + boolean colorbayer = raw_image.bStippled(); + boolean colorrgb = format == FLYCAPTURE_RGB8 || format == FLYCAPTURE_RGB16 || + format == FLYCAPTURE_BGR || format == FLYCAPTURE_BGRU; + boolean coloryuv = format == FLYCAPTURE_411YUV8 || format == FLYCAPTURE_422YUV8 || + format == FLYCAPTURE_444YUV8; + BytePointer imageData = raw_image.pData(); + + if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder())) && + (imageMode == ImageMode.RAW || (imageMode == ImageMode.COLOR && numChannels == 3) || + (imageMode == ImageMode.GRAY && numChannels == 1 && !colorbayer))) { + if (return_image == null) { + return_image = IplImage.createHeader(w, h, depth, numChannels); + } + return_image.widthStep(stride); + return_image.imageSize(size); + return_image.imageData(imageData); + } else { + if (return_image == null) { + return_image = IplImage.create(w, h, depth, imageMode == ImageMode.COLOR ? 
3 : 1); + } + if (temp_image == null) { + if (imageMode == ImageMode.COLOR && + (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) { + temp_image = IplImage.create(w, h, depth, numChannels); + } else if (imageMode == ImageMode.GRAY && colorbayer) { + temp_image = IplImage.create(w, h, depth, 3); + } else if (imageMode == ImageMode.GRAY && colorrgb) { + temp_image = IplImage.createHeader(w, h, depth, 3); + } else if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) { + temp_image = IplImage.createHeader(w, h, depth, 1); + } else { + temp_image = return_image; + } + } + conv_image.iRowInc(temp_image.widthStep()); + conv_image.pData(temp_image.imageData()); + if (depth == IPL_DEPTH_8U) { + conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW8 : + temp_image.nChannels() == 1 ? FLYCAPTURE_MONO8 : FLYCAPTURE_BGR); + } else { + conv_image.pixelFormat(imageMode == ImageMode.RAW ? FLYCAPTURE_RAW16 : + temp_image.nChannels() == 1 ? FLYCAPTURE_MONO16 : FLYCAPTURE_RGB16); + } + + if (depth != IPL_DEPTH_8U && conv_image.pixelFormat() == format && conv_image.iRowInc() == stride) { + // we just need a copy to swap bytes.. 
+ ShortBuffer in = raw_image.getByteBuffer().order(frameEndian).asShortBuffer(); + ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer(); + out.put(in); + alreadySwapped = true; + } else if ((imageMode == ImageMode.GRAY && colorrgb) || + (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) { + temp_image.widthStep(stride); + temp_image.imageSize(size); + temp_image.imageData(imageData); + } else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) { + error = flycaptureConvertImage(context, raw_image, conv_image); + if (error != FLYCAPTURE_OK) { + throw new Exception("flycaptureConvertImage() Error " + error); + } + } + + if (!alreadySwapped && depth != IPL_DEPTH_8U && + !frameEndian.equals(ByteOrder.nativeOrder())) { + // ack, the camera's endianness doesn't correspond to our machine ... + // swap bytes of 16-bit images + ByteBuffer bb = temp_image.getByteBuffer(); + ShortBuffer in = bb.order(frameEndian).asShortBuffer(); + ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer(); + out.put(in); + } + + if (imageMode == ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) { + cvCvtColor(temp_image, return_image, CV_GRAY2BGR); + } else if (imageMode == ImageMode.GRAY && (colorbayer || colorrgb)) { + cvCvtColor(temp_image, return_image, CV_BGR2GRAY); + } + } + + error = flycaptureGetColorTileFormat(context, regOut); + if (error != FLYCAPTURE_OK) { + sensorPattern = -1L; + } else switch (regOut[0]) { + case FLYCAPTURE_STIPPLEDFORMAT_BGGR: sensorPattern = SENSOR_PATTERN_BGGR; break; + case FLYCAPTURE_STIPPLEDFORMAT_GBRG: sensorPattern = SENSOR_PATTERN_GBRG; break; + case FLYCAPTURE_STIPPLEDFORMAT_GRBG: sensorPattern = SENSOR_PATTERN_GRBG; break; + case FLYCAPTURE_STIPPLEDFORMAT_RGGB: sensorPattern = SENSOR_PATTERN_RGGB; break; + default: sensorPattern = -1L; + } + + FlyCaptureTimestamp timeStamp = raw_image.timeStamp(); + timestamp = timeStamp.ulSeconds() * 1000000L + 
timeStamp.ulMicroSeconds(); + return converter.convert(return_image); + } +} From 339f51daadcee5268a997f0e76689cd95eab0aa9 Mon Sep 17 00:00:00 2001 From: Samuel Audet Date: Fri, 11 Jun 2021 21:38:51 +0900 Subject: [PATCH 15/15] Update CHANGELOG.md, extract waitForTimestamp() from grabAtFrameRate(), enhance test, and fix nits --- CHANGELOG.md | 1 + .../javacv/FrameGrabberRealTimeTest.java | 35 ---------------- .../org/bytedeco/javacv/FrameGrabberTest.java | 5 ++- .../org/bytedeco/javacv/FrameGrabber.java | 40 ++++++++++++------- 4 files changed, 30 insertions(+), 51 deletions(-) delete mode 100644 platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java diff --git a/CHANGELOG.md b/CHANGELOG.md index e02c8815..320fbcda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ + * Add `FrameGrabber.grabAtFrameRate()` to simulate a device or stream when reading from files ([pull #1659](https://github.com/bytedeco/javacv/pull/1659)) * Update `FFmpegFrameGrabber` and `FFmpegFrameRecorder` with new `avcodec` API ([issue #1498](https://github.com/bytedeco/javacv/issues/1498)) * Add new `Similarity` sample with PSNR and MSSIM ([pull #1622](https://github.com/bytedeco/javacv/pull/1622)) * Avoid crash in `FFmpegFrameRecorder.stop()` by moving `av_write_trailer()` out of `flush()` ([issue #1616](https://github.com/bytedeco/javacv/issues/1616)) diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java deleted file mode 100644 index 4aa9efb1..00000000 --- a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberRealTimeTest.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.bytedeco.javacv; - -import org.bytedeco.ffmpeg.global.avcodec; -import org.junit.Test; - -public class FrameGrabberRealTimeTest { - public static final String url = ""; - public static final String file = ""; - - @Test - public void testWithMethodCall() throws FFmpegFrameGrabber.Exception, 
FFmpegFrameRecorder.Exception { - FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file); - grabber.start(); - FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, grabber.getImageWidth(), grabber.getImageHeight(), grabber.getAudioChannels()); - try { - recorder.setFormat("flv"); - recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); - recorder.setFrameRate(grabber.getFrameRate()); - recorder.setTimestamp(grabber.getTimestamp()); - recorder.start(); - Frame frame = null; - while ((frame = grabber.grabAtFrameRate()) != null) { - recorder.record(frame); - } - } catch (InterruptedException | FrameGrabber.Exception e) { - e.printStackTrace(); - } finally { - recorder.stop(); - recorder.release(); - grabber.stop(); - grabber.release(); - } - - } -} diff --git a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberTest.java b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberTest.java index 700c07b9..5d24322d 100644 --- a/platform/src/test/java/org/bytedeco/javacv/FrameGrabberTest.java +++ b/platform/src/test/java/org/bytedeco/javacv/FrameGrabberTest.java @@ -93,7 +93,8 @@ public void testFFmpegFrameGrabber() { int n = 0, m = 0; Frame frame2; - while ((frame2 = grabber.grab()) != null) { + long startTime = System.nanoTime(); + while ((frame2 = grabber.grabAtFrameRate()) != null) { Frame clone2 = frame2.clone(); if (frame2.image != null) { Frame frame = frames[n++]; @@ -127,6 +128,8 @@ public void testFFmpegFrameGrabber() { } clone2.close(); } + long stopTime = System.nanoTime(); + assertEquals(n, (stopTime - startTime) * grabber.getFrameRate() / 1_000_000_000, 10.0); assertEquals(frames.length, n); assertEquals(null, grabber.grab()); grabber.restart(); diff --git a/src/main/java/org/bytedeco/javacv/FrameGrabber.java b/src/main/java/org/bytedeco/javacv/FrameGrabber.java index cdd59c93..1c77ea60 100644 --- a/src/main/java/org/bytedeco/javacv/FrameGrabber.java +++ b/src/main/java/org/bytedeco/javacv/FrameGrabber.java @@ -45,7 +45,7 @@ public abstract class 
FrameGrabber implements Closeable { public static final List list = new LinkedList(Arrays.asList(new String[] { - "DC1394", "FlyCapture", "FlyCapture2", "OpenKinect", "OpenKinect2", "RealSense", "RealSense2", "PS3Eye", "VideoInput", "OpenCV", "FFmpeg", "IPCamera" })); + "DC1394", "FlyCapture", "FlyCapture2", "OpenKinect", "OpenKinect2", "RealSense", "RealSense2", "PS3Eye", "VideoInput", "OpenCV", "FFmpeg", "IPCamera" })); public static void init() { for (String name : list) { try { @@ -201,7 +201,7 @@ public static enum SampleMode { protected int frameNumber = 0; protected long timestamp = 0; protected int maxDelay = -1; - protected long startTime = 0; + protected long startTime = 0; public int getVideoStream() { return videoStream; @@ -725,17 +725,27 @@ public void release() throws Exception { public Array createArray(FrameGrabber[] frameGrabbers) { return new Array(frameGrabbers); } - - public Frame grabAtFrameRate() throws Exception, InterruptedException { - Frame frame = grab(); - if (startTime == 0) { - startTime = System.currentTimeMillis(); - } else { - long delay = frame.timestamp / 1000 - (System.currentTimeMillis() - startTime); - if (delay > 0) { - Thread.sleep(delay); - } - } - return frame; - } + + /** Returns {@code frame = grab()} after {@code waitForTimestamp(frame)}. */ + public Frame grabAtFrameRate() throws Exception, InterruptedException { + Frame frame = grab(); + if (frame != null) { + waitForTimestamp(frame); + } + return frame; + } + + /** Returns true if {@code Thread.sleep()} had to be called. */ + public boolean waitForTimestamp(Frame frame) throws InterruptedException { + if (startTime == 0) { + startTime = System.nanoTime() / 1000 - frame.timestamp; + } else { + long delay = frame.timestamp - (System.nanoTime() / 1000 - startTime); + if (delay > 0) { + Thread.sleep(delay / 1000, (int)(delay % 1000) * 1000); + return true; + } + } + return false; + } }