From 8b85ac1a6c96d1ec89fe5aee98c8040dcb3f2793 Mon Sep 17 00:00:00 2001
From: Josh Bultman
Date: Wed, 4 Dec 2024 15:25:04 -0600
Subject: [PATCH] Fix bug where swr_context wasn't getting flushed. Add
 wrote_samples check to avoid an infinite loop when Vorbis attempts to flush
 the encoder before writing any samples.

---
 .../bytedeco/javacv/FFmpegFrameRecorder.java | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameRecorder.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameRecorder.java
index aeb89260..aee3cd5c 100644
--- a/src/main/java/org/bytedeco/javacv/FFmpegFrameRecorder.java
+++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameRecorder.java
@@ -396,6 +396,7 @@ static class SeekCallback extends Seek_Pointer_long_int {
     private PointerPointer plane_ptr, plane_ptr2;
     private AVPacket video_pkt, audio_pkt;
     private int[] got_video_packet, got_audio_packet;
+    private boolean wrote_samples = false;
     private AVFormatContext ifmt_ctx;
     private IntPointer display_matrix;
     private AVChannelLayout default_layout;
@@ -467,6 +468,7 @@ public synchronized void startUnsafe() throws Exception {
         plane_ptr2 = new PointerPointer(AVFrame.AV_NUM_DATA_POINTERS).retainReference();
         video_pkt = new AVPacket().retainReference();
         audio_pkt = new AVPacket().retainReference();
+        wrote_samples = false;
         got_video_packet = new int[1];
         got_audio_packet = new int[1];
         default_layout = new AVChannelLayout().retainReference();
@@ -1175,6 +1177,11 @@ public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buf
             return writeFrame((AVFrame)null);
         }

+        if (samples == null && samples_convert_ctx == null) {
+            // We haven't tried to record any samples yet so we don't need to flush.
+            return false;
+        }
+
         int ret;

         if (sampleRate <= 0) {
@@ -1264,7 +1271,8 @@ public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buf
             throw new Exception("Audio samples Buffer has unsupported type: " + samples);
         }

-        if (samples_convert_ctx == null || samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate) {
+        boolean formatChanged = samples_channels != audioChannels || samples_format != inputFormat || samples_rate != sampleRate;
+        if (samples != null && (samples_convert_ctx == null || formatChanged)) {
             if (samples_convert_ctx == null) {
                 samples_convert_ctx = new SwrContext().retainReference();
             }
@@ -1293,6 +1301,11 @@ public synchronized boolean recordSamples(int sampleRate, int audioChannels, Buf
             for (int i = 0; i < samples_out.length; i++) {
                 plane_ptr2.put(i, samples_out[i]);
             }
+            if (samples == null && inputCount == 0 && plane_ptr != null) {
+                plane_ptr.releaseReference();
+                // needs to be null to flush swr context.
+                plane_ptr = null;
+            }
             if ((ret = swr_convert(samples_convert_ctx, plane_ptr2, outputCount, plane_ptr, inputCount)) < 0) {
                 throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples.");
             } else if (ret == 0 && inputCount == 0) {
@@ -1337,6 +1350,7 @@ private void writeSamples(int nb_samples) throws Exception {
         frame.format(audio_c.sample_fmt());
         frame.quality(audio_c.global_quality());
         writeFrame(frame);
+        wrote_samples = true;
     }

     private boolean writeFrame(AVFrame frame) throws Exception {
@@ -1374,6 +1388,10 @@ private boolean writeFrame(AVFrame frame) throws Exception {

             /* write the compressed frame in the media file */
             writePacket(AVMEDIA_TYPE_AUDIO, audio_pkt);
+
+            if (frame == null && !wrote_samples) {
+                break;
+            }
         }

         return got_audio_packet[0] != 0;
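
Usage note (not part of the patch): below is a minimal sketch of the code path this change affects, for reviewers who want to reproduce it. The output name "out.ogg", the 440 Hz test tone, and the class name are illustrative assumptions, and it assumes the bundled FFmpeg provides a usable Vorbis encoder. Calling stop() is what ends up invoking recordSamples(0, 0, (Buffer[]) null) through the recorder's flush logic, i.e. the path that previously left the SwrContext undrained and could loop forever when no samples had been written.

    import java.nio.ShortBuffer;

    import org.bytedeco.ffmpeg.global.avcodec;
    import org.bytedeco.javacv.FFmpegFrameRecorder;

    public class VorbisFlushExample {
        public static void main(String[] args) throws Exception {
            int sampleRate = 44100;
            FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("out.ogg", 1);
            recorder.setFormat("ogg");
            recorder.setAudioCodec(avcodec.AV_CODEC_ID_VORBIS);
            recorder.setSampleRate(sampleRate);
            recorder.start();

            // One second of a 440 Hz sine wave as signed 16-bit PCM.
            short[] pcm = new short[sampleRate];
            for (int i = 0; i < pcm.length; i++) {
                pcm[i] = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE);
            }
            recorder.recordSamples(sampleRate, 1, ShortBuffer.wrap(pcm));

            // stop() flushes the audio pipeline; with this patch the flush also
            // drains the SwrContext, and the wrote_samples guard keeps the flush
            // loop from spinning forever if nothing was ever recorded.
            recorder.stop();
            recorder.release();
        }
    }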