ffprobe: restore reporting error code for failed inputs
author     Gyan Doshi <ffmpeg@gyani.pro>
           Mon, 15 Aug 2022 18:26:16 +0000 (23:56 +0530)
committer  Gyan Doshi <ffmpeg@gyani.pro>
           Wed, 17 Aug 2022 11:16:05 +0000 (16:46 +0530)
c11fb46731 led to a regression whereby the return code for a missing
input or a failed input probe is overridden by the writer-close return
code and hence not conveyed in the exit code.
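
A quick way to observe the restored behaviour from the shell (illustrative
only; the input path below is a placeholder):

    $ ffprobe -hide_banner /path/to/missing-input.mkv
    $ echo $?
    # with this fix the exit code is non-zero again for a failed input,
    # rather than being masked by the (successful) writer-close return code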

1083 files changed:
Changelog
MAINTAINERS
Makefile
RELEASE
compat/cuda/cuda_runtime.h
compat/windows/mswindres [new file with mode: 0755]
configure
doc/APIchanges
doc/codecs.texi
doc/demuxers.texi
doc/encoders.texi
doc/ffmpeg.texi
doc/ffprobe.xsd
doc/filters.texi
doc/general_contents.texi
doc/muxers.texi
fftools/Makefile
fftools/cmdutils.c
fftools/ffmpeg.c
fftools/ffmpeg.h
fftools/ffmpeg_demux.c [new file with mode: 0644]
fftools/ffmpeg_filter.c
fftools/ffmpeg_hw.c
fftools/ffmpeg_mux.c
fftools/ffmpeg_opt.c
fftools/ffprobe.c
fftools/fftools.manifest [new file with mode: 0644]
fftools/fftoolsres.rc [new file with mode: 0644]
fftools/objpool.c [new file with mode: 0644]
fftools/objpool.h [new file with mode: 0644]
fftools/sync_queue.c [new file with mode: 0644]
fftools/sync_queue.h [new file with mode: 0644]
fftools/thread_queue.c [new file with mode: 0644]
fftools/thread_queue.h [new file with mode: 0644]
libavcodec/012v.c
libavcodec/4xm.c
libavcodec/8bps.c
libavcodec/8svx.c
libavcodec/Makefile
libavcodec/a64multienc.c
libavcodec/aac.h
libavcodec/aaccoder.c
libavcodec/aaccoder_trellis.h
libavcodec/aacdec.c
libavcodec/aacdec_fixed.c
libavcodec/aacdec_template.c
libavcodec/aacenc.c
libavcodec/aacenc.h
libavcodec/aacenc_is.c
libavcodec/aacenc_ltp.c
libavcodec/aacenc_pred.c
libavcodec/aacenc_quantization.h
libavcodec/aacenc_quantization_misc.h
libavcodec/aacenc_tns.c
libavcodec/aacsbr_template.c
libavcodec/aarch64/h264chroma_init_aarch64.c
libavcodec/aarch64/h264cmc_neon.S
libavcodec/aarch64/hevcdsp_idct_neon.S
libavcodec/aarch64/hevcdsp_init_aarch64.c
libavcodec/aarch64/me_cmp_init_aarch64.c
libavcodec/aarch64/me_cmp_neon.S
libavcodec/aarch64/rv40dsp_init_aarch64.c
libavcodec/aarch64/vc1dsp_init_aarch64.c
libavcodec/aarch64/videodsp_init.c
libavcodec/aasc.c
libavcodec/ac3_parser.c
libavcodec/ac3_parser_internal.h
libavcodec/ac3dec_fixed.c
libavcodec/ac3dec_float.c
libavcodec/ac3enc_fixed.c
libavcodec/ac3enc_float.c
libavcodec/acelp_filters.c
libavcodec/acelp_pitch_delay.c
libavcodec/acelp_vectors.c
libavcodec/adpcm.c
libavcodec/adpcmenc.c
libavcodec/adxdec.c
libavcodec/adxenc.c
libavcodec/agm.c
libavcodec/aic.c
libavcodec/alac.c
libavcodec/alacdsp.c
libavcodec/alacenc.c
libavcodec/allcodecs.c
libavcodec/alpha/me_cmp_alpha.c
libavcodec/alpha/me_cmp_mvi_asm.S
libavcodec/alsdec.c
libavcodec/amfenc_h264.c
libavcodec/amfenc_hevc.c
libavcodec/amrnbdec.c
libavcodec/amrwbdec.c
libavcodec/anm.c
libavcodec/ansi.c
libavcodec/apedec.c
libavcodec/apng.h
libavcodec/aptxdec.c
libavcodec/aptxenc.c
libavcodec/arbc.c
libavcodec/argo.c
libavcodec/arm/Makefile
libavcodec/arm/flacdsp_init_arm.c
libavcodec/arm/h264chroma_init_arm.c
libavcodec/arm/h264cmc_neon.S
libavcodec/arm/hevcdsp_init_neon.c
libavcodec/arm/me_cmp_init_arm.c
libavcodec/arm/mpegvideoencdsp_init_arm.c
libavcodec/arm/rv40dsp_init_arm.c
libavcodec/arm/vc1dsp_init_neon.c
libavcodec/arm/videodsp_init_armv5te.c
libavcodec/arm/vp56_arith.h [deleted file]
libavcodec/arm/vp8.h
libavcodec/arm/vp8_armv6.S
libavcodec/arm/vpx_arith.h [new file with mode: 0644]
libavcodec/assdec.c
libavcodec/assenc.c
libavcodec/asvdec.c
libavcodec/asvenc.c
libavcodec/atrac1.c
libavcodec/atrac3.c
libavcodec/atrac3plusdec.c
libavcodec/atrac9dec.c
libavcodec/audiotoolboxdec.c
libavcodec/audiotoolboxenc.c
libavcodec/aura.c
libavcodec/av1dec.c
libavcodec/avcodec.c
libavcodec/avcodec.h
libavcodec/avrndec.c
libavcodec/avs.c
libavcodec/avuidec.c
libavcodec/avuienc.c
libavcodec/bethsoftvideo.c
libavcodec/bfi.c
libavcodec/bink.c
libavcodec/binkaudio.c
libavcodec/bintext.c
libavcodec/bitpacked_dec.c
libavcodec/bitpacked_enc.c
libavcodec/bmpenc.c
libavcodec/bmvaudio.c
libavcodec/bmvvideo.c
libavcodec/c93.c
libavcodec/cavsdec.c
libavcodec/cbs.c
libavcodec/cbs.h
libavcodec/cbs_av1.c
libavcodec/cbs_h2645.c
libavcodec/cbs_internal.h
libavcodec/cbs_jpeg.c
libavcodec/cbs_mpeg2.c
libavcodec/cbs_sei.c
libavcodec/cbs_vp9.c
libavcodec/ccaption_dec.c
libavcodec/cdgraphics.c
libavcodec/cdtoons.c
libavcodec/cdxl.c
libavcodec/cfhd.c
libavcodec/cfhdenc.c
libavcodec/cfhdencdsp.c
libavcodec/cfhdencdsp.h
libavcodec/cinepak.c
libavcodec/cinepakenc.c
libavcodec/clearvideo.c
libavcodec/cljrdec.c
libavcodec/cljrenc.c
libavcodec/cllc.c
libavcodec/cngdec.c
libavcodec/cngenc.c
libavcodec/codec.h
libavcodec/codec_desc.c
libavcodec/codec_id.h
libavcodec/codec_internal.h
libavcodec/cook.c
libavcodec/cpia.c
libavcodec/cri.c
libavcodec/crystalhd.c
libavcodec/cscd.c
libavcodec/cuviddec.c
libavcodec/cyuv.c
libavcodec/dca_core.c
libavcodec/dca_core.h
libavcodec/dca_lbr.c
libavcodec/dca_lbr.h
libavcodec/dca_xll.c
libavcodec/dca_xll.h
libavcodec/dcadec.c
libavcodec/dcaenc.c
libavcodec/dds.c
libavcodec/decode.c
libavcodec/decode.h
libavcodec/dfa.c
libavcodec/dfpwmdec.c
libavcodec/dfpwmenc.c
libavcodec/diracdec.c
libavcodec/diracdsp.c
libavcodec/dnxhddec.c
libavcodec/dnxhdenc.c
libavcodec/dnxhdenc.h
libavcodec/dolby_e.c
libavcodec/dpcm.c
libavcodec/dpxenc.c
libavcodec/dsddec.c
libavcodec/dsicinaudio.c
libavcodec/dsicinvideo.c
libavcodec/dss_sp.c
libavcodec/dstdec.c
libavcodec/dv.h
libavcodec/dvaudiodec.c
libavcodec/dvbsubdec.c
libavcodec/dvdec.c
libavcodec/dvdsubdec.c
libavcodec/dvdsubenc.c
libavcodec/dvenc.c
libavcodec/dxa.c
libavcodec/dxv.c
libavcodec/eac3enc.c
libavcodec/eacmv.c
libavcodec/eamad.c
libavcodec/eatgq.c
libavcodec/eatgv.c
libavcodec/eatqi.c
libavcodec/encode.c
libavcodec/encode.h
libavcodec/error_resilience.c
libavcodec/error_resilience.h
libavcodec/escape124.c
libavcodec/escape130.c
libavcodec/evrcdec.c
libavcodec/exr.c
libavcodec/exrenc.c
libavcodec/fastaudio.c
libavcodec/fflcms2.c [new file with mode: 0644]
libavcodec/fflcms2.h [new file with mode: 0644]
libavcodec/ffv1dec.c
libavcodec/ffv1dec_template.c
libavcodec/ffv1enc.c
libavcodec/ffwavesynth.c
libavcodec/fic.c
libavcodec/fitsenc.c
libavcodec/flacdec.c
libavcodec/flacdsp.c
libavcodec/flacdsp.h
libavcodec/flacenc.c
libavcodec/flacencdsp.c [new file with mode: 0644]
libavcodec/flacencdsp.h [new file with mode: 0644]
libavcodec/flashsv.c
libavcodec/flashsv2enc.c
libavcodec/flashsvenc.c
libavcodec/flicvideo.c
libavcodec/flvdec.c
libavcodec/flvenc.c
libavcodec/fmvc.c
libavcodec/fraps.c
libavcodec/frwu.c
libavcodec/g2meet.c
libavcodec/g722dec.c
libavcodec/g722enc.c
libavcodec/g723_1dec.c
libavcodec/g723_1enc.c
libavcodec/g726.c
libavcodec/g729dec.c
libavcodec/gdv.c
libavcodec/get_bits.h
libavcodec/gif.c
libavcodec/gifdec.c
libavcodec/gsmdec.c
libavcodec/h261dec.c
libavcodec/h261enc.c
libavcodec/h263dec.c
libavcodec/h264_loopfilter.c
libavcodec/h264_metadata_bsf.c
libavcodec/h264_slice.c
libavcodec/h264chroma.h
libavcodec/h264chroma_template.c
libavcodec/h264dec.c
libavcodec/h265_metadata_bsf.c
libavcodec/half2float.h
libavcodec/hapdec.c
libavcodec/hapenc.c
libavcodec/hcadec.c
libavcodec/hcom.c
libavcodec/hdr_parser.c [new file with mode: 0644]
libavcodec/hdrdec.c [new file with mode: 0644]
libavcodec/hdrenc.c [new file with mode: 0644]
libavcodec/hevc_cabac.c
libavcodec/hevc_filter.c
libavcodec/hevc_mvs.c
libavcodec/hevc_ps.c
libavcodec/hevc_refs.c
libavcodec/hevc_sei.c
libavcodec/hevc_sei.h
libavcodec/hevcdec.c
libavcodec/hevcdec.h
libavcodec/hevcdsp.h
libavcodec/hevcdsp_template.c
libavcodec/hevcpred.h
libavcodec/hevcpred_template.c
libavcodec/hnm4video.c
libavcodec/hq_hqa.c
libavcodec/hq_hqa.h
libavcodec/hqx.c
libavcodec/hqx.h
libavcodec/huffyuvdec.c
libavcodec/huffyuvenc.c
libavcodec/idcinvideo.c
libavcodec/iff.c
libavcodec/ilbcdec.c
libavcodec/imc.c
libavcodec/imm4.c
libavcodec/imm5.c
libavcodec/imx.c
libavcodec/indeo2.c
libavcodec/indeo3.c
libavcodec/indeo4.c
libavcodec/indeo5.c
libavcodec/intelh263dec.c
libavcodec/internal.h
libavcodec/interplayacm.c
libavcodec/interplayvideo.c
libavcodec/intrax8.c
libavcodec/intrax8.h
libavcodec/ituh263dec.c
libavcodec/ituh263enc.c
libavcodec/j2kenc.c
libavcodec/jacosubdec.c
libavcodec/jpeg2000.h
libavcodec/jpeg2000dec.c
libavcodec/jpeglsdec.c
libavcodec/jpeglsenc.c
libavcodec/jvdec.c
libavcodec/kgv1dec.c
libavcodec/kmvc.c
libavcodec/lagarith.c
libavcodec/lcldec.c
libavcodec/lclenc.c
libavcodec/libaom.c [new file with mode: 0644]
libavcodec/libaom.h [new file with mode: 0644]
libavcodec/libaomdec.c
libavcodec/libaomenc.c
libavcodec/libaribb24.c
libavcodec/libcelt_dec.c
libavcodec/libcodec2.c
libavcodec/libdav1d.c
libavcodec/libdavs2.c
libavcodec/libfdk-aacdec.c
libavcodec/libfdk-aacenc.c
libavcodec/libgsmdec.c
libavcodec/libgsmenc.c
libavcodec/libilbc.c
libavcodec/libjxldec.c
libavcodec/libjxlenc.c
libavcodec/libkvazaar.c
libavcodec/libmp3lame.c
libavcodec/libopencore-amr.c
libavcodec/libopenh264dec.c
libavcodec/libopenh264enc.c
libavcodec/libopenjpegdec.c
libavcodec/libopenjpegenc.c
libavcodec/libopusdec.c
libavcodec/libopusenc.c
libavcodec/librav1e.c
libavcodec/libshine.c
libavcodec/libspeexdec.c
libavcodec/libspeexenc.c
libavcodec/libsvtav1.c
libavcodec/libtheoraenc.c
libavcodec/libtwolame.c
libavcodec/libuavs3d.c
libavcodec/libvo-amrwbenc.c
libavcodec/libvorbisdec.c
libavcodec/libvorbisenc.c
libavcodec/libvpxdec.c
libavcodec/libvpxenc.c
libavcodec/libwebpenc.c
libavcodec/libwebpenc_animencoder.c
libavcodec/libx264.c
libavcodec/libx265.c
libavcodec/libxavs.c
libavcodec/libxavs2.c
libavcodec/libxvid.c
libavcodec/libzvbi-teletextdec.c
libavcodec/ljpegenc.c
libavcodec/loco.c
libavcodec/loongarch/h264chroma_lasx.c
libavcodec/loongarch/h264chroma_lasx.h
libavcodec/loongarch/hevc_lpf_sao_lsx.c
libavcodec/loongarch/hevc_mc_bi_lsx.c
libavcodec/loongarch/hevc_mc_uni_lsx.c
libavcodec/loongarch/hevc_mc_uniw_lsx.c
libavcodec/loongarch/hevcdsp_lsx.c
libavcodec/loongarch/hevcdsp_lsx.h
libavcodec/loongarch/vc1dsp_lasx.c
libavcodec/loongarch/vc1dsp_loongarch.h
libavcodec/loongarch/videodsp_init.c
libavcodec/lossless_videoencdsp.c
libavcodec/lossless_videoencdsp.h
libavcodec/lscrdec.c
libavcodec/m101.c
libavcodec/mace.c
libavcodec/magicyuv.c
libavcodec/magicyuvenc.c
libavcodec/mdec.c
libavcodec/me_cmp.c
libavcodec/me_cmp.h
libavcodec/mediacodecdec.c
libavcodec/metasound.c
libavcodec/mfenc.c
libavcodec/microdvddec.c
libavcodec/midivid.c
libavcodec/mimic.c
libavcodec/mips/aaccoder_mips.c
libavcodec/mips/h263dsp_mips.h
libavcodec/mips/h264chroma_mips.h
libavcodec/mips/h264chroma_mmi.c
libavcodec/mips/h264chroma_msa.c
libavcodec/mips/hevc_idct_msa.c
libavcodec/mips/hevc_lpf_sao_msa.c
libavcodec/mips/hevc_mc_bi_msa.c
libavcodec/mips/hevc_mc_biw_msa.c
libavcodec/mips/hevc_mc_uni_msa.c
libavcodec/mips/hevc_mc_uniw_msa.c
libavcodec/mips/hevcdsp_mips.h
libavcodec/mips/hevcdsp_mmi.c
libavcodec/mips/hevcdsp_msa.c
libavcodec/mips/hevcpred_mips.h
libavcodec/mips/hevcpred_msa.c
libavcodec/mips/me_cmp_mips.h
libavcodec/mips/me_cmp_msa.c
libavcodec/mips/mpegvideoencdsp_msa.c
libavcodec/mips/vc1dsp_mips.h
libavcodec/mips/vc1dsp_mmi.c
libavcodec/mips/videodsp_init.c
libavcodec/mjpegbdec.c
libavcodec/mjpegdec.c
libavcodec/mjpegenc.c
libavcodec/mlpdec.c
libavcodec/mlpenc.c
libavcodec/mmaldec.c
libavcodec/mmvideo.c
libavcodec/mobiclip.c
libavcodec/motion_est.c
libavcodec/motion_est.h
libavcodec/motionpixels.c
libavcodec/movtextdec.c
libavcodec/movtextenc.c
libavcodec/mpc7.c
libavcodec/mpc8.c
libavcodec/mpeg12dec.c
libavcodec/mpeg12enc.c
libavcodec/mpeg4videodec.c
libavcodec/mpeg4videoenc.c
libavcodec/mpeg_er.c
libavcodec/mpegaudiodec_fixed.c
libavcodec/mpegaudiodec_float.c
libavcodec/mpegaudioenc_fixed.c
libavcodec/mpegaudioenc_float.c
libavcodec/mpegpicture.c
libavcodec/mpegpicture.h
libavcodec/mpegutils.c
libavcodec/mpegutils.h
libavcodec/mpegvideo.c
libavcodec/mpegvideo.h
libavcodec/mpegvideo_dec.c
libavcodec/mpegvideo_enc.c
libavcodec/mpegvideo_motion.c
libavcodec/mpegvideodec.h
libavcodec/mpegvideodsp.c
libavcodec/mpegvideodsp.h
libavcodec/mpegvideoencdsp.c
libavcodec/mpegvideoencdsp.h
libavcodec/mpl2dec.c
libavcodec/mscc.c
libavcodec/msmpeg4dec.c
libavcodec/msmpeg4dec.h
libavcodec/msmpeg4enc.c
libavcodec/msmpeg4enc.h
libavcodec/msrle.c
libavcodec/mss1.c
libavcodec/mss2.c
libavcodec/mss3.c
libavcodec/mss4.c
libavcodec/msvideo1.c
libavcodec/msvideo1enc.c
libavcodec/mv30.c
libavcodec/mvcdec.c
libavcodec/mvha.c
libavcodec/mwsc.c
libavcodec/mxpegdec.c
libavcodec/nellymoserdec.c
libavcodec/nellymoserenc.c
libavcodec/notchlc.c
libavcodec/nuv.c
libavcodec/nvdec.c
libavcodec/nvdec_hevc.c
libavcodec/nvdec_vp8.c
libavcodec/nvenc.c
libavcodec/nvenc_h264.c
libavcodec/nvenc_hevc.c
libavcodec/omx.c
libavcodec/on2avc.c
libavcodec/options_table.h
libavcodec/opusdec.c
libavcodec/opusenc.c
libavcodec/pafaudio.c
libavcodec/pafvideo.c
libavcodec/pamenc.c
libavcodec/parsers.c
libavcodec/pcm-blurayenc.c
libavcodec/pcm-dvd.c
libavcodec/pcm-dvdenc.c
libavcodec/pcm.c
libavcodec/pcxenc.c
libavcodec/pgssubdec.c
libavcodec/photocd.c
libavcodec/pixlet.c
libavcodec/pngdec.c
libavcodec/pngenc.c
libavcodec/pnm.c
libavcodec/pnm.h
libavcodec/pnm_parser.c
libavcodec/pnmdec.c
libavcodec/pnmenc.c
libavcodec/ppc/h264chroma_template.c
libavcodec/ppc/me_cmp.c
libavcodec/ppc/mpegvideodsp.c
libavcodec/ppc/mpegvideoencdsp.c
libavcodec/ppc/videodsp.c
libavcodec/proresdata.c
libavcodec/proresdata.h
libavcodec/proresdec2.c
libavcodec/proresenc_anatoliy.c
libavcodec/proresenc_kostya.c
libavcodec/prosumer.c
libavcodec/pthread_frame.c
libavcodec/pthread_slice.c
libavcodec/qcelpdec.c
libavcodec/qdm2.c
libavcodec/qdmc.c
libavcodec/qdrw.c
libavcodec/qoidec.c
libavcodec/qoienc.c
libavcodec/qpeg.c
libavcodec/qpeldsp.c
libavcodec/qsv.c
libavcodec/qsv.h
libavcodec/qsv_internal.h
libavcodec/qsvdec.c
libavcodec/qsvenc.c
libavcodec/qsvenc.h
libavcodec/qsvenc_h264.c
libavcodec/qsvenc_hevc.c
libavcodec/qsvenc_jpeg.c
libavcodec/qsvenc_mpeg2.c
libavcodec/qsvenc_vp9.c
libavcodec/qtrle.c
libavcodec/qtrleenc.c
libavcodec/r210dec.c
libavcodec/r210enc.c
libavcodec/ra144dec.c
libavcodec/ra144enc.c
libavcodec/ra288.c
libavcodec/ralf.c
libavcodec/rasc.c
libavcodec/ratecontrol.c
libavcodec/raw.c
libavcodec/rawdec.c
libavcodec/rawenc.c
libavcodec/realtextdec.c
libavcodec/rkmppdec.c
libavcodec/rl2.c
libavcodec/roqaudioenc.c
libavcodec/roqvideodec.c
libavcodec/roqvideoenc.c
libavcodec/rpza.c
libavcodec/rpzaenc.c
libavcodec/rscc.c
libavcodec/rv10.c
libavcodec/rv10enc.c
libavcodec/rv20enc.c
libavcodec/rv30.c
libavcodec/rv34.c
libavcodec/rv40.c
libavcodec/rv40dsp.c
libavcodec/s302menc.c
libavcodec/samidec.c
libavcodec/sanm.c
libavcodec/sbcdec.c
libavcodec/sbcenc.c
libavcodec/sbrdsp.c
libavcodec/sbrdsp.h
libavcodec/sbrdsp_fixed.c
libavcodec/scpr.c
libavcodec/screenpresso.c
libavcodec/sga.c
libavcodec/sgidec.c
libavcodec/sgienc.c
libavcodec/sgirledec.c
libavcodec/shorten.c
libavcodec/sipr.c
libavcodec/siren.c
libavcodec/smacker.c
libavcodec/smc.c
libavcodec/smcenc.c
libavcodec/snow.c
libavcodec/snow.h
libavcodec/snow_dwt.c
libavcodec/snow_dwt.h
libavcodec/snowdec.c
libavcodec/snowenc.c
libavcodec/sonic.c
libavcodec/sp5xdec.c
libavcodec/speedhq.c
libavcodec/speedhqenc.c
libavcodec/speexdec.c
libavcodec/srtdec.c
libavcodec/srtenc.c
libavcodec/subviewerdec.c
libavcodec/sunrastenc.c
libavcodec/svq1dec.c
libavcodec/svq1enc.c
libavcodec/svq3.c
libavcodec/takdec.c
libavcodec/targa_y216dec.c
libavcodec/targaenc.c
libavcodec/tdsc.c
libavcodec/tests/snowenc.c
libavcodec/textdec.c
libavcodec/thread.h
libavcodec/threadframe.h
libavcodec/tiertexseqv.c
libavcodec/tiff.c
libavcodec/tiffenc.c
libavcodec/tmv.c
libavcodec/truemotion1.c
libavcodec/truemotion2.c
libavcodec/truemotion2rt.c
libavcodec/truespeech.c
libavcodec/tscc.c
libavcodec/tscc2.c
libavcodec/tta.c
libavcodec/ttaenc.c
libavcodec/ttmlenc.c
libavcodec/ttmlenc.h
libavcodec/twinvqdec.c
libavcodec/ulti.c
libavcodec/utils.c
libavcodec/utvideodec.c
libavcodec/utvideoenc.c
libavcodec/v210dec.c
libavcodec/v210enc.c
libavcodec/v210x.c
libavcodec/v308dec.c
libavcodec/v308enc.c
libavcodec/v408dec.c
libavcodec/v408enc.c
libavcodec/v410dec.c
libavcodec/v410enc.c
libavcodec/v4l2_m2m.c
libavcodec/v4l2_m2m_dec.c
libavcodec/v4l2_m2m_enc.c
libavcodec/vaapi_decode.c
libavcodec/vaapi_encode.c
libavcodec/vaapi_encode_h264.c
libavcodec/vaapi_encode_h265.c
libavcodec/vaapi_encode_mjpeg.c
libavcodec/vaapi_encode_mpeg2.c
libavcodec/vaapi_encode_vp8.c
libavcodec/vaapi_encode_vp9.c
libavcodec/vaapi_vp8.c
libavcodec/vb.c
libavcodec/vble.c
libavcodec/vbndec.c
libavcodec/vbnenc.c
libavcodec/vc1_block.c
libavcodec/vc1dec.c
libavcodec/vc1dsp.c
libavcodec/vc2enc.c
libavcodec/vcr1.c
libavcodec/version.h
libavcodec/videodsp.c
libavcodec/videodsp.h
libavcodec/videotoolboxenc.c
libavcodec/vima.c
libavcodec/vmdaudio.c
libavcodec/vmdvideo.c
libavcodec/vmnc.c
libavcodec/vorbisdec.c
libavcodec/vorbisenc.c
libavcodec/vp3.c
libavcodec/vp5.c
libavcodec/vp56.c
libavcodec/vp56.h
libavcodec/vp56rac.c [deleted file]
libavcodec/vp6.c
libavcodec/vp8.c
libavcodec/vp8.h
libavcodec/vp89_rac.h [new file with mode: 0644]
libavcodec/vp9.c
libavcodec/vp9_mc_template.c
libavcodec/vp9block.c
libavcodec/vp9dec.h
libavcodec/vp9mvs.c
libavcodec/vp9prob.c
libavcodec/vp9recon.c
libavcodec/vp9shared.h
libavcodec/vpx_rac.c [new file with mode: 0644]
libavcodec/vpx_rac.h [new file with mode: 0644]
libavcodec/vqavideo.c
libavcodec/wavpack.c
libavcodec/wavpackenc.c
libavcodec/wbmpdec.c [new file with mode: 0644]
libavcodec/wbmpenc.c [new file with mode: 0644]
libavcodec/wcmv.c
libavcodec/webp.c
libavcodec/webvttdec.c
libavcodec/webvttenc.c
libavcodec/wmadec.c
libavcodec/wmaenc.c
libavcodec/wmalosslessdec.c
libavcodec/wmaprodec.c
libavcodec/wmavoice.c
libavcodec/wmv2.c
libavcodec/wmv2.h
libavcodec/wmv2dec.c
libavcodec/wmv2enc.c
libavcodec/wnv1.c
libavcodec/wrapped_avframe.c
libavcodec/ws-snd1.c
libavcodec/x86/Makefile
libavcodec/x86/cfhdencdsp_init.c
libavcodec/x86/flacdsp_init.c
libavcodec/x86/flacencdsp_init.c [new file with mode: 0644]
libavcodec/x86/h264_chromamc.asm
libavcodec/x86/h264_chromamc_10bit.asm
libavcodec/x86/h264chroma_init.c
libavcodec/x86/hevc_add_res.asm
libavcodec/x86/hevc_mc.asm
libavcodec/x86/hevc_sao.asm
libavcodec/x86/hevc_sao_10bit.asm
libavcodec/x86/hevcdsp.h
libavcodec/x86/hevcdsp_init.c
libavcodec/x86/lossless_videoencdsp.asm
libavcodec/x86/lossless_videoencdsp_init.c
libavcodec/x86/me_cmp.asm
libavcodec/x86/me_cmp_init.c
libavcodec/x86/mpegvideodsp.c
libavcodec/x86/mpegvideoenc_qns_template.c
libavcodec/x86/mpegvideoencdsp.asm
libavcodec/x86/mpegvideoencdsp_init.c
libavcodec/x86/pngdsp.asm
libavcodec/x86/pngdsp_init.c
libavcodec/x86/rv40dsp_init.c
libavcodec/x86/vc1dsp_init.c
libavcodec/x86/videodsp_init.c
libavcodec/x86/vp56_arith.h [deleted file]
libavcodec/x86/vpx_arith.h [new file with mode: 0644]
libavcodec/xan.c
libavcodec/xbmenc.c
libavcodec/xfacedec.c
libavcodec/xl.c
libavcodec/xsubdec.c
libavcodec/xsubenc.c
libavcodec/xwdenc.c
libavcodec/xxan.c
libavcodec/y41pdec.c
libavcodec/y41penc.c
libavcodec/ylc.c
libavcodec/yop.c
libavcodec/yuv4dec.c
libavcodec/yuv4enc.c
libavcodec/zerocodec.c
libavcodec/zmbv.c
libavcodec/zmbvenc.c
libavdevice/Makefile
libavdevice/alsa_enc.c
libavdevice/avdevice.c
libavdevice/fbdev_enc.c
libavdevice/lavfi.c
libavdevice/oss_enc.c
libavdevice/pulse_audio_enc.c
libavdevice/sndio_enc.c
libavdevice/v4l2.c
libavdevice/version.h
libavfilter/Makefile
libavfilter/allfilters.c
libavfilter/avfiltergraph.c
libavfilter/buffersrc.c
libavfilter/dnn/dnn_backend_openvino.c
libavfilter/edge_common.c
libavfilter/edge_common.h
libavfilter/edge_template.c [new file with mode: 0644]
libavfilter/f_loop.c
libavfilter/fflcms2.c
libavfilter/fflcms2.h
libavfilter/framesync.c
libavfilter/framesync.h
libavfilter/opencl/remap.cl [new file with mode: 0644]
libavfilter/opencl_source.h
libavfilter/qsvvpp.c
libavfilter/qsvvpp.h
libavfilter/version.h
libavfilter/vf_atadenoise.c
libavfilter/vf_blurdetect.c
libavfilter/vf_chromakey_cuda.c [new file with mode: 0644]
libavfilter/vf_chromakey_cuda.cu [new file with mode: 0644]
libavfilter/vf_cropdetect.c
libavfilter/vf_deinterlace_qsv.c
libavfilter/vf_deshake_opencl.c
libavfilter/vf_drawtext.c
libavfilter/vf_edgedetect.c
libavfilter/vf_overlay_qsv.c
libavfilter/vf_overlay_vaapi.c
libavfilter/vf_remap_opencl.c [new file with mode: 0644]
libavfilter/vf_scale_qsv.c
libavfilter/vf_showinfo.c
libavfilter/vf_signature.c
libavfilter/vf_sr.c
libavfilter/vf_stack.c
libavfilter/vf_xfade.c
libavfilter/vf_zscale.c
libavfilter/vsrc_ddagrab.c [new file with mode: 0644]
libavfilter/vsrc_ddagrab_shaders.h [new file with mode: 0644]
libavformat/Makefile
libavformat/aaxdec.c
libavformat/ac3dec.c
libavformat/aiffdec.c
libavformat/allformats.c
libavformat/apngdec.c
libavformat/apngenc.c
libavformat/argo_cvg.c
libavformat/asfcrypt.c
libavformat/asfdec_f.c
libavformat/avformat.c
libavformat/avienc.c
libavformat/avisynth.c
libavformat/bintext.c
libavformat/caf.c
libavformat/cafdec.c
libavformat/cinedec.c
libavformat/demux.c
libavformat/dvenc.c
libavformat/fifo.c
libavformat/flvdec.c
libavformat/flvenc.c
libavformat/gif.c
libavformat/hevc.c
libavformat/hls.c
libavformat/hlsenc.c
libavformat/iff.c
libavformat/imfdec.c
libavformat/img2.c
libavformat/img2dec.c
libavformat/img2enc.c
libavformat/internal.h
libavformat/ipfsgateway.c
libavformat/isom.c
libavformat/isom.h
libavformat/isom_tags.c
libavformat/lrcenc.c
libavformat/matroskadec.c
libavformat/mlpdec.c
libavformat/mov.c
libavformat/movenc.c
libavformat/movenc.h
libavformat/movenchint.c
libavformat/mpc8.c
libavformat/mpegenc.c
libavformat/mpegtsenc.c
libavformat/mux.c
libavformat/mux.h
libavformat/mux_utils.c
libavformat/mxf.c
libavformat/mxf.h
libavformat/mxfdec.c
libavformat/mxfenc.c
libavformat/nutdec.c
libavformat/oggenc.c
libavformat/protocols.c
libavformat/rawenc.c
libavformat/riff.c
libavformat/rtsp.c
libavformat/sctp.c
libavformat/segment.c
libavformat/spdifenc.c
libavformat/supenc.c
libavformat/swfenc.c
libavformat/tee.c
libavformat/ttmlenc.c
libavformat/version.h
libavformat/webm_chunk.c
libavutil/Makefile
libavutil/buffer.c
libavutil/frame.c
libavutil/frame.h
libavutil/hwcontext_d3d11va.c
libavutil/hwcontext_opencl.c
libavutil/hwcontext_qsv.c
libavutil/hwcontext_qsv.h
libavutil/hwcontext_stub.c [new file with mode: 0644]
libavutil/hwcontext_vaapi.c
libavutil/hwcontext_videotoolbox.h
libavutil/loongarch/loongson_intrinsics.h
libavutil/mem.c
libavutil/opt.c
libavutil/pixdesc.c
libavutil/pixfmt.h
libavutil/tests/pixfmt_best.c
libavutil/tx.c
libavutil/tx_priv.h
libavutil/tx_template.c
libavutil/version.h
libavutil/x86/tx_float.asm
libpostproc/Makefile
libpostproc/version.h
libswresample/Makefile
libswresample/version.h
libswscale/Makefile
libswscale/aarch64/hscale.S
libswscale/aarch64/output.S
libswscale/aarch64/swscale.c
libswscale/input.c
libswscale/output.c
libswscale/rgb2rgb.c
libswscale/utils.c
libswscale/version.h
libswscale/x86/swscale.c
tests/api/api-h264-test.c
tests/api/api-seek-test.c
tests/checkasm/Makefile
tests/checkasm/checkasm.c
tests/checkasm/checkasm.h
tests/checkasm/flacdsp.c
tests/checkasm/hevc_add_res.c
tests/checkasm/motion.c
tests/checkasm/sw_scale.c
tests/fate-run.sh
tests/fate/ffmpeg.mak
tests/fate/filter-video.mak
tests/fate/h264.mak
tests/fate/hevc.mak
tests/fate/image.mak
tests/fate/imf.mak
tests/fate/lavf-image.mak
tests/fate/mov.mak
tests/fate/pcm.mak
tests/ref/fate/audiomatch-afconvert-16000-mono-he-m4a
tests/ref/fate/audiomatch-afconvert-16000-stereo-he-m4a
tests/ref/fate/audiomatch-afconvert-16000-stereo-he2-m4a
tests/ref/fate/audiomatch-afconvert-44100-mono-he-m4a
tests/ref/fate/audiomatch-afconvert-44100-stereo-he-m4a
tests/ref/fate/audiomatch-afconvert-44100-stereo-he2-m4a
tests/ref/fate/audiomatch-nero-16000-mono-he-m4a
tests/ref/fate/audiomatch-nero-16000-stereo-he-m4a
tests/ref/fate/audiomatch-nero-16000-stereo-he2-m4a
tests/ref/fate/audiomatch-nero-44100-mono-he-m4a
tests/ref/fate/audiomatch-nero-44100-stereo-he-m4a
tests/ref/fate/audiomatch-nero-44100-stereo-he2-m4a
tests/ref/fate/copy-shortest1
tests/ref/fate/copy-shortest2
tests/ref/fate/exif-image-embedded
tests/ref/fate/exif-image-jpg
tests/ref/fate/exif-image-tiff
tests/ref/fate/exif-image-webp
tests/ref/fate/ffprobe_compact
tests/ref/fate/ffprobe_csv
tests/ref/fate/ffprobe_default
tests/ref/fate/ffprobe_flat
tests/ref/fate/ffprobe_ini
tests/ref/fate/ffprobe_json
tests/ref/fate/ffprobe_xml
tests/ref/fate/ffprobe_xsd
tests/ref/fate/filter-metadata-cropdetect
tests/ref/fate/filter-metadata-cropdetect1 [new file with mode: 0644]
tests/ref/fate/filter-metadata-cropdetect2 [new file with mode: 0644]
tests/ref/fate/filter-metadata-freezedetect
tests/ref/fate/filter-metadata-scdet
tests/ref/fate/filter-metadata-signalstats-yuv420p
tests/ref/fate/filter-metadata-signalstats-yuv420p10
tests/ref/fate/filter-pixdesc-nv16 [new file with mode: 0644]
tests/ref/fate/filter-pixdesc-vuya [new file with mode: 0644]
tests/ref/fate/filter-pixfmts-copy
tests/ref/fate/filter-pixfmts-crop
tests/ref/fate/filter-pixfmts-field
tests/ref/fate/filter-pixfmts-fieldorder
tests/ref/fate/filter-pixfmts-hflip
tests/ref/fate/filter-pixfmts-il
tests/ref/fate/filter-pixfmts-null
tests/ref/fate/filter-pixfmts-pad
tests/ref/fate/filter-pixfmts-scale
tests/ref/fate/filter-pixfmts-transpose
tests/ref/fate/filter-pixfmts-vflip
tests/ref/fate/flcl1905
tests/ref/fate/flv-add_keyframe_index
tests/ref/fate/h264-afd [new file with mode: 0644]
tests/ref/fate/h264-dts_5frames
tests/ref/fate/hevc-conformance-WPP_HIGH_TP_444_8BIT_RExt_Apple_2 [new file with mode: 0644]
tests/ref/fate/imgutils
tests/ref/fate/jpg-icc
tests/ref/fate/matroska-flac-extradata-update
tests/ref/fate/mkv-1242
tests/ref/fate/mov-avif-demux-still-image-1-item [new file with mode: 0644]
tests/ref/fate/mov-avif-demux-still-image-multiple-items [new file with mode: 0644]
tests/ref/fate/mov-zombie
tests/ref/fate/pcm_dvd-16-1-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-16-1-96000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-16-2-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-16-5.1-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-16-5.1-96000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-16-7.1-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-24-1-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-24-2-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-24-5.1-48000 [new file with mode: 0644]
tests/ref/fate/pcm_dvd-24-7.1-48000 [new file with mode: 0644]
tests/ref/fate/pixfmt_best
tests/ref/fate/png-icc
tests/ref/fate/png-icc-parse [new file with mode: 0644]
tests/ref/fate/png-side-data
tests/ref/fate/shortest-sub [new file with mode: 0644]
tests/ref/fate/sws-pixdesc-query
tests/ref/fate/wmv8-x8intra
tests/ref/lavf/bmp
tests/ref/lavf/dpx
tests/ref/lavf/gbrp10le.dpx
tests/ref/lavf/gbrp12le.dpx
tests/ref/lavf/gbrpf32be.pfm [new file with mode: 0644]
tests/ref/lavf/gbrpf32le.pfm [new file with mode: 0644]
tests/ref/lavf/gray.pam
tests/ref/lavf/gray.xwd
tests/ref/lavf/gray16be.pam
tests/ref/lavf/gray16be.png
tests/ref/lavf/grayf32be.pfm [new file with mode: 0644]
tests/ref/lavf/grayf32le.pfm [new file with mode: 0644]
tests/ref/lavf/jpg
tests/ref/lavf/monob.pam
tests/ref/lavf/monow.xwd
tests/ref/lavf/none.gbrapf32le.exr [new file with mode: 0644]
tests/ref/lavf/none.gbrpf32le.exr [new file with mode: 0644]
tests/ref/lavf/none.grayf32le.exr [new file with mode: 0644]
tests/ref/lavf/pam
tests/ref/lavf/pcx
tests/ref/lavf/pgm
tests/ref/lavf/png
tests/ref/lavf/ppm
tests/ref/lavf/qoi
tests/ref/lavf/rgb48be.pam
tests/ref/lavf/rgb48be.png
tests/ref/lavf/rgb48le.dpx
tests/ref/lavf/rgb48le_10.dpx
tests/ref/lavf/rgb4_byte.xwd
tests/ref/lavf/rgb555be.xwd
tests/ref/lavf/rgb565be.xwd
tests/ref/lavf/rgb8.xwd
tests/ref/lavf/rgba.pam
tests/ref/lavf/rgba.xwd
tests/ref/lavf/rgba64le.dpx
tests/ref/lavf/rle.gbrapf32le.exr [new file with mode: 0644]
tests/ref/lavf/rle.gbrpf32le.exr [new file with mode: 0644]
tests/ref/lavf/rle.grayf32le.exr [new file with mode: 0644]
tests/ref/lavf/sgi
tests/ref/lavf/sun
tests/ref/lavf/tga
tests/ref/lavf/tiff
tests/ref/lavf/wbmp [new file with mode: 0644]
tests/ref/lavf/xbm
tests/ref/lavf/xwd
tests/ref/lavf/zip1.gbrapf32le.exr [new file with mode: 0644]
tests/ref/lavf/zip1.gbrpf32le.exr [new file with mode: 0644]
tests/ref/lavf/zip1.grayf32le.exr [new file with mode: 0644]
tests/ref/lavf/zip16.gbrapf32le.exr [new file with mode: 0644]
tests/ref/lavf/zip16.gbrpf32le.exr [new file with mode: 0644]
tests/ref/lavf/zip16.grayf32le.exr [new file with mode: 0644]
tests/ref/seek/vsynth_lena-mpeg2-422
tests/ref/seek/vsynth_lena-mpeg4-adap
tests/ref/vsynth/vsynth1-mpeg2-422
tests/ref/vsynth/vsynth1-mpeg4-adap
tests/ref/vsynth/vsynth2-mpeg2-422
tests/ref/vsynth/vsynth2-mpeg4-adap
tests/ref/vsynth/vsynth3-mpeg2-422
tests/ref/vsynth/vsynth3-mpeg4-adap
tests/ref/vsynth/vsynth_lena-mpeg2-422
tests/ref/vsynth/vsynth_lena-mpeg4-adap
tools/Makefile
tools/target_dec_fuzzer.c

index c39cc50..fa83786 100644 (file)
--- a/Changelog
+++ b/Changelog
@@ -1,6 +1,17 @@
 Entries are sorted chronologically from oldest to youngest within each release,
 releases are sorted from youngest to oldest.
 
+version <next>:
+- Radiance HDR image support
+- ddagrab (Desktop Duplication) video capture filter
+- ffmpeg -shortest_buf_duration option
+- ffmpeg now requires threading to be built
+- ffmpeg now runs every muxer in a separate thread
+- Add new mode to cropdetect filter to detect crop-area based on motion vectors and edges
+- VAAPI decoding and encoding for 8bit 444 HEVC and VP9
+- WBMP (Wireless Application Protocol Bitmap) image format
+
+
 version 5.1:
 - add ipfs/ipns protocol support
 - dialogue enhance audio filter
@@ -22,6 +33,9 @@ version 5.1:
 - ffprobe -o option
 - virtualbass audio filter
 - VDPAU AV1 hwaccel
+- PHM image format support
+- remap_opencl filter
+- added chromakey_cuda filter
 
 
 version 5.0:
index 274fc89..ed2ec0b 100644 (file)
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -40,7 +40,8 @@ Miscellaneous Areas
 ===================
 
 documentation                           Stefano Sabatini, Mike Melanson, Timothy Gu, Gyan Doshi
-project server                          Árpád Gereöffy, Michael Niedermayer, Reimar Doeffinger, Alexander Strasser, Nikolay Aleksandrov
+project server day to day operations    Árpád Gereöffy, Michael Niedermayer, Reimar Doeffinger, Alexander Strasser, Nikolay Aleksandrov
+project server emergencies              Árpád Gereöffy, Reimar Doeffinger, Alexander Strasser, Nikolay Aleksandrov
 presets                                 Robert Swain
 metadata subsystem                      Aurelien Jacobs
 release management                      Michael Niedermayer
@@ -625,6 +626,7 @@ Leo Izen (thebombzen)         B6FD 3CFC 7ACF 83FC 9137 6945 5A71 C331 FD2F A19A
 Loren Merritt                 ABD9 08F4 C920 3F65 D8BE 35D7 1540 DAA7 060F 56DE
 Lynne                         FE50 139C 6805 72CA FD52 1F8D A2FE A5F0 3F03 4464
 Michael Niedermayer           9FF2 128B 147E F673 0BAD F133 611E C787 040B 0FAB
+                              DD1E C9E8 DE08 5C62 9B3E 1846 B18E 8928 B394 8D64
 Nicolas George                24CE 01CE 9ACC 5CEB 74D8 8D9D B063 D997 36E5 4C93
 Niklas Haas (haasn)           1DDB 8076 B14D 5B48 32FC 99D9 EB52 DA9C 02BA 6FB4
 Nikolay Aleksandrov           8978 1D8C FB71 588E 4B27 EAA8 C4F0 B5FC E011 13B1
index 35365f0..61f79e2 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -78,6 +78,7 @@ tools/target_dem_%_fuzzer$(EXESUF): $(FF_DEP_LIBS)
 CONFIGURABLE_COMPONENTS =                                           \
     $(wildcard $(FFLIBS:%=$(SRC_PATH)/lib%/all*.c))                 \
     $(SRC_PATH)/libavcodec/bitstream_filters.c                      \
+    $(SRC_PATH)/libavcodec/hwaccels.h                               \
     $(SRC_PATH)/libavcodec/parsers.c                                \
     $(SRC_PATH)/libavformat/protocols.c                             \
 
diff --git a/RELEASE b/RELEASE
index 826fe71..238679d 100644 (file)
--- a/RELEASE
+++ b/RELEASE
@@ -1 +1 @@
-4.4.git
+5.1.git
index 30cd085..5837c1a 100644 (file)
--- a/compat/cuda/cuda_runtime.h
+++ b/compat/cuda/cuda_runtime.h
@@ -181,7 +181,9 @@ static inline __device__ double trunc(double a) { return __builtin_trunc(a); }
 static inline __device__ float fabsf(float a) { return __builtin_fabsf(a); }
 static inline __device__ float fabs(float a) { return __builtin_fabsf(a); }
 static inline __device__ double fabs(double a) { return __builtin_fabs(a); }
+static inline __device__ float sqrtf(float a) { return __builtin_sqrtf(a); }
 
+static inline __device__ float __saturatef(float a) { return __nvvm_saturate_f(a); }
 static inline __device__ float __sinf(float a) { return __nvvm_sin_approx_f(a); }
 static inline __device__ float __cosf(float a) { return __nvvm_cos_approx_f(a); }
 static inline __device__ float __expf(float a) { return __nvvm_ex2_approx_f(a * (float)__builtin_log2(__builtin_exp(1))); }
diff --git a/compat/windows/mswindres b/compat/windows/mswindres
new file mode 100755 (executable)
index 0000000..450525a
--- /dev/null
+++ b/compat/windows/mswindres
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+if [ "$1" = "--version" ]; then
+    rc.exe /?
+    exit $?
+fi
+
+if [ $# -lt 2 ]; then
+    echo "Usage: mswindres [-I/include/path ...] [-DSOME_DEFINE ...] [-o output.o] input.rc [output.o]" >&2
+    exit 0
+fi
+
+EXTRA_OPTS="/nologo"
+
+while [ $# -gt 2 ]; do
+    case $1 in
+    -D*) EXTRA_OPTS="$EXTRA_OPTS /d$(echo $1 | sed -e "s/^..//" -e "s/ /\\\\ /g")" ;;
+    -I*) EXTRA_OPTS="$EXTRA_OPTS /i$(echo $1 | sed -e "s/^..//" -e "s/ /\\\\ /g")" ;;
+    -o)  OPT_OUT="$2"; shift ;;
+    esac
+    shift
+done
+
+IN="$1"
+if [ -z "$OPT_OUT" ]; then
+    OUT="$2"
+else
+    OUT="$OPT_OUT"
+fi
+
+eval set -- $EXTRA_OPTS
+rc.exe "$@" /fo "$OUT" "$IN"
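
For illustration, the wrapper above translates GNU windres-style arguments
into an rc.exe command line; with hypothetical arguments (the define, the
include path and the object name below are placeholders), an invocation such as

    compat/windows/mswindres -DFOO=1 -Iinclude -o fftoolsres.o fftools/fftoolsres.rc

would effectively run

    rc.exe /nologo /dFOO=1 /iinclude /fo fftoolsres.o fftools/fftoolsres.rc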
index 0de9b2a..fe94941 100755 (executable)
--- a/configure
+++ b/configure
@@ -341,6 +341,7 @@ External library support:
   --disable-ffnvcodec      disable dynamically linked Nvidia code [autodetect]
   --enable-libdrm          enable DRM code (Linux) [no]
   --enable-libmfx          enable Intel MediaSDK (AKA Quick Sync Video) code via libmfx [no]
+  --enable-libvpl          enable Intel oneVPL code via libvpl if libmfx is not used [no]
   --enable-libnpp          enable Nvidia Performance Primitives-based code [no]
   --enable-mmal            enable Broadcom Multi-Media Abstraction Layer (Raspberry Pi) via MMAL [no]
   --disable-nvdec          disable Nvidia video decoding acceleration (via hwaccel) [autodetect]
@@ -1921,6 +1922,7 @@ HWACCEL_LIBRARY_NONFREE_LIST="
 HWACCEL_LIBRARY_LIST="
     $HWACCEL_LIBRARY_NONFREE_LIST
     libmfx
+    libvpl
     mmal
     omx
     opencl
@@ -2352,6 +2354,8 @@ TOOLCHAIN_FEATURES="
 "
 
 TYPES_LIST="
+    DPI_AWARENESS_CONTEXT
+    IDXGIOutput5
     kCMVideoCodecType_HEVC
     kCMVideoCodecType_HEVCWithAlpha
     kCMVideoCodecType_VP9
@@ -2447,7 +2451,6 @@ CONFIG_EXTRA="
     faandct
     faanidct
     fdctdsp
-    flacdsp
     fmtconvert
     frame_thread_encoder
     g722dsp
@@ -2488,6 +2491,8 @@ CONFIG_EXTRA="
     mpegvideo
     mpegvideodec
     mpegvideoenc
+    msmpeg4dec
+    msmpeg4enc
     mss34dsp
     pixblockdsp
     qpeldsp
@@ -2731,7 +2736,7 @@ h264dsp_select="startcode"
 hevcparse_select="atsc_a53 golomb"
 frame_thread_encoder_deps="encoders threads"
 inflate_wrapper_deps="zlib"
-intrax8_select="blockdsp idctdsp"
+intrax8_select="blockdsp wmv2dsp"
 iso_media_select="mpeg4audio"
 mdct_select="fft"
 mdct15_select="fft"
@@ -2742,6 +2747,8 @@ mpegaudiodsp_select="dct"
 mpegvideo_select="blockdsp h264chroma hpeldsp idctdsp videodsp"
 mpegvideodec_select="mpegvideo mpeg_er"
 mpegvideoenc_select="aandcttables me_cmp mpegvideo pixblockdsp qpeldsp"
+msmpeg4dec_select="h263_decoder"
+msmpeg4enc_select="h263_encoder"
 vc1dsp_select="h264chroma qpeldsp startcode"
 rdft_select="fft"
 
@@ -2815,8 +2822,7 @@ ffv1_encoder_select="rangecoder"
 ffvhuff_decoder_select="huffyuv_decoder"
 ffvhuff_encoder_select="huffyuv_encoder"
 fic_decoder_select="golomb"
-flac_decoder_select="flacdsp"
-flac_encoder_select="bswapdsp flacdsp lpc"
+flac_encoder_select="bswapdsp lpc"
 flashsv2_decoder_select="inflate_wrapper"
 flashsv2_encoder_select="deflate_wrapper"
 flashsv_decoder_select="inflate_wrapper"
@@ -2891,11 +2897,11 @@ mpeg4_decoder_select="h263_decoder mpeg4video_parser"
 mpeg4_encoder_select="h263_encoder"
 msa1_decoder_select="mss34dsp"
 mscc_decoder_select="inflate_wrapper"
-msmpeg4v1_decoder_select="h263_decoder"
-msmpeg4v2_decoder_select="h263_decoder"
-msmpeg4v2_encoder_select="h263_encoder"
-msmpeg4v3_decoder_select="h263_decoder"
-msmpeg4v3_encoder_select="h263_encoder"
+msmpeg4v1_decoder_select="msmpeg4dec"
+msmpeg4v2_decoder_select="msmpeg4dec"
+msmpeg4v2_encoder_select="msmpeg4enc"
+msmpeg4v3_decoder_select="msmpeg4dec"
+msmpeg4v3_encoder_select="msmpeg4enc"
 mss2_decoder_select="mpegvideodec qpeldsp vc1_decoder"
 mts2_decoder_select="jpegtables mss34dsp"
 mv30_decoder_select="aandcttables blockdsp"
@@ -2966,7 +2972,7 @@ utvideo_encoder_select="bswapdsp huffman llvidencdsp"
 vble_decoder_select="llviddsp"
 vbn_decoder_select="texturedsp"
 vbn_encoder_select="texturedspenc"
-vc1_decoder_select="blockdsp h263_decoder h264qpel intrax8 mpegvideodec vc1dsp"
+vc1_decoder_select="blockdsp h264qpel intrax8 mpegvideodec msmpeg4dec vc1dsp"
 vc1image_decoder_select="vc1_decoder"
 vorbis_decoder_select="mdct"
 vorbis_encoder_select="audio_frame_queue mdct"
@@ -2988,10 +2994,10 @@ wmav1_encoder_select="mdct sinewin wma_freqs"
 wmav2_decoder_select="mdct sinewin wma_freqs"
 wmav2_encoder_select="mdct sinewin wma_freqs"
 wmavoice_decoder_select="lsp rdft dct mdct sinewin"
-wmv1_decoder_select="h263_decoder"
-wmv1_encoder_select="h263_encoder"
-wmv2_decoder_select="blockdsp error_resilience h263_decoder idctdsp intrax8 videodsp wmv2dsp"
-wmv2_encoder_select="h263_encoder wmv2dsp"
+wmv1_decoder_select="msmpeg4dec"
+wmv1_encoder_select="msmpeg4enc"
+wmv2_decoder_select="blockdsp error_resilience idctdsp intrax8 msmpeg4dec videodsp wmv2dsp"
+wmv2_encoder_select="msmpeg4enc wmv2dsp"
 wmv3_decoder_select="vc1_decoder"
 wmv3image_decoder_select="wmv3_decoder"
 xma1_decoder_select="wmapro_decoder"
@@ -3142,6 +3148,8 @@ qsvvpp_select="qsv"
 vaapi_encode_deps="vaapi"
 v4l2_m2m_deps="linux_videodev2_h sem_timedwait"
 
+chromakey_cuda_filter_deps="ffnvcodec"
+chromakey_cuda_filter_deps_any="cuda_nvcc cuda_llvm"
 hwupload_cuda_filter_deps="ffnvcodec"
 scale_npp_filter_deps="ffnvcodec libnpp"
 scale2ref_npp_filter_deps="ffnvcodec libnpp"
@@ -3154,6 +3162,8 @@ overlay_cuda_filter_deps="ffnvcodec"
 overlay_cuda_filter_deps_any="cuda_nvcc cuda_llvm"
 sharpen_npp_filter_deps="ffnvcodec libnpp"
 
+ddagrab_filter_deps="d3d11va IDXGIOutput1 DXGI_OUTDUPL_FRAME_INFO"
+
 amf_deps_any="libdl LoadLibrary"
 nvenc_deps="ffnvcodec"
 nvenc_deps_any="libdl LoadLibrary"
@@ -3444,7 +3454,7 @@ mlp_demuxer_select="mlp_parser"
 mmf_muxer_select="riffenc"
 mov_demuxer_select="iso_media riffdec"
 mov_demuxer_suggest="zlib"
-mov_muxer_select="iso_media riffenc rtpenc_chain vp9_superframe_bsf aac_adtstoasc_bsf"
+mov_muxer_select="iso_media riffenc rtpenc_chain vp9_superframe_bsf aac_adtstoasc_bsf ac3_parser"
 mp3_demuxer_select="mpegaudio_parser"
 mp3_muxer_select="mpegaudioheader"
 mp4_muxer_select="mov_muxer"
@@ -3706,6 +3716,7 @@ prewitt_opencl_filter_deps="opencl"
 procamp_vaapi_filter_deps="vaapi"
 program_opencl_filter_deps="opencl"
 pullup_filter_deps="gpl"
+remap_opencl_filter_deps="opencl"
 removelogo_filter_deps="avcodec avformat swscale"
 repeatfields_filter_deps="gpl"
 roberts_opencl_filter_deps="opencl"
@@ -3807,12 +3818,12 @@ swresample_suggest="libm libsoxr stdatomic"
 swscale_deps="avutil"
 swscale_suggest="libm stdatomic"
 
-avcodec_extralibs="pthreads_extralibs iconv_extralibs dxva2_extralibs"
+avcodec_extralibs="pthreads_extralibs iconv_extralibs dxva2_extralibs lcms2_extralibs"
 avfilter_extralibs="pthreads_extralibs"
 avutil_extralibs="d3d11va_extralibs nanosleep_extralibs pthreads_extralibs vaapi_drm_extralibs vaapi_x11_extralibs vdpau_x11_extralibs"
 
 # programs
-ffmpeg_deps="avcodec avfilter avformat"
+ffmpeg_deps="avcodec avfilter avformat threads"
 ffmpeg_select="aformat_filter anull_filter atrim_filter format_filter
                hflip_filter null_filter
                transpose_filter trim_filter vflip_filter"
@@ -4360,6 +4371,7 @@ case "$toolchain" in
             die "Unsupported MSVC version (2013 or newer required)"
         fi
         ld_default="$source_path/compat/windows/mslink"
+        windres_default="$source_path/compat/windows/mswindres"
         nm_default="dumpbin.exe -symbols"
         ar_default="lib.exe"
         case "${arch:-$arch_default}" in
@@ -5560,7 +5572,7 @@ case $target_os in
             # Cannot build both shared and static libs when using dllimport.
             disable static
         fi
-        enabled shared && ! enabled small && test_cmd $windres --version && enable gnu_windres
+        ! enabled small && test_cmd $windres --version && enable gnu_windres
         enabled x86_32 && check_ldflags -Wl,--large-address-aware
         shlibdir_default="$bindir_default"
         SLIBPREF=""
@@ -5610,6 +5622,7 @@ case $target_os in
             # Cannot build both shared and static libs with MSVC or icl.
             disable static
         fi
+        ! enabled small && test_cmd $windres --version && enable gnu_windres
         enabled x86_32 && check_ldflags -LARGEADDRESSAWARE
         shlibdir_default="$bindir_default"
         SLIBPREF=""
@@ -5639,7 +5652,7 @@ case $target_os in
         SHFLAGS='-shared -Wl,--out-implib,$(SUBDIR)lib$(FULLNAME).dll.a'
         enabled x86_64 && objformat="win64" || objformat="win32"
         enable dos_paths
-        enabled shared && ! enabled small && test_cmd $windres --version && enable gnu_windres
+        ! enabled small && test_cmd $windres --version && enable gnu_windres
         add_cppflags -D_POSIX_C_SOURCE=200112 -D_XOPEN_SOURCE=600
         ;;
     *-dos|freedos|opendos)
@@ -6388,8 +6401,12 @@ check_struct "sys/time.h sys/resource.h" "struct rusage" ru_maxrss
 check_type "windows.h dxva.h" "DXVA_PicParams_AV1" -DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP -D_CRT_BUILD_DESKTOP_APP=0
 check_type "windows.h dxva.h" "DXVA_PicParams_HEVC" -DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP -D_CRT_BUILD_DESKTOP_APP=0
 check_type "windows.h dxva.h" "DXVA_PicParams_VP9" -DWINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP -D_CRT_BUILD_DESKTOP_APP=0
+check_type "windows.h dxgi1_2.h" "DXGI_OUTDUPL_FRAME_INFO"
+check_type "windows.h dxgi1_2.h" "IDXGIOutput1"
+check_type "windows.h dxgi1_5.h" "IDXGIOutput5"
 check_type "windows.h d3d11.h" "ID3D11VideoDecoder"
 check_type "windows.h d3d11.h" "ID3D11VideoContext"
+check_type "windows.h" "DPI_AWARENESS_CONTEXT" -D_WIN32_WINNT=0x0A00
 check_type "d3d9.h dxva2api.h" DXVA2_ConfigPictureDecode -D_WIN32_WINNT=0x0602
 check_func_headers mfapi.h MFCreateAlignedMemoryBuffer -lmfplat
 
@@ -6564,19 +6581,37 @@ enabled libjxl            && require_pkg_config libjxl "libjxl >= 0.7.0" jxl/dec
 enabled libklvanc         && require libklvanc libklvanc/vanc.h klvanc_context_create -lklvanc
 enabled libkvazaar        && require_pkg_config libkvazaar "kvazaar >= 0.8.1" kvazaar.h kvz_api_get
 enabled liblensfun        && require_pkg_config liblensfun lensfun lensfun.h lf_db_new
+
+if enabled libmfx && enabled libvpl; then
+   die "ERROR: can not use libmfx and libvpl together"
 # While it may appear that require is being used as a pkg-config
 # fallback for libmfx, it is actually being used to detect a different
 # installation route altogether.  If libmfx is installed via the Intel
 # Media SDK or Intel Media Server Studio, these don't come with
 # pkg-config support.  Instead, users should make sure that the build
 # can find the libraries and headers through other means.
-enabled libmfx            && { check_pkg_config libmfx "libmfx >= 1.28" "mfx/mfxvideo.h" MFXInit ||
-                               { require libmfx "mfx/mfxvideo.h mfx/mfxdefs.h" MFXInit "-llibmfx $advapi32_extralibs" &&
-                                 { test_cpp_condition mfx/mfxdefs.h "MFX_VERSION >= 1028" || die "ERROR: libmfx version must be >= 1.28"; }  &&
-                                 warn "using libmfx without pkg-config"; } }
+elif enabled libmfx; then
+    { check_pkg_config libmfx "libmfx >= 1.28 libmfx < 2.0" "mfxvideo.h" MFXInit ||
+# Some old versions of libmfx have the following settings in libmfx.pc:
+#   includedir=/usr/include
+#   Cflags: -I${includedir}
+# So add -I${includedir}/mfx to CFLAGS
+      { check_pkg_config libmfx "libmfx >= 1.28 libmfx < 2.0" "mfx/mfxvideo.h" MFXInit && add_cflags -I${libmfx_incdir}/mfx; } ||
+      { require libmfx "mfxvideo.h mfxdefs.h" MFXInit "-llibmfx $advapi32_extralibs" &&
+        { test_cpp_condition mfxdefs.h "MFX_VERSION >= 1028 && MFX_VERSION < 2000" || die "ERROR: libmfx version must be >= 1.28 and < 2.0"; }  &&
+        warn "using libmfx without pkg-config"; } } &&
+    warn "build FFmpeg against libmfx 1.x, obsolete features of libmfx such as OPAQUE memory,\n"\
+         "multi-frame encode, user plugins and LA_EXT rate control mode are enabled"
+elif enabled libvpl; then
+# Consider pkg-config only. The name of libmfx is still passed to check_pkg_config function for --enable-libvpl option
+# because QSV has dependency on libmfx, we can use the same dependency if using libmfx in this check. The package name
+# is extracted from "vpl >= 2.6"
+    check_pkg_config libmfx "vpl >= 2.6" "mfxvideo.h mfxdispatcher.h" MFXLoad || \
+            die "ERROR: libvpl >= 2.6 not found"
+fi
 
 if enabled libmfx; then
-   check_cc MFX_CODEC_VP9 "mfx/mfxvp9.h mfx/mfxstructures.h" "MFX_CODEC_VP9"
+   check_cc MFX_CODEC_VP9 "mfxdefs.h mfxstructures.h" "MFX_CODEC_VP9"
 fi
 
 enabled libmodplug        && require_pkg_config libmodplug libmodplug libmodplug/modplug.h ModPlug_Load
@@ -6663,8 +6698,8 @@ enabled libvpx            && {
 enabled libwebp           && {
     enabled libwebp_encoder      && require_pkg_config libwebp "libwebp >= 0.2.0" webp/encode.h WebPGetEncoderVersion
     enabled libwebp_anim_encoder && check_pkg_config libwebp_anim_encoder "libwebpmux >= 0.4.0" webp/mux.h WebPAnimEncoderOptionsInit; }
-enabled libx264           && check_pkg_config libx264 x264 "stdint.h x264.h" x264_encoder_encode &&
-                             require_cpp_condition libx264 x264.h "X264_BUILD >= 118" && {
+enabled libx264           && require_pkg_config libx264 x264 "stdint.h x264.h" x264_encoder_encode &&
+                             require_cpp_condition libx264 x264.h "X264_BUILD >= 122" && {
                              [ "$toolchain" != "msvc" ] ||
                              require_cpp_condition libx264 x264.h "X264_BUILD >= 158"; } &&
                              check_cpp_condition libx262 x264.h "X264_MPEG2"
@@ -6755,7 +6790,7 @@ if enabled sdl2; then
         sdl2_cflags=$("${SDL2_CONFIG}" --cflags)
         sdl2_extralibs=$("${SDL2_CONFIG}" --libs)
         test_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) >= 0x020001" $sdl2_cflags &&
-        test_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) < 0x020100" $sdl2_cflags &&
+        test_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) < 0x030000" $sdl2_cflags &&
         check_func_headers SDL_events.h SDL_PollEvent $sdl2_extralibs $sdl2_cflags &&
             enable sdl2
     fi
index 20b9449..b3ba07e 100644 (file)
--- a/doc/APIchanges
+++ b/doc/APIchanges
@@ -14,24 +14,53 @@ libavutil:     2021-04-27
 
 API changes, most recent first:
 
-2022-06-12 - xxxxxxxxxx - lavf 59.25.100 - avio.h
+2022-08-07 - e95b08a7dd - lavu 57.33.101 - pixfmt.h
+  Add AV_PIX_FMT_RGBAF16{BE,LE} pixel formats.
+
+2022-08-xx - xxxxxxxxxx - lavu 57.33.100 - hwcontext_qsv.h
+  Add loader field to AVQSVDeviceContext
+
+2022-08-03 - xxxxxxxxxx - lavu 57.32.100 - pixfmt.h
+  Add AV_PIX_FMT_VUYA.
+
+2022-08-xx - xxxxxxxxxx - lavc 59.41.100 - avcodec.h codec.h
+  Add AV_CODEC_FLAG_RECON_FRAME and AV_CODEC_CAP_ENCODER_RECON_FRAME.
+  avcodec_receive_frame() may now be used on encoders when
+  AV_CODEC_FLAG_RECON_FRAME is active.
+
+2022-08-xx - xxxxxxxxxx - lavu 57.31.100 - frame.h
+  av_frame_make_writable() may now be called on non-refcounted
+  frames and will make a refcounted copy out of them.
+  Previously an error was returned in such cases.
+
+2022-07-xx - xxxxxxxxx - lavc 59.40.100 - avcodec.h
+  Add the AV_CODEC_FLAG2_ICC_PROFILES flag to AVCodecContext, to enable
+  automatic reading and writing of embedded ICC profiles in image files.
+  The "flags2" option now supports the corresponding flag "icc_profiles".
+
+2022-07-xx - xxxxxxxxxx - lavu 57.30.100 - frame.h
+  Add AVFrame.duration, deprecate AVFrame.pkt_duration.
+
+-------- 8< --------- FFmpeg 5.1 was cut here -------- 8< ---------
+
+2022-06-12 - 7cae3d8b76 - lavf 59.25.100 - avio.h
   Add avio_vprintf(), similar to avio_printf() but allow to use it
   from within a function taking a variable argument list as input.
 
-2022-06-12 - xxxxxxxxx - lavu 57.27.100 - uuid.h
+2022-06-12 - ff59ecc4de - lavu 57.27.100 - uuid.h
   Add UUID handling functions.
   Add av_uuid_parse(), av_uuid_urn_parse(), av_uuid_parse_range(),
   av_uuid_parse_range(), av_uuid_equal(), av_uuid_copy(), and av_uuid_nil().
 
-2022-06-01 - xxxxxxxxx - lavu 57.26.100 - csp.h
+2022-06-01 - d42b410e05 - lavu 57.26.100 - csp.h
   Add public API for colorspace structs.
   Add av_csp_luma_coeffs_from_avcsp(), av_csp_primaries_desc_from_id(),
   and av_csp_primaries_id_from_desc().
 
-2022-05-23 - xxxxxxxxx - lavu 57.25.100 - avutil.h
+2022-05-23 - 4cdc14aa95 - lavu 57.25.100 - avutil.h
   Deprecate av_fopen_utf8() without replacement.
 
-2022-03-16 - xxxxxxxxxx - all libraries - version_major.h
+2022-03-16 - f3a0e2ee2b - all libraries - version_major.h
   Add lib<name>/version_major.h as new installed headers, which only
   contain the major version number (and corresponding API deprecation
   defines).
@@ -73,10 +102,10 @@ API changes, most recent first:
   Update AVFrame for the new channel layout API: add ch_layout, deprecate
   channels/channel_layout.
 
-2022-03-10 - xxxxxxxxxx - lavu 57.23.100 - cpu.h
+2022-03-10 - f629ea2e18 - lavu 57.23.100 - cpu.h
   Add AV_CPU_FLAG_AVX512ICL.
 
-2022-02-07 - xxxxxxxxxx - lavu 57.21.100 - fifo.h
+2022-02-07 - a10f1aec1f - lavu 57.21.100 - fifo.h
   Deprecate AVFifoBuffer and the API around it, namely av_fifo_alloc(),
   av_fifo_alloc_array(), av_fifo_free(), av_fifo_freep(), av_fifo_reset(),
   av_fifo_size(), av_fifo_space(), av_fifo_generic_peek_at(),
@@ -84,7 +113,7 @@ API changes, most recent first:
   av_fifo_realloc2(), av_fifo_grow(), av_fifo_drain() and av_fifo_peek2().
   Users should switch to the AVFifo-API.
 
-2022-02-07 - xxxxxxxxxx - lavu 57.20.100 - fifo.h
+2022-02-07 - 7329b22c05 - lavu 57.20.100 - fifo.h
   Add a new FIFO API, which allows setting a FIFO element size.
   This API operates on these elements rather than on bytes.
   Add av_fifo_alloc2(), av_fifo_elem_size(), av_fifo_can_read(),
@@ -1687,7 +1716,7 @@ API changes, most recent first:
 2014-04-15 - ef818d8 - lavf 55.37.101 - avformat.h
   Add av_format_inject_global_side_data()
 
-2014-04-12 - 4f698be - lavu 52.76.100 - log.h
+2014-04-12 - 4f698be8f - lavu 52.76.100 - log.h
   Add av_log_get_flags()
 
 2014-04-11 - 6db42a2b - lavd 55.12.100 - avdevice.h
index 5e10020..1adacd2 100644 (file)
--- a/doc/codecs.texi
+++ b/doc/codecs.texi
@@ -644,6 +644,8 @@ for codecs that support it. See also @file{doc/examples/export_mvs.c}.
 Do not skip samples and export skip information as frame side data.
 @item ass_ro_flush_noop
 Do not reset ASS ReadOrder field on flush.
+@item icc_profiles
+Generate/parse embedded ICC profiles from/to colorimetry tags.
 @end table
 
 @item export_side_data @var{flags} (@emph{decoding/encoding,audio,video,subtitles})
index c95a9ae..2b6dd86 100644 (file)
--- a/doc/demuxers.texi
+++ b/doc/demuxers.texi
@@ -373,6 +373,9 @@ It accepts the following options:
 @item live_start_index
 segment index to start live streams at (negative values are from the end).
 
+@item prefer_x_start
+prefer to use #EXT-X-START if it's in playlist instead of live_start_index.
+
 @item allowed_extensions
 ',' separated list of file extensions that hls is allowed to access.
 
index 02a91ff..6d73f74 100644 (file)
--- a/doc/encoders.texi
+++ b/doc/encoders.texi
@@ -3337,10 +3337,13 @@ For encoders set this flag to ON to reduce power consumption and GPU usage.
 Following options can be used durning qsv encoding.
 
 @table @option
-@item @var{qsv_config_qp}
+@item @var{global_quality}
+@item @var{i_quant_factor}
+@item @var{i_quant_offset}
+@item @var{b_quant_factor}
+@item @var{b_quant_offset}
 Supported in h264_qsv and hevc_qsv.
-This option can be set in per-frame metadata. QP parameter can be dynamically
-changed when encoding in CQP mode.
+Change these value to reset qsv codec's qp configuration.
 @end table
 
 @subsection H264 options
index d943f4d..42440d9 100644 (file)
--- a/doc/ffmpeg.texi
+++ b/doc/ffmpeg.texi
@@ -518,6 +518,21 @@ see @ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1)
 Like the @code{-ss} option but relative to the "end of file". That is negative
 values are earlier in the file, 0 is at EOF.
 
+@item -isync @var{input_index} (@emph{input})
+Assign an input as a sync source.
+
+This will take the difference between the start times of the target and reference inputs and
+offset the timestamps of the target file by that difference. The source timestamps of the two
+inputs should derive from the same clock source for expected results. If @code{copyts} is set
+then @code{start_at_zero} must also be set. If either of the inputs has no starting timestamp
+then no sync adjustment is made.
+
+Acceptable values are those that refer to a valid ffmpeg input index. If the sync reference is
+the target index itself or @var{-1}, then no adjustment is made to target timestamps. A sync
+reference may not itself be synced to any other input.
+
+Default value is @var{-1}.
+
 @item -itsoffset @var{offset} (@emph{input})
 Set the input time offset.
 
@@ -977,7 +992,8 @@ to keep the interlaced format for minimum losses.
 The alternative is to deinterlace the input stream by use of a filter
 such as @code{yadif} or @code{bwdif}, but deinterlacing introduces losses.
 @item -psnr
-Calculate PSNR of compressed frames.
+Calculate PSNR of compressed frames. This option is deprecated, pass the
+PSNR flag to the encoder instead, using @code{-flags +psnr}.
 @item -vstats
 Dump video coding statistics to @file{vstats_HHMMSS.log}.
 @item -vstats_file @var{file}
@@ -1395,18 +1411,18 @@ Set the size of the canvas used to render subtitles.
 @section Advanced options
 
 @table @option
-@item -map [-]@var{input_file_id}[:@var{stream_specifier}][?][,@var{sync_file_id}[:@var{stream_specifier}]] | @var{[linklabel]} (@emph{output})
+@item -map [-]@var{input_file_id}[:@var{stream_specifier}][?] | @var{[linklabel]} (@emph{output})
 
-Designate one or more input streams as a source for the output file. Each input
-stream is identified by the input file index @var{input_file_id} and
-the input stream index @var{input_stream_id} within the input
-file. Both indices start at 0. If specified,
-@var{sync_file_id}:@var{stream_specifier} sets which input stream
-is used as a presentation sync reference.
+Create one or more streams in the output file. This option has two forms for
+specifying the data source(s): the first selects one or more streams from some
+input file (specified with @code{-i}), the second takes an output from some
+complex filtergraph (specified with @code{-filter_complex} or
+@code{-filter_complex_script}).
 
-The first @code{-map} option on the command line specifies the
-source for output stream 0, the second @code{-map} option specifies
-the source for output stream 1, etc.
+In the first form, an output stream is created for every stream from the input
+file with the index @var{input_file_id}. If @var{stream_specifier} is given,
+only those streams that match the specifier are used (see the
+@ref{Stream specifiers} section for the @var{stream_specifier} syntax).
 
 A @code{-} character before the stream identifier creates a "negative" mapping.
 It disables matching streams from already created mappings.
@@ -1420,39 +1436,56 @@ An alternative @var{[linklabel]} form will map outputs from complex filter
 graphs (see the @option{-filter_complex} option) to the output file.
 @var{linklabel} must correspond to a defined output link label in the graph.
 
-For example, to map ALL streams from the first input file to output
+This option may be specified multiple times, each adding more streams to the
+output file. Any given input stream may also be mapped any number of times as a
+source for different output streams, e.g. in order to use different encoding
+options and/or filters. The streams are created in the output in the same order
+in which the @code{-map} options are given on the commandline.
+
+Using this option disables the default mappings for this output file.
+
+Examples:
+
+@table @emph
+
+@item map everything
+To map ALL streams from the first input file to output
 @example
 ffmpeg -i INPUT -map 0 output
 @end example
 
-For example, if you have two audio streams in the first input file,
-these streams are identified by "0:0" and "0:1". You can use
-@code{-map} to select which streams to place in an output file. For
-example:
+@item select specific stream
+If you have two audio streams in the first input file, these streams are
+identified by @var{0:0} and @var{0:1}. You can use @code{-map} to select which
+streams to place in an output file. For example:
 @example
 ffmpeg -i INPUT -map 0:1 out.wav
 @end example
-will map the input stream in @file{INPUT} identified by "0:1" to
-the (single) output stream in @file{out.wav}.
+will map the second input stream in @file{INPUT} to the (single) output stream
+in @file{out.wav}.
 
-For example, to select the stream with index 2 from input file
-@file{a.mov} (specified by the identifier "0:2"), and stream with
-index 6 from input @file{b.mov} (specified by the identifier "1:6"),
-and copy them to the output file @file{out.mov}:
+@item create multiple streams
+To select the stream with index 2 from input file @file{a.mov} (specified by the
+identifier @var{0:2}), and stream with index 6 from input @file{b.mov}
+(specified by the identifier @var{1:6}), and copy them to the output file
+@file{out.mov}:
 @example
 ffmpeg -i a.mov -i b.mov -c copy -map 0:2 -map 1:6 out.mov
 @end example
 
+@item create multiple streams 2
 To select all video and the third audio stream from an input file:
 @example
 ffmpeg -i INPUT -map 0:v -map 0:a:2 OUTPUT
 @end example
 
+@item negative map
 To map all the streams except the second audio, use negative mappings
 @example
 ffmpeg -i INPUT -map 0 -map -0:a:1 OUTPUT
 @end example
 
+@item optional map
 To map the video and audio streams from the first input, and using the
 trailing @code{?}, ignore the audio mapping if no audio streams exist in
 the first input:
@@ -1460,12 +1493,13 @@ the first input:
 ffmpeg -i INPUT -map 0:v -map 0:a? OUTPUT
 @end example
 
+@item map by language
 To pick the English audio stream:
 @example
 ffmpeg -i INPUT -map 0:m:language:eng OUTPUT
 @end example
 
-Note that using this option disables the default mappings for this output file.
+@end table
 
 @item -ignore_unknown
 Ignore input streams with unknown type instead of failing if copying
@@ -1476,6 +1510,10 @@ Allow input streams with unknown type to be copied instead of failing if copying
 such streams is attempted.
 
 @item -map_channel [@var{input_file_id}.@var{stream_specifier}.@var{channel_id}|-1][?][:@var{output_file_id}.@var{stream_specifier}]
+This option is deprecated and will be removed. It can be replaced by the
+@var{pan} filter. In some cases it may be easier to use some combination of the
+@var{channelsplit}, @var{channelmap}, or @var{amerge} filters.
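+
+For instance, extracting the second channel of the first audio stream with
+@code{-map_channel 0.0.1} can typically be expressed with @var{pan} instead
+(an illustrative sketch, not part of this patch):
+@example
+ffmpeg -i INPUT -af "pan=mono|c0=c1" OUTPUT
+@end example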
+
 Map an audio channel from a given input to an output. If
 @var{output_file_id}.@var{stream_specifier} is not set, the audio channel will
 be mapped on all the audio streams.
@@ -1749,7 +1787,23 @@ Default value is 0.
 @item -bitexact (@emph{input/output})
 Enable bitexact mode for (de)muxer and (de/en)coder
 @item -shortest (@emph{output})
-Finish encoding when the shortest input stream ends.
+Finish encoding when the shortest output stream ends.
+
+Note that this option may require buffering frames, which introduces extra
+latency. The maximum amount of this latency may be controlled with the
+@code{-shortest_buf_duration} option.
+
+@item -shortest_buf_duration @var{duration} (@emph{output})
+The @code{-shortest} option may require buffering potentially large amounts
+of data when at least one of the streams is "sparse" (i.e. has large gaps
+between frames – this is typically the case for subtitles).
+
+This option controls the maximum duration of buffered frames in seconds.
+Larger values may allow the @code{-shortest} option to produce more accurate
+results, but increase memory use and latency.
+
+The default value is 10 seconds.
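+
+As an illustration (not part of this patch), the following limits the
+buffering introduced by @code{-shortest} to 2 seconds when muxing a video
+stream together with a sparse subtitle stream:
+@example
+ffmpeg -i INPUT -i SUBTITLES -map 0:v -map 1:s -c:s mov_text -shortest -shortest_buf_duration 2 out.mp4
+@end example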
+
 @item -dts_delta_threshold
 Timestamp discontinuity delta threshold.
 @item -dts_error_threshold @var{seconds}
@@ -1884,13 +1938,16 @@ to the @option{-ss} option is considered an actual timestamp, and is not
 offset by the start time of the file. This matters only for files which do
 not start from timestamp 0, such as transport streams.
 
-@item -thread_queue_size @var{size} (@emph{input})
-This option sets the maximum number of queued packets when reading from the
-file or device. With low latency / high rate live streams, packets may be
-discarded if they are not read in a timely manner; setting this value can
+@item -thread_queue_size @var{size} (@emph{input/output})
+For input, this option sets the maximum number of queued packets when reading
+from the file or device. With low latency / high rate live streams, packets may
+be discarded if they are not read in a timely manner; setting this value can
 force ffmpeg to use a separate input thread and read packets as soon as they
 arrive. By default ffmpeg only does this if multiple inputs are specified.
 
+For output, this option specifies the maximum number of packets that may be
+queued to each muxing thread.
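+
+For example (an illustrative sketch, not part of this patch), a high-rate live
+input can be given a larger input packet queue:
+@example
+ffmpeg -thread_queue_size 1024 -i live_input.ts -c copy out.ts
+@end example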
+
 @item -sdp_file @var{file} (@emph{global})
 Print sdp information for an output stream to @var{file}.
 This allows dumping sdp information when at least one output isn't an
index 3af621a..6e678a9 100644 (file)
@@ -92,6 +92,8 @@
       <xsd:attribute name="best_effort_timestamp_time" type="xsd:float" />
       <xsd:attribute name="pkt_duration"  type="xsd:long" />
       <xsd:attribute name="pkt_duration_time" type="xsd:float"/>
+      <xsd:attribute name="duration"      type="xsd:long" />
+      <xsd:attribute name="duration_time" type="xsd:float"/>
       <xsd:attribute name="pkt_pos"       type="xsd:long" />
       <xsd:attribute name="pkt_size"      type="xsd:int" />
 
index e525e87..01a359f 100644 (file)
@@ -2210,13 +2210,6 @@ and @var{pan} audio filters support many formats). If the @var{amix}
 input has integer samples then @ref{aresample} will be automatically
 inserted to perform the conversion to float samples.
 
-For example
-@example
-ffmpeg -i INPUT1 -i INPUT2 -i INPUT3 -filter_complex amix=inputs=3:duration=first:dropout_transition=3 OUTPUT
-@end example
-will mix 3 input audio streams to a single output with the same duration as the
-first input and a dropout transition time of 3 seconds.
-
 It accepts the following parameters:
 @table @option
 
@@ -2243,8 +2236,10 @@ The transition time, in seconds, for volume renormalization when an input
 stream ends. The default value is 2 seconds.
 
 @item weights
-Specify weight of each input audio stream as sequence.
-Each weight is separated by space. By default all inputs have same weight.
+Specify the weight of each input audio stream as a sequence of numbers
+separated by spaces. If fewer weights are specified than the number of inputs,
+the last weight is assigned to the remaining inputs.
+Default weight for each input is 1.
 
 @item normalize
 Always scale inputs instead of only doing summation of samples.
@@ -2252,6 +2247,26 @@ Beware of heavy clipping if inputs are not normalized prior or after filtering
 by this filter if this option is disabled. By default is enabled.
 @end table
 
+@subsection Examples
+
+@itemize
+
+@item
+This will mix 3 input audio streams to a single output with the same duration as the
+first input and a dropout transition time of 3 seconds:
+@example
+ffmpeg -i INPUT1 -i INPUT2 -i INPUT3 -filter_complex amix=inputs=3:duration=first:dropout_transition=3 OUTPUT
+@end example
+
+@item
+This will mix one vocal and one music input audio stream into a single output with the same duration as the
+longest input. The music will have a quarter of the weight of the vocals, and the inputs are not normalized:
+@example
+ffmpeg -i VOCALS -i MUSIC -filter_complex amix=inputs=2:duration=longest:dropout_transition=0:weights="1 0.25":normalize=0 OUTPUT
+@end example
+
+@end itemize
+
 @subsection Commands
 
 This filter supports the following commands:
@@ -8599,6 +8614,7 @@ The command accepts the same syntax of the corresponding option.
 If the specified expression is not valid, it is kept at its current
 value.
 
+@anchor{chromakey}
 @section chromakey
 YUV colorspace color/chroma keying.
 
@@ -8651,6 +8667,48 @@ ffmpeg -f lavfi -i color=c=black:s=1280x720 -i video.mp4 -shortest -filter_compl
 @end example
 @end itemize
 
+@section chromakey_cuda
+CUDA accelerated YUV colorspace color/chroma keying.
+
+This filter works like the normal chromakey filter but operates on CUDA frames.
+For more details and parameters, see @ref{chromakey}.
+
+@subsection Examples
+
+@itemize
+@item
+Make all the green pixels in the input video transparent and use it as an overlay for another video:
+
+@example
+./ffmpeg \
+    -hwaccel cuda -hwaccel_output_format cuda -i input_green.mp4  \
+    -hwaccel cuda -hwaccel_output_format cuda -i base_video.mp4 \
+    -init_hw_device cuda \
+    -filter_complex \
+    " \
+        [0:v]chromakey_cuda=0x25302D:0.1:0.12:1[overlay_video]; \
+        [1:v]scale_cuda=format=yuv420p[base]; \
+        [base][overlay_video]overlay_cuda" \
+    -an -sn -c:v h264_nvenc -cq 20 output.mp4
+@end example
+
+@item
+Process two software sources, explicitly uploading the frames:
+
+@example
+./ffmpeg -init_hw_device cuda=cuda -filter_hw_device cuda \
+    -f lavfi -i color=size=800x600:color=white,format=yuv420p \
+    -f lavfi -i yuvtestsrc=size=200x200,format=yuv420p \
+    -filter_complex \
+    " \
+        [0]hwupload[under]; \
+        [1]hwupload,chromakey_cuda=green:0.1:0.12[over]; \
+        [under][over]overlay_cuda" \
+    -c:v hevc_nvenc -cq 18 -preset slow output.mp4
+@end example
+
+@end itemize
+
 @section chromanr
 Reduce chrominance noise.
 
@@ -10095,12 +10153,23 @@ Auto-detect the crop size.
 
 It calculates the necessary cropping parameters and prints the
 recommended parameters via the logging system. The detected dimensions
-correspond to the non-black area of the input video.
+correspond to the non-black area or the active video area of the input video, depending on @var{mode}.
 
 It accepts the following parameters:
 
 @table @option
 
+@item mode
+Depending on @var{mode}, crop detection is based either on the black value of the surrounding pixels or on a combination of motion vectors and edge pixels.
+
+@table @samp
+@item black
+Detect black pixels surrounding the playing video. For fine control, use the @var{limit} option.
+
+@item mvedges
+Detect the playing video using the motion vectors inside the video and by scanning for edge pixels that typically form the border of the playing video.
+@end table
+
 @item limit
 Set higher black value threshold, which can be optionally specified
 from nothing (0) to everything (255 for 8-bit based formats). An intensity
@@ -10126,8 +10195,48 @@ detect the current optimal crop area. Default value is 0.
 This can be useful when channel logos distort the video area. 0
 indicates 'never reset', and returns the largest area encountered during
 playback.
+
+@item mv_threshold
+Set the motion threshold, in pixel units, used for motion detection. It defaults to 8.
+
+@item low
+@item high
+Set low and high threshold values used by the Canny thresholding
+algorithm.
+
+The high threshold selects the "strong" edge pixels, which are then
+connected through 8-connectivity with the "weak" edge pixels selected
+by the low threshold.
+
+@var{low} and @var{high} threshold values must be chosen in the range
+[0,1], and @var{low} should be less than or equal to @var{high}.
+
+Default value for @var{low} is @code{5/255}, and default value for @var{high}
+is @code{15/255}.
 @end table
 
+@subsection Examples
+
+@itemize
+@item
+Find video area surrounded by black borders:
+@example
+ffmpeg -i file.mp4 -vf cropdetect,metadata=mode=print -f null -
+@end example
+
+@item
+Find an embedded video area, generate motion vectors beforehand:
+@example
+ffmpeg -i file.mp4 -vf mestimate,cropdetect=mode=mvedges,metadata=mode=print -f null -
+@end example
+
+@item
+Find an embedded video area, use motion vectors from decoder:
+@example
+ffmpeg -flags2 +export_mvs -i file.mp4 -vf cropdetect=mode=mvedges,metadata=mode=print -f null -
+@end example
+@end itemize
+
 @anchor{cue}
 @section cue
 
@@ -11799,7 +11908,7 @@ The current packet's position in the input file or stream
 (in bytes, from the start of the input). A value of -1 indicates
 this info is not available.
 
-@item pkt_duration
+@item duration
 The current packet's duration, in seconds.
 
 @item pkt_size
@@ -24265,6 +24374,8 @@ Set one of available transition effects:
 @item squeezeh
 @item squeezev
 @item zoomin
+@item fadefast
+@item fadeslow
 @end table
 Default transition effect is fade.
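+
+For instance (an illustrative sketch, not part of this patch), the new
+@code{fadefast} transition can be selected like any other:
+@example
+ffmpeg -i first.mp4 -i second.mp4 -filter_complex xfade=transition=fadefast:duration=2:offset=5 out.mp4
+@end example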
 
@@ -24381,8 +24492,26 @@ the output video frame will be filled. Similarly, videos can overlap each
 other if their position doesn't leave enough space for the full frame of
 adjoining videos.
 
-For 2 inputs, a default layout of @code{0_0|w0_0} is set. In all other cases,
-a layout must be set by the user.
+For 2 inputs, a default layout of @code{0_0|w0_0} (equivalent to
+@code{grid=2x1}) is set. In all other cases, a layout or a grid must be set by
+the user. Only one of @code{grid} and @code{layout} may be specified at a time.
+Specifying both will result in an error.
+
+@item grid
+Specify a fixed size grid of inputs. Set the
+grid size in the form @code{COLUMNSxROWS}. There must be @code{ROWS * COLUMNS}
+input streams and they will be arranged as a grid with @code{ROWS} rows and
+@code{COLUMNS} columns. When using this option, each input stream within a row
+must have the same height and all the rows must have the same width.
+
+If @code{grid} is set, then the @code{inputs} option is ignored and is implicitly
+set to @code{ROWS * COLUMNS}.
+
+For 2 inputs, a default grid of @code{2x1} (equivalent to
+@code{layout=0_0|w0_0}) is set. In all other cases, a layout or a grid must be
+set by the user. Only one of @code{grid} and @code{layout} may be specified at a time.
+Specifying both will result in an error.
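+
+A minimal sketch (not part of this patch) arranging four equally sized inputs
+in a 2x2 grid:
+@example
+ffmpeg -i A.mp4 -i B.mp4 -i C.mp4 -i D.mp4 -filter_complex xstack=grid=2x2 out.mp4
+@end example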
 
 @item shortest
 If set to 1, force the output to terminate when the shortest input
@@ -25683,6 +25812,30 @@ __kernel void blend_images(__write_only image2d_t dst,
 
 @end itemize
 
+@section remap_opencl
+
+Remap pixels using the second (Xmap) and third (Ymap) input video streams.
+
+The destination pixel at position (X, Y) is picked from the source position (x, y),
+where x = Xmap(X, Y) and y = Ymap(X, Y). If the mapping values are out of range,
+a zero value is used for the destination pixel.
+
+The Xmap and Ymap input video streams must have the same dimensions. The output
+video stream will have the dimensions of the Xmap/Ymap video streams.
+The Xmap and Ymap input video streams must use a single-channel, 32-bit float pixel format.
+
+@table @option
+@item interp
+Specify interpolation used for remapping of pixels.
+Allowed values are @code{near} and @code{linear}.
+Default value is @code{linear}.
+
+@item fill
+Specify the color of the unmapped pixels. For the syntax of this option,
+check the @ref{color syntax,,"Color" section in the ffmpeg-utils
+manual,ffmpeg-utils}. Default color is @code{black}.
+@end table
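+
+A rough sketch of typical usage (not part of this patch; @file{XMAP} and
+@file{YMAP} are assumed to be single-channel 32-bit float map streams as
+described above):
+@example
+ffmpeg -init_hw_device opencl=ocl -filter_hw_device ocl \
+    -i INPUT -i XMAP -i YMAP \
+    -filter_complex "[0:v]hwupload[v]; [1:v]hwupload[x]; [2:v]hwupload[y]; \
+                     [v][x][y]remap_opencl,hwdownload,format=nv12" \
+    OUTPUT
+@end example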
+
 @section roberts_opencl
 Apply the Roberts cross operator (@url{https://en.wikipedia.org/wiki/Roberts_cross}) to input video stream.
 
@@ -26030,9 +26183,9 @@ Set the height of the overlaid video on the main video.
 Default value is the height of input overlay video.
 
 @item alpha
-Set blocking detection thresholds. Allowed range is 0.0 to 1.0, it
-requires an input video with alpha channel.
-Default value is @code{0.0}.
+Set the transparency of the overlaid video. Allowed range is 0.0 to 1.0.
+A higher value means lower transparency.
+Default value is @code{1.0}.
 
 @end table
 
@@ -26348,6 +26501,90 @@ need for a nullsrc video source.
 @end itemize
 
 
+@section ddagrab
+
+Captures the Windows Desktop via Desktop Duplication API.
+
+The filter exclusively returns D3D11 hardware frames, intended for on-GPU
+encoding or processing, so an explicit @ref{hwdownload} is needed for any
+kind of software processing.
+
+It accepts the following options:
+
+@table @option
+@item output_idx
+DXGI Output Index to capture.
+
+Usually corresponds to the index Windows has given the screen minus one,
+so it starts at 0.
+
+Defaults to output 0.
+
+@item draw_mouse
+Whether to draw the mouse cursor.
+
+Defaults to true.
+
+Only affects hardware cursors. If a game or application renders its own cursor,
+it'll always be captured.
+
+@item framerate
+Framerate at which the desktop will be captured.
+
+Defaults to 30 FPS.
+
+@item video_size
+Specify the size of the captured video.
+
+Defaults to the full size of the screen.
+
+Cropped from the bottom/right if smaller than screen size.
+
+@item offset_x
+Horizontal offset of the captured video.
+
+@item offset_y
+Vertical offset of the captured video.
+
+@item output_fmt
+Desired filter output format.
+Defaults to 8 Bit BGRA.
+
+It accepts the following values:
+@table @samp
+@item auto
+Passes all supported output formats to DDA and returns what DDA decides to use.
+@item 8bit
+@item bgra
+8 Bit formats always work, and DDA will convert to them if necessary.
+@item 10bit
+@item x2bgr10
+Filter initialization will fail if 10 bit format is requested but unavailable.
+@end table
+
+@end table
+
+@subsection Examples
+
+Capture primary screen and encode using nvenc:
+@example
+ffmpeg -f lavfi -i ddagrab -c:v h264_nvenc -cq 18 output.mp4
+@end example
+
+You can also skip the lavfi device and use the filter directly. The following
+also demonstrates downloading the frame and encoding with libx264.
+Explicit output format specification is required in this case:
+@example
+ffmpeg -filter_complex ddagrab=output_idx=1:framerate=60,hwdownload,format=bgra -c:v libx264 -crf 18 output.mp4
+@end example
+
+To capture only a subsection of the desktop, specify a smaller size and its
+offsets into the screen:
+@example
+ddagrab=video_size=800x600:offset_x=100:offset_y=100
+@end example
+
+
 @section gradients
 Generate several gradients.
 
index 987a2f8..86ec6d6 100644 (file)
@@ -749,6 +749,8 @@ following image formats are supported:
     @tab OpenEXR
 @item FITS         @tab X @tab X
     @tab Flexible Image Transport System
+@item HDR          @tab X @tab X
+    @tab Radiance HDR RGBE Image format
 @item IMG          @tab   @tab X
     @tab GEM Raster image
 @item JPEG         @tab X @tab X
@@ -775,6 +777,8 @@ following image formats are supported:
     @tab PGM with U and V components in YUV 4:2:0
 @item PGX          @tab   @tab X
     @tab PGX file decoder
+@item PHM          @tab X @tab X
+    @tab Portable HalfFloatMap image
 @item PIC          @tab @tab X
     @tab Pictor/PC Paint
 @item PNG          @tab X @tab X
@@ -797,6 +801,8 @@ following image formats are supported:
     @tab Targa (.TGA) image format
 @item VBN  @tab X @tab X
     @tab Vizrt Binary Image format
+@item WBMP         @tab X @tab X
+    @tab Wireless Application Protocol Bitmap image format
 @item WebP         @tab E @tab X
     @tab WebP image format, encoding supported through external library libwebp
 @item XBM  @tab X @tab X
index b6cafaa..b2f4326 100644 (file)
@@ -1060,6 +1060,8 @@ and remove the @code{#EXT-X-ENDLIST} from the old segment list.
 @item round_durations
 Round the duration info in the playlist file segment info to integer
 values, instead of using floating point.
+If no other features require a higher HLS version, this allows ffmpeg to
+output an HLS version 2 m3u8.
 
 @item discont_start
 Add the @code{#EXT-X-DISCONTINUITY} tag to the playlist, before the
index 81ad6c4..ff70b11 100644 (file)
@@ -10,13 +10,20 @@ ALLAVPROGS   = $(AVBASENAMES:%=%$(PROGSSUF)$(EXESUF))
 ALLAVPROGS_G = $(AVBASENAMES:%=%$(PROGSSUF)_g$(EXESUF))
 
 OBJS-ffmpeg +=                  \
+    fftools/ffmpeg_demux.o      \
     fftools/ffmpeg_filter.o     \
     fftools/ffmpeg_hw.o         \
     fftools/ffmpeg_mux.o        \
     fftools/ffmpeg_opt.o        \
+    fftools/objpool.o           \
+    fftools/sync_queue.o        \
+    fftools/thread_queue.o      \
 
 define DOFFTOOL
 OBJS-$(1) += fftools/cmdutils.o fftools/opt_common.o fftools/$(1).o $(OBJS-$(1)-yes)
+ifdef HAVE_GNU_WINDRES
+OBJS-$(1) += fftools/fftoolsres.o
+endif
 $(1)$(PROGSSUF)_g$(EXESUF): $$(OBJS-$(1))
 $$(OBJS-$(1)): | fftools
 $$(OBJS-$(1)): CFLAGS  += $(CFLAGS-$(1))
index 69a6f54..18e768b 100644 (file)
@@ -822,8 +822,8 @@ FILE *get_preset_file(char *filename, size_t filename_size,
 #endif
     char *env_home = getenv_utf8("HOME");
     char *env_ffmpeg_datadir = getenv_utf8("FFMPEG_DATADIR");
-    const char *base[3] = { env_home,
-                            env_ffmpeg_datadir,
+    const char *base[3] = { env_ffmpeg_datadir,
+                            env_home,   /* index=1(HOME) is special: search in a .ffmpeg subfolder */
                             FFMPEG_DATADIR, };
 
     if (is_path) {
index e7384f0..ef7177f 100644 (file)
 
 #include "ffmpeg.h"
 #include "cmdutils.h"
+#include "sync_queue.h"
 
 #include "libavutil/avassert.h"
 
@@ -137,13 +138,9 @@ static int64_t nb_frames_drop = 0;
 static int64_t decode_error_stat[2];
 unsigned nb_output_dumped = 0;
 
-int want_sdp = 1;
-
 static BenchmarkTimeStamps current_time;
 AVIOContext *progress_avio = NULL;
 
-static uint8_t *subtitle_out;
-
 InputStream **input_streams = NULL;
 int        nb_input_streams = 0;
 InputFile   **input_files   = NULL;
@@ -164,10 +161,6 @@ static struct termios oldtty;
 static int restore_tty;
 #endif
 
-#if HAVE_THREADS
-static void free_input_threads(void);
-#endif
-
 /* sub2video hack:
    Convert subtitles to video with alpha to insert them in filter graphs.
    This is a temporary solution until libavfilter gets real subtitles support.
@@ -264,7 +257,7 @@ void sub2video_update(InputStream *ist, int64_t heartbeat_pts, AVSubtitle *sub)
         num_rects = 0;
     }
     if (sub2video_get_blank_frame(ist) < 0) {
-        av_log(ist->dec_ctx, AV_LOG_ERROR,
+        av_log(NULL, AV_LOG_ERROR,
                "Impossible to get a blank canvas.\n");
         return;
     }
@@ -560,8 +553,6 @@ static void ffmpeg_cleanup(int ret)
     }
     av_freep(&filtergraphs);
 
-    av_freep(&subtitle_out);
-
     /* close files */
     for (i = 0; i < nb_output_files; i++)
         of_close(&output_files[i]);
@@ -575,6 +566,7 @@ static void ffmpeg_cleanup(int ret)
         av_bsf_free(&ost->bsf_ctx);
 
         av_frame_free(&ost->filtered_frame);
+        av_frame_free(&ost->sq_frame);
         av_frame_free(&ost->last_frame);
         av_packet_free(&ost->pkt);
         av_dict_free(&ost->encoder_opts);
@@ -584,30 +576,23 @@ static void ffmpeg_cleanup(int ret)
         av_freep(&ost->avfilter);
         av_freep(&ost->logfile_prefix);
 
+#if FFMPEG_OPT_MAP_CHANNEL
         av_freep(&ost->audio_channels_map);
         ost->audio_channels_mapped = 0;
+#endif
 
         av_dict_free(&ost->sws_dict);
         av_dict_free(&ost->swr_opts);
 
+        if (ost->enc_ctx)
+            av_freep(&ost->enc_ctx->stats_in);
         avcodec_free_context(&ost->enc_ctx);
-        avcodec_parameters_free(&ost->ref_par);
-
-        if (ost->muxing_queue) {
-            AVPacket *pkt;
-            while (av_fifo_read(ost->muxing_queue, &pkt, 1) >= 0)
-                av_packet_free(&pkt);
-            av_fifo_freep2(&ost->muxing_queue);
-        }
 
         av_freep(&output_streams[i]);
     }
-#if HAVE_THREADS
     free_input_threads();
-#endif
     for (i = 0; i < nb_input_files; i++) {
         avformat_close_input(&input_files[i]->ctx);
-        av_packet_free(&input_files[i]->pkt);
         av_freep(&input_files[i]);
     }
     for (i = 0; i < nb_input_streams; i++) {
@@ -623,6 +608,7 @@ static void ffmpeg_cleanup(int ret)
         av_freep(&ist->dts_buffer);
 
         avcodec_free_context(&ist->dec_ctx);
+        avcodec_parameters_free(&ist->par);
 
         av_freep(&input_streams[i]);
     }
@@ -702,13 +688,10 @@ static void update_benchmark(const char *fmt, ...)
 static void close_output_stream(OutputStream *ost)
 {
     OutputFile *of = output_files[ost->file_index];
-    AVRational time_base = ost->stream_copy ? ost->mux_timebase : ost->enc_ctx->time_base;
-
     ost->finished |= ENCODER_FINISHED;
-    if (of->shortest) {
-        int64_t end = av_rescale_q(ost->sync_opts - ost->first_pts, time_base, AV_TIME_BASE_Q);
-        of->recording_time = FFMIN(of->recording_time, end);
-    }
+
+    if (ost->sq_idx_encode >= 0)
+        sq_send(of->sq_encode, ost->sq_idx_encode, SQFRAME(NULL));
 }
 
 /*
@@ -725,27 +708,54 @@ static void close_output_stream(OutputStream *ost)
 static void output_packet(OutputFile *of, AVPacket *pkt,
                           OutputStream *ost, int eof)
 {
+    const char *err_msg;
     int ret = 0;
 
+    if (!eof && pkt->dts != AV_NOPTS_VALUE)
+        ost->last_mux_dts = av_rescale_q(pkt->dts, ost->mux_timebase, AV_TIME_BASE_Q);
+
     /* apply the output bitstream filters */
     if (ost->bsf_ctx) {
+        int bsf_eof = 0;
+
         ret = av_bsf_send_packet(ost->bsf_ctx, eof ? NULL : pkt);
+        if (ret < 0) {
+            err_msg = "submitting a packet for bitstream filtering";
+            goto fail;
+        }
+
+        while (!bsf_eof) {
+            ret = av_bsf_receive_packet(ost->bsf_ctx, pkt);
+            if (ret == AVERROR(EAGAIN))
+                return;
+            else if (ret == AVERROR_EOF)
+                bsf_eof = 1;
+            else if (ret < 0) {
+                err_msg = "applying bitstream filters to a packet";
+                goto fail;
+            }
+
+            ret = of_submit_packet(of, bsf_eof ? NULL : pkt, ost);
+            if (ret < 0)
+                goto mux_fail;
+        }
+    } else {
+        ret = of_submit_packet(of, eof ? NULL : pkt, ost);
         if (ret < 0)
-            goto finish;
-        while ((ret = av_bsf_receive_packet(ost->bsf_ctx, pkt)) >= 0)
-            of_write_packet(of, pkt, ost, 0);
-        if (ret == AVERROR(EAGAIN))
-            ret = 0;
-    } else if (!eof)
-        of_write_packet(of, pkt, ost, 0);
-
-finish:
-    if (ret < 0 && ret != AVERROR_EOF) {
-        av_log(NULL, AV_LOG_ERROR, "Error applying bitstream filters to an output "
-               "packet for stream #%d:%d.\n", ost->file_index, ost->index);
-        if(exit_on_error)
-            exit_program(1);
+            goto mux_fail;
     }
+
+    return;
+
+mux_fail:
+    err_msg = "submitting a packet to the muxer";
+
+fail:
+    av_log(NULL, AV_LOG_ERROR, "Error %s for output stream #%d:%d.\n",
+           err_msg, ost->file_index, ost->index);
+    if (exit_on_error)
+        exit_program(1);
+
 }
 
 static int check_recording_time(OutputStream *ost)
@@ -765,7 +775,9 @@ static double adjust_frame_pts_to_encoder_tb(OutputFile *of, OutputStream *ost,
                                              AVFrame *frame)
 {
     double float_pts = AV_NOPTS_VALUE; // this is identical to frame.pts but with higher precision
+    int64_t orig_pts = AV_NOPTS_VALUE;
     AVCodecContext *enc = ost->enc_ctx;
+    AVRational filter_tb = (AVRational){ -1, -1 };
     if (!frame || frame->pts == AV_NOPTS_VALUE ||
         !enc || !ost->filter || !ost->filter->graph->graph)
         goto early_exit;
@@ -774,9 +786,10 @@ static double adjust_frame_pts_to_encoder_tb(OutputFile *of, OutputStream *ost,
         AVFilterContext *filter = ost->filter->filter;
 
         int64_t start_time = (of->start_time == AV_NOPTS_VALUE) ? 0 : of->start_time;
-        AVRational filter_tb = av_buffersink_get_time_base(filter);
         AVRational tb = enc->time_base;
         int extra_bits = av_clip(29 - av_log2(tb.den), 0, 16);
+        filter_tb = av_buffersink_get_time_base(filter);
+        orig_pts = frame->pts;
 
         tb.den <<= extra_bits;
         float_pts =
@@ -794,9 +807,14 @@ static double adjust_frame_pts_to_encoder_tb(OutputFile *of, OutputStream *ost,
 early_exit:
 
     if (debug_ts) {
+        av_log(NULL, AV_LOG_INFO, "filter_raw -> pts:%s pts_time:%s time_base:%d/%d\n",
+               frame ? av_ts2str(orig_pts) : "NULL",
+               frame ? av_ts2timestr(orig_pts, &filter_tb) : "NULL",
+               filter_tb.num, filter_tb.den);
+
         av_log(NULL, AV_LOG_INFO, "filter -> pts:%s pts_time:%s exact:%f time_base:%d/%d\n",
                frame ? av_ts2str(frame->pts) : "NULL",
-               frame ? av_ts2timestr(frame->pts, &enc->time_base) : "NULL",
+               (enc && frame) ? av_ts2timestr(frame->pts, &enc->time_base) : "NULL",
                float_pts,
                enc ? enc->time_base.num : -1,
                enc ? enc->time_base.den : -1);
@@ -899,6 +917,7 @@ static int encode_frame(OutputFile *of, OutputStream *ost, AVFrame *frame)
 
     if (frame) {
         ost->frames_encoded++;
+        ost->samples_encoded += frame->nb_samples;
 
         if (debug_ts) {
             av_log(NULL, AV_LOG_INFO, "encoder <- type:%s "
@@ -971,6 +990,52 @@ static int encode_frame(OutputFile *of, OutputStream *ost, AVFrame *frame)
     av_assert0(0);
 }
 
+static int submit_encode_frame(OutputFile *of, OutputStream *ost,
+                               AVFrame *frame)
+{
+    int ret;
+
+    if (ost->sq_idx_encode < 0)
+        return encode_frame(of, ost, frame);
+
+    if (frame) {
+        ret = av_frame_ref(ost->sq_frame, frame);
+        if (ret < 0)
+            return ret;
+        frame = ost->sq_frame;
+    }
+
+    ret = sq_send(of->sq_encode, ost->sq_idx_encode,
+                  SQFRAME(frame));
+    if (ret < 0) {
+        if (frame)
+            av_frame_unref(frame);
+        if (ret != AVERROR_EOF)
+            return ret;
+    }
+
+    while (1) {
+        AVFrame *enc_frame = ost->sq_frame;
+
+        ret = sq_receive(of->sq_encode, ost->sq_idx_encode,
+                               SQFRAME(enc_frame));
+        if (ret == AVERROR_EOF) {
+            enc_frame = NULL;
+        } else if (ret < 0) {
+            return (ret == AVERROR(EAGAIN)) ? 0 : ret;
+        }
+
+        ret = encode_frame(of, ost, enc_frame);
+        if (enc_frame)
+            av_frame_unref(enc_frame);
+        if (ret < 0) {
+            if (ret == AVERROR_EOF)
+                close_output_stream(ost);
+            return ret;
+        }
+    }
+}
+
 static void do_audio_out(OutputFile *of, OutputStream *ost,
                          AVFrame *frame)
 {
@@ -984,10 +1049,9 @@ static void do_audio_out(OutputFile *of, OutputStream *ost,
     if (frame->pts == AV_NOPTS_VALUE || audio_sync_method < 0)
         frame->pts = ost->sync_opts;
     ost->sync_opts = frame->pts + frame->nb_samples;
-    ost->samples_encoded += frame->nb_samples;
 
-    ret = encode_frame(of, ost, frame);
-    if (ret < 0)
+    ret = submit_encode_frame(of, ost, frame);
+    if (ret < 0 && ret != AVERROR_EOF)
         exit_program(1);
 }
 
@@ -996,7 +1060,7 @@ static void do_subtitle_out(OutputFile *of,
                             AVSubtitle *sub)
 {
     int subtitle_out_max_size = 1024 * 1024;
-    int subtitle_out_size, nb, i;
+    int subtitle_out_size, nb, i, ret;
     AVCodecContext *enc;
     AVPacket *pkt = ost->pkt;
     int64_t pts;
@@ -1010,14 +1074,6 @@ static void do_subtitle_out(OutputFile *of,
 
     enc = ost->enc_ctx;
 
-    if (!subtitle_out) {
-        subtitle_out = av_malloc(subtitle_out_max_size);
-        if (!subtitle_out) {
-            av_log(NULL, AV_LOG_FATAL, "Failed to allocate subtitle_out\n");
-            exit_program(1);
-        }
-    }
-
     /* Note: DVB subtitle need one packet to draw them and one other
        packet to clear them */
     /* XXX: signal it in the codec context ? */
@@ -1037,6 +1093,12 @@ static void do_subtitle_out(OutputFile *of,
         if (!check_recording_time(ost))
             return;
 
+        ret = av_new_packet(pkt, subtitle_out_max_size);
+        if (ret < 0) {
+            av_log(NULL, AV_LOG_FATAL, "Failed to allocate subtitle encode buffer\n");
+            exit_program(1);
+        }
+
         sub->pts = pts;
         // start_display_time is required to be 0
         sub->pts               += av_rescale_q(sub->start_display_time, (AVRational){ 1, 1000 }, AV_TIME_BASE_Q);
@@ -1047,8 +1109,7 @@ static void do_subtitle_out(OutputFile *of,
 
         ost->frames_encoded++;
 
-        subtitle_out_size = avcodec_encode_subtitle(enc, subtitle_out,
-                                                    subtitle_out_max_size, sub);
+        subtitle_out_size = avcodec_encode_subtitle(enc, pkt->data, pkt->size, sub);
         if (i == 1)
             sub->num_rects = save_num_rects;
         if (subtitle_out_size < 0) {
@@ -1056,9 +1117,7 @@ static void do_subtitle_out(OutputFile *of,
             exit_program(1);
         }
 
-        av_packet_unref(pkt);
-        pkt->data = subtitle_out;
-        pkt->size = subtitle_out_size;
+        av_shrink_packet(pkt, subtitle_out_size);
         pkt->pts  = av_rescale_q(sub->pts, AV_TIME_BASE_Q, ost->mux_timebase);
         pkt->duration = av_rescale_q(sub->end_display_time, (AVRational){ 1, 1000 }, ost->mux_timebase);
         if (enc->codec_id == AV_CODEC_ID_DVB_SUBTITLE) {
@@ -1107,8 +1166,8 @@ static void do_video_out(OutputFile *of,
         (nb_filtergraphs == 0 || !filtergraphs[0]->graph_desc) &&
         next_picture &&
         ist &&
-        lrintf(next_picture->pkt_duration * av_q2d(ist->st->time_base) / av_q2d(enc->time_base)) > 0) {
-        duration = lrintf(next_picture->pkt_duration * av_q2d(ist->st->time_base) / av_q2d(enc->time_base));
+        lrintf(next_picture->duration * av_q2d(ist->st->time_base) / av_q2d(enc->time_base)) > 0) {
+        duration = lrintf(next_picture->duration * av_q2d(ist->st->time_base) / av_q2d(enc->time_base));
     }
 
     if (!next_picture) {
@@ -1139,7 +1198,7 @@ static void do_video_out(OutputFile *of,
 
         switch (ost->vsync_method) {
         case VSYNC_VSCFR:
-            if (ost->frame_number == 0 && delta0 >= 0.5) {
+            if (ost->vsync_frame_number == 0 && delta0 >= 0.5) {
                 av_log(NULL, AV_LOG_DEBUG, "Not duplicating %d initial frames\n", (int)lrintf(delta0));
                 delta = duration;
                 delta0 = 0;
@@ -1147,7 +1206,7 @@ static void do_video_out(OutputFile *of,
             }
         case VSYNC_CFR:
             // FIXME set to 0.5 after we fix some dts/pts bugs like in avidec.c
-            if (frame_drop_threshold && delta < frame_drop_threshold && ost->frame_number) {
+            if (frame_drop_threshold && delta < frame_drop_threshold && ost->vsync_frame_number) {
                 nb_frames = 0;
             } else if (delta < -1.1)
                 nb_frames = 0;
@@ -1156,15 +1215,18 @@ static void do_video_out(OutputFile *of,
                 if (delta0 > 1.1)
                     nb0_frames = llrintf(delta0 - 0.6);
             }
+            next_picture->duration = 1;
             break;
         case VSYNC_VFR:
             if (delta <= -0.6)
                 nb_frames = 0;
             else if (delta > 0.6)
                 ost->sync_opts = llrint(sync_ipts);
+            next_picture->duration = duration;
             break;
         case VSYNC_DROP:
         case VSYNC_PASSTHROUGH:
+            next_picture->duration = duration;
             ost->sync_opts = llrint(sync_ipts);
             break;
         default:
@@ -1177,7 +1239,7 @@ static void do_video_out(OutputFile *of,
      * But there may be reordering, so we can't throw away frames on encoder
      * flush, we need to limit them here, before they go into encoder.
      */
-    nb_frames = FFMIN(nb_frames, ost->max_frames - ost->frame_number);
+    nb_frames = FFMIN(nb_frames, ost->max_frames - ost->vsync_frame_number);
     nb0_frames = FFMIN(nb0_frames, nb_frames);
 
     memmove(ost->last_nb0_frames + 1,
@@ -1189,7 +1251,7 @@ static void do_video_out(OutputFile *of,
         nb_frames_drop++;
         av_log(NULL, AV_LOG_VERBOSE,
                "*** dropping frame %"PRId64" from stream %d at ts %"PRId64"\n",
-               ost->frame_number, ost->st->index, ost->last_frame->pts);
+               ost->vsync_frame_number, ost->st->index, ost->last_frame->pts);
     }
     if (nb_frames > (nb0_frames && ost->last_dropped) + (nb_frames > nb0_frames)) {
         if (nb_frames > dts_error_threshold * 30) {
@@ -1278,12 +1340,12 @@ static void do_video_out(OutputFile *of,
             av_log(NULL, AV_LOG_DEBUG, "Forced keyframe at time %f\n", pts_time);
         }
 
-        ret = encode_frame(of, ost, in_picture);
-        if (ret < 0)
+        ret = submit_encode_frame(of, ost, in_picture);
+        if (ret < 0 && ret != AVERROR_EOF)
             exit_program(1);
 
         ost->sync_opts++;
-        ost->frame_number++;
+        ost->vsync_frame_number++;
     }
 
     av_frame_unref(ost->last_frame);
@@ -1291,19 +1353,6 @@ static void do_video_out(OutputFile *of,
         av_frame_move_ref(ost->last_frame, next_picture);
 }
 
-static void finish_output_stream(OutputStream *ost)
-{
-    OutputFile *of = output_files[ost->file_index];
-    AVRational time_base = ost->stream_copy ? ost->mux_timebase : ost->enc_ctx->time_base;
-
-    ost->finished = ENCODER_FINISHED | MUXER_FINISHED;
-
-    if (of->shortest) {
-        int64_t end = av_rescale_q(ost->sync_opts - ost->first_pts, time_base, AV_TIME_BASE_Q);
-        of->recording_time = FFMIN(of->recording_time, end);
-    }
-}
-
 /**
  * Get and encode new output from any of the filtergraphs, without causing
  * activity.
@@ -1359,6 +1408,12 @@ static int reap_filters(int flush)
                 continue;
             }
 
+            if (filtered_frame->pts != AV_NOPTS_VALUE) {
+                AVRational tb = av_buffersink_get_time_base(filter);
+                ost->last_filter_pts = av_rescale_q(filtered_frame->pts, tb,
+                                                    AV_TIME_BASE_Q);
+            }
+
             switch (av_buffersink_get_type(filter)) {
             case AVMEDIA_TYPE_VIDEO:
                 if (!ost->frame_aspect_ratio.num)
@@ -1398,15 +1453,17 @@ static void print_final_stats(int64_t total_size)
 
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream *ost = output_streams[i];
-        switch (ost->enc_ctx->codec_type) {
+        AVCodecParameters *par = ost->st->codecpar;
+        switch (par->codec_type) {
             case AVMEDIA_TYPE_VIDEO: video_size += ost->data_size; break;
             case AVMEDIA_TYPE_AUDIO: audio_size += ost->data_size; break;
             case AVMEDIA_TYPE_SUBTITLE: subtitle_size += ost->data_size; break;
             default:                 other_size += ost->data_size; break;
         }
-        extra_size += ost->enc_ctx->extradata_size;
+        extra_size += par->extradata_size;
         data_size  += ost->data_size;
-        if (   (ost->enc_ctx->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2))
+        if (ost->enc_ctx &&
+            (ost->enc_ctx->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2))
             != AV_CODEC_FLAG_PASS1)
             pass1_used = 0;
     }
@@ -1436,7 +1493,7 @@ static void print_final_stats(int64_t total_size)
 
         for (j = 0; j < f->nb_streams; j++) {
             InputStream *ist = input_streams[f->ist_index + j];
-            enum AVMediaType type = ist->dec_ctx->codec_type;
+            enum AVMediaType type = ist->par->codec_type;
 
             total_size    += ist->data_size;
             total_packets += ist->nb_packets;
@@ -1466,18 +1523,18 @@ static void print_final_stats(int64_t total_size)
         uint64_t total_packets = 0, total_size = 0;
 
         av_log(NULL, AV_LOG_VERBOSE, "Output file #%d (%s):\n",
-               i, of->ctx->url);
+               i, of->url);
 
-        for (j = 0; j < of->ctx->nb_streams; j++) {
+        for (j = 0; j < of->nb_streams; j++) {
             OutputStream *ost = output_streams[of->ost_index + j];
-            enum AVMediaType type = ost->enc_ctx->codec_type;
+            enum AVMediaType type = ost->st->codecpar->codec_type;
 
             total_size    += ost->data_size;
-            total_packets += ost->packets_written;
+            total_packets += atomic_load(&ost->packets_written);
 
             av_log(NULL, AV_LOG_VERBOSE, "  Output stream #%d:%d (%s): ",
                    i, j, av_get_media_type_string(type));
-            if (ost->encoding_needed) {
+            if (ost->enc_ctx) {
                 av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" frames encoded",
                        ost->frames_encoded);
                 if (type == AVMEDIA_TYPE_AUDIO)
@@ -1486,7 +1543,7 @@ static void print_final_stats(int64_t total_size)
             }
 
             av_log(NULL, AV_LOG_VERBOSE, "%"PRIu64" packets muxed (%"PRIu64" bytes); ",
-                   ost->packets_written, ost->data_size);
+                   atomic_load(&ost->packets_written), ost->data_size);
 
             av_log(NULL, AV_LOG_VERBOSE, "\n");
         }
@@ -1507,10 +1564,7 @@ static void print_final_stats(int64_t total_size)
 static void print_report(int is_last_report, int64_t timer_start, int64_t cur_time)
 {
     AVBPrint buf, buf_script;
-    OutputStream *ost;
-    AVFormatContext *oc;
-    int64_t total_size;
-    AVCodecContext *enc;
+    int64_t total_size = of_filesize(output_files[0]);
     int vid, i;
     double bitrate;
     double speed;
@@ -1538,31 +1592,22 @@ static void print_report(int is_last_report, int64_t timer_start, int64_t cur_ti
 
     t = (cur_time-timer_start) / 1000000.0;
 
-
-    oc = output_files[0]->ctx;
-
-    total_size = avio_size(oc->pb);
-    if (total_size <= 0) // FIXME improve avio_size() so it works with non seekable output too
-        total_size = avio_tell(oc->pb);
-
     vid = 0;
     av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC);
     av_bprint_init(&buf_script, 0, AV_BPRINT_SIZE_AUTOMATIC);
     for (i = 0; i < nb_output_streams; i++) {
-        float q = -1;
-        ost = output_streams[i];
-        enc = ost->enc_ctx;
-        if (!ost->stream_copy)
-            q = ost->quality / (float) FF_QP2LAMBDA;
+        OutputStream         * const ost = output_streams[i];
+        const AVCodecContext * const enc = ost->enc_ctx;
+        const float q = enc ? ost->quality / (float) FF_QP2LAMBDA : -1;
 
-        if (vid && enc->codec_type == AVMEDIA_TYPE_VIDEO) {
+        if (vid && ost->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
             av_bprintf(&buf, "q=%2.1f ", q);
             av_bprintf(&buf_script, "stream_%d_%d_q=%.1f\n",
                        ost->file_index, ost->index, q);
         }
-        if (!vid && enc->codec_type == AVMEDIA_TYPE_VIDEO) {
+        if (!vid && ost->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
             float fps;
-            int64_t frame_number = ost->frame_number;
+            uint64_t frame_number = atomic_load(&ost->packets_written);
 
             fps = t > 1 ? frame_number / t : 0;
             av_bprintf(&buf, "frame=%5"PRId64" fps=%3.*f q=%3.1f ",
@@ -1582,7 +1627,8 @@ static void print_report(int is_last_report, int64_t timer_start, int64_t cur_ti
                     av_bprintf(&buf, "%X", av_log2(qp_histogram[j] + 1));
             }
 
-            if ((enc->flags & AV_CODEC_FLAG_PSNR) && (ost->pict_type != AV_PICTURE_TYPE_NONE || is_last_report)) {
+            if (enc && (enc->flags & AV_CODEC_FLAG_PSNR) &&
+                (ost->pict_type != AV_PICTURE_TYPE_NONE || is_last_report)) {
                 int j;
                 double error, error_sum = 0;
                 double scale, scale_sum = 0;
@@ -1614,9 +1660,8 @@ static void print_report(int is_last_report, int64_t timer_start, int64_t cur_ti
             vid = 1;
         }
         /* compute min output value */
-        if (av_stream_get_end_pts(ost->st) != AV_NOPTS_VALUE) {
-            pts = FFMAX(pts, av_rescale_q(av_stream_get_end_pts(ost->st),
-                                          ost->st->time_base, AV_TIME_BASE_Q));
+        if (ost->last_mux_dts != AV_NOPTS_VALUE) {
+            pts = FFMAX(pts, ost->last_mux_dts);
             if (copy_ts) {
                 if (copy_ts_first_pts == AV_NOPTS_VALUE && pts > 1)
                     copy_ts_first_pts = pts;
@@ -1736,12 +1781,19 @@ static void flush_encoders(void)
 {
     int i, ret;
 
+    for (i = 0; i < nb_output_streams; i++) {
+        OutputStream   *ost = output_streams[i];
+        OutputFile      *of = output_files[ost->file_index];
+        if (ost->sq_idx_encode >= 0)
+            sq_send(of->sq_encode, ost->sq_idx_encode, SQFRAME(NULL));
+    }
+
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream   *ost = output_streams[i];
         AVCodecContext *enc = ost->enc_ctx;
         OutputFile      *of = output_files[ost->file_index];
 
-        if (!ost->encoding_needed)
+        if (!enc)
             continue;
 
         // Try to enable encoding with no input frames.
@@ -1758,7 +1810,7 @@ static void flush_encoders(void)
                 for (x = 0; x < fg->nb_inputs; x++) {
                     InputFilter *ifilter = fg->inputs[x];
                     if (ifilter->format < 0 &&
-                        ifilter_parameters_from_codecpar(ifilter, ifilter->ist->st->codecpar) < 0) {
+                        ifilter_parameters_from_codecpar(ifilter, ifilter->ist->par) < 0) {
                         av_log(NULL, AV_LOG_ERROR, "Error copying paramerets from input stream\n");
                         exit_program(1);
                     }
@@ -1773,7 +1825,7 @@ static void flush_encoders(void)
                     exit_program(1);
                 }
 
-                finish_output_stream(ost);
+                output_packet(of, ost->pkt, ost, 1);
             }
 
             init_output_stream_wrapper(ost, NULL, 1);
@@ -1782,7 +1834,7 @@ static void flush_encoders(void)
         if (enc->codec_type != AVMEDIA_TYPE_VIDEO && enc->codec_type != AVMEDIA_TYPE_AUDIO)
             continue;
 
-        ret = encode_frame(of, ost, NULL);
+        ret = submit_encode_frame(of, ost, NULL);
         if (ret != AVERROR_EOF)
             exit_program(1);
     }
@@ -1828,12 +1880,9 @@ static void do_streamcopy(InputStream *ist, OutputStream *ost, const AVPacket *p
         return;
 
     if (!ost->streamcopy_started && !ost->copy_prior_start) {
-        int64_t comp_start = start_time;
-        if (copy_ts && f->start_time != AV_NOPTS_VALUE)
-            comp_start = FFMAX(start_time, f->start_time + f->ts_offset);
         if (pkt->pts == AV_NOPTS_VALUE ?
-            ist->pts < comp_start :
-            pkt->pts < av_rescale_q(comp_start, AV_TIME_BASE_Q, ist->st->time_base))
+            ist->pts < ost->ts_copy_start :
+            pkt->pts < av_rescale_q(ost->ts_copy_start, AV_TIME_BASE_Q, ist->st->time_base))
             return;
     }
 
@@ -1864,11 +1913,11 @@ static void do_streamcopy(InputStream *ist, OutputStream *ost, const AVPacket *p
     if (pkt->dts == AV_NOPTS_VALUE) {
         opkt->dts = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ost->mux_timebase);
     } else if (ost->st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
-        int duration = av_get_audio_frame_duration(ist->dec_ctx, pkt->size);
+        int duration = av_get_audio_frame_duration2(ist->par, pkt->size);
         if(!duration)
-            duration = ist->dec_ctx->frame_size;
+            duration = ist->par->frame_size;
         opkt->dts = av_rescale_delta(ist->st->time_base, pkt->dts,
-                                    (AVRational){1, ist->dec_ctx->sample_rate}, duration,
+                                    (AVRational){1, ist->par->sample_rate}, duration,
                                     &ist->filter_in_rescale_delta_last, ost->mux_timebase);
         /* dts will be set immediately afterwards to what pts is now */
         opkt->pts = opkt->dts - ost_tb_start_time;
@@ -1885,25 +1934,6 @@ static void do_streamcopy(InputStream *ist, OutputStream *ost, const AVPacket *p
     ost->streamcopy_started = 1;
 }
 
-int guess_input_channel_layout(InputStream *ist)
-{
-    AVCodecContext *dec = ist->dec_ctx;
-
-    if (dec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC) {
-        char layout_name[256];
-
-        if (dec->ch_layout.nb_channels > ist->guess_layout_max)
-            return 0;
-        av_channel_layout_default(&dec->ch_layout, dec->ch_layout.nb_channels);
-        if (dec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC)
-            return 0;
-        av_channel_layout_describe(&dec->ch_layout, layout_name, sizeof(layout_name));
-        av_log(NULL, AV_LOG_WARNING, "Guessed Channel Layout for Input Stream "
-               "#%d.%d : %s\n", ist->file_index, ist->st->index, layout_name);
-    }
-    return 1;
-}
-
 static void check_decode_result(InputStream *ist, int *got_output, int ret)
 {
     if (*got_output || ret<0)
@@ -1947,7 +1977,7 @@ static int ifilter_send_frame(InputFilter *ifilter, AVFrame *frame, int keep_ref
     /* determine if the parameters for this input changed */
     need_reinit = ifilter->format != frame->format;
 
-    switch (ifilter->ist->st->codecpar->codec_type) {
+    switch (ifilter->ist->par->codec_type) {
     case AVMEDIA_TYPE_AUDIO:
         need_reinit |= ifilter->sample_rate    != frame->sample_rate ||
                        av_channel_layout_compare(&ifilter->ch_layout, &frame->ch_layout);
@@ -2027,7 +2057,7 @@ static int ifilter_send_eof(InputFilter *ifilter, int64_t pts)
     } else {
         // the filtergraph was never configured
         if (ifilter->format < 0) {
-            ret = ifilter_parameters_from_codecpar(ifilter, ifilter->ist->st->codecpar);
+            ret = ifilter_parameters_from_codecpar(ifilter, ifilter->ist->par);
             if (ret < 0)
                 return ret;
         }
@@ -2183,9 +2213,9 @@ static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output, int64_
 
     // The following line may be required in some cases where there is no parser
     // or the parser does not has_b_frames correctly
-    if (ist->st->codecpar->video_delay < ist->dec_ctx->has_b_frames) {
+    if (ist->par->video_delay < ist->dec_ctx->has_b_frames) {
         if (ist->dec_ctx->codec_id == AV_CODEC_ID_H264) {
-            ist->st->codecpar->video_delay = ist->dec_ctx->has_b_frames;
+            ist->par->video_delay = ist->dec_ctx->has_b_frames;
         } else
             av_log(ist->dec_ctx, AV_LOG_WARNING,
                    "video_delay is larger in decoder than demuxer %d > %d.\n"
@@ -2193,7 +2223,7 @@ static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output, int64_
                    "of this file to https://streams.videolan.org/upload/ "
                    "and contact the ffmpeg-devel mailing list. (ffmpeg-devel@ffmpeg.org)\n",
                    ist->dec_ctx->has_b_frames,
-                   ist->st->codecpar->video_delay);
+                   ist->par->video_delay);
     }
 
     if (ret != AVERROR_EOF)
@@ -2226,10 +2256,9 @@ static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output, int64_
         if (err < 0)
             goto fail;
     }
-    ist->hwaccel_retrieved_pix_fmt = decoded_frame->format;
 
     best_effort_timestamp= decoded_frame->best_effort_timestamp;
-    *duration_pts = decoded_frame->pkt_duration;
+    *duration_pts = decoded_frame->duration;
 
     if (ist->framerate.num)
         best_effort_timestamp = ist->cfr_next_pts++;
@@ -2293,7 +2322,7 @@ static int transcode_subtitles(InputStream *ist, AVPacket *pkt, int *got_output,
             end = av_rescale(subtitle.pts - ist->prev_sub.subtitle.pts,
                              1000, AV_TIME_BASE);
             if (end < ist->prev_sub.subtitle.end_display_time) {
-                av_log(ist->dec_ctx, AV_LOG_DEBUG,
+                av_log(NULL, AV_LOG_DEBUG,
                        "Subtitle duration reduced from %"PRId32" to %d%s\n",
                        ist->prev_sub.subtitle.end_display_time, end,
                        end <= 0 ? ", dropping it" : "");
@@ -2332,7 +2361,7 @@ static int transcode_subtitles(InputStream *ist, AVPacket *pkt, int *got_output,
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream *ost = output_streams[i];
 
-        if (!check_output_constraints(ist, ost) || !ost->encoding_needed
+        if (!check_output_constraints(ist, ost) || !ost->enc_ctx
             || ost->enc->type != AVMEDIA_TYPE_SUBTITLE)
             continue;
 
@@ -2363,6 +2392,7 @@ static int send_filter_eof(InputStream *ist)
 /* pkt = NULL means EOF (needed to flush decoder buffers) */
 static int process_input_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
 {
+    const AVCodecParameters *par = ist->par;
     int ret = 0, i;
     int repeating = 0;
     int eof_reached = 0;
@@ -2395,7 +2425,7 @@ static int process_input_packet(InputStream *ist, const AVPacket *pkt, int no_eo
 
     if (pkt && pkt->dts != AV_NOPTS_VALUE) {
         ist->next_dts = ist->dts = av_rescale_q(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q);
-        if (ist->dec_ctx->codec_type != AVMEDIA_TYPE_VIDEO || !ist->decoding_needed)
+        if (par->codec_type != AVMEDIA_TYPE_VIDEO || !ist->decoding_needed)
             ist->next_pts = ist->pts = ist->dts;
     }
 
@@ -2409,7 +2439,7 @@ static int process_input_packet(InputStream *ist, const AVPacket *pkt, int no_eo
         ist->pts = ist->next_pts;
         ist->dts = ist->next_dts;
 
-        switch (ist->dec_ctx->codec_type) {
+        switch (par->codec_type) {
         case AVMEDIA_TYPE_AUDIO:
             ret = decode_audio    (ist, repeating ? NULL : avpkt, &got_output,
                                    &decode_failed);
@@ -2506,12 +2536,12 @@ static int process_input_packet(InputStream *ist, const AVPacket *pkt, int no_eo
     /* handle stream copy */
     if (!ist->decoding_needed && pkt) {
         ist->dts = ist->next_dts;
-        switch (ist->dec_ctx->codec_type) {
+        switch (par->codec_type) {
         case AVMEDIA_TYPE_AUDIO:
             av_assert1(pkt->duration >= 0);
-            if (ist->dec_ctx->sample_rate) {
-                ist->next_dts += ((int64_t)AV_TIME_BASE * ist->dec_ctx->frame_size) /
-                                  ist->dec_ctx->sample_rate;
+            if (par->sample_rate) {
+                ist->next_dts += ((int64_t)AV_TIME_BASE * par->frame_size) /
+                                  par->sample_rate;
             } else {
                 ist->next_dts += av_rescale_q(pkt->duration, ist->st->time_base, AV_TIME_BASE_Q);
             }
@@ -2540,7 +2570,8 @@ static int process_input_packet(InputStream *ist, const AVPacket *pkt, int no_eo
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream *ost = output_streams[i];
 
-        if (!check_output_constraints(ist, ost) || ost->encoding_needed)
+        if (!check_output_constraints(ist, ost) || ost->enc_ctx ||
+            (!pkt && no_eof))
             continue;
 
         do_streamcopy(ist, ost, pkt);
@@ -2708,24 +2739,31 @@ static int init_output_stream_streamcopy(OutputStream *ost)
 {
     OutputFile *of = output_files[ost->file_index];
     InputStream *ist = get_input_stream(ost);
-    AVCodecParameters *par_dst = ost->st->codecpar;
-    AVCodecParameters *par_src = ost->ref_par;
+    InputFile *ifile = input_files[ist->file_index];
+    AVCodecParameters *par = ost->st->codecpar;
+    AVCodecContext *codec_ctx;
     AVRational sar;
     int i, ret;
-    uint32_t codec_tag = par_dst->codec_tag;
+    uint32_t codec_tag = par->codec_tag;
 
     av_assert0(ist && !ost->filter);
 
-    ret = avcodec_parameters_to_context(ost->enc_ctx, ist->st->codecpar);
+    codec_ctx = avcodec_alloc_context3(NULL);
+    if (!codec_ctx)
+        return AVERROR(ENOMEM);
+
+    ret = avcodec_parameters_to_context(codec_ctx, ist->par);
     if (ret >= 0)
-        ret = av_opt_set_dict(ost->enc_ctx, &ost->encoder_opts);
+        ret = av_opt_set_dict(codec_ctx, &ost->encoder_opts);
     if (ret < 0) {
         av_log(NULL, AV_LOG_FATAL,
                "Error setting up codec context options.\n");
+        avcodec_free_context(&codec_ctx);
         return ret;
     }
 
-    ret = avcodec_parameters_from_context(par_src, ost->enc_ctx);
+    ret = avcodec_parameters_from_context(par, codec_ctx);
+    avcodec_free_context(&codec_ctx);
     if (ret < 0) {
         av_log(NULL, AV_LOG_FATAL,
                "Error getting reference codec parameters.\n");
@@ -2735,16 +2773,12 @@ static int init_output_stream_streamcopy(OutputStream *ost)
     if (!codec_tag) {
         unsigned int codec_tag_tmp;
         if (!of->format->codec_tag ||
-            av_codec_get_id (of->format->codec_tag, par_src->codec_tag) == par_src->codec_id ||
-            !av_codec_get_tag2(of->format->codec_tag, par_src->codec_id, &codec_tag_tmp))
-            codec_tag = par_src->codec_tag;
+            av_codec_get_id (of->format->codec_tag, par->codec_tag) == par->codec_id ||
+            !av_codec_get_tag2(of->format->codec_tag, par->codec_id, &codec_tag_tmp))
+            codec_tag = par->codec_tag;
     }
 
-    ret = avcodec_parameters_copy(par_dst, par_src);
-    if (ret < 0)
-        return ret;
-
-    par_dst->codec_tag = codec_tag;
+    par->codec_tag = codec_tag;
 
     if (!ost->frame_rate.num)
         ost->frame_rate = ist->framerate;
@@ -2770,6 +2804,15 @@ static int init_output_stream_streamcopy(OutputStream *ost)
     if (ost->st->duration <= 0 && ist->st->duration > 0)
         ost->st->duration = av_rescale_q(ist->st->duration, ist->st->time_base, ost->st->time_base);
 
+    if (!ost->copy_prior_start) {
+        ost->ts_copy_start = (of->start_time == AV_NOPTS_VALUE) ?
+                             0 : of->start_time;
+        if (copy_ts && ifile->start_time != AV_NOPTS_VALUE) {
+            ost->ts_copy_start = FFMAX(ost->ts_copy_start,
+                                       ifile->start_time + ifile->ts_offset);
+        }
+    }
+
     if (ist->st->nb_side_data) {
         for (i = 0; i < ist->st->nb_side_data; i++) {
             const AVPacketSideData *sd_src = &ist->st->side_data[i];
@@ -2789,30 +2832,27 @@ static int init_output_stream_streamcopy(OutputStream *ost)
             av_display_rotation_set((int32_t *)sd, -ost->rotate_override_value);
     }
 
-    switch (par_dst->codec_type) {
+    switch (par->codec_type) {
     case AVMEDIA_TYPE_AUDIO:
-        if (audio_volume != 256) {
-            av_log(NULL, AV_LOG_FATAL, "-acodec copy and -vol are incompatible (frames are not decoded)\n");
-            exit_program(1);
-        }
-        if((par_dst->block_align == 1 || par_dst->block_align == 1152 || par_dst->block_align == 576) && par_dst->codec_id == AV_CODEC_ID_MP3)
-            par_dst->block_align= 0;
-        if(par_dst->codec_id == AV_CODEC_ID_AC3)
-            par_dst->block_align= 0;
+        if ((par->block_align == 1 || par->block_align == 1152 || par->block_align == 576) &&
+            par->codec_id == AV_CODEC_ID_MP3)
+            par->block_align = 0;
+        if (par->codec_id == AV_CODEC_ID_AC3)
+            par->block_align = 0;
         break;
     case AVMEDIA_TYPE_VIDEO:
         if (ost->frame_aspect_ratio.num) { // overridden by the -aspect cli option
             sar =
                 av_mul_q(ost->frame_aspect_ratio,
-                         (AVRational){ par_dst->height, par_dst->width });
+                         (AVRational){ par->height, par->width });
             av_log(NULL, AV_LOG_WARNING, "Overriding aspect ratio "
                    "with stream copy may produce invalid files\n");
             }
         else if (ist->st->sample_aspect_ratio.num)
             sar = ist->st->sample_aspect_ratio;
         else
-            sar = par_src->sample_aspect_ratio;
-        ost->st->sample_aspect_ratio = par_dst->sample_aspect_ratio = sar;
+            sar = par->sample_aspect_ratio;
+        ost->st->sample_aspect_ratio = par->sample_aspect_ratio = sar;
         ost->st->avg_frame_rate = ist->st->avg_frame_rate;
         ost->st->r_frame_rate = ist->st->r_frame_rate;
         break;
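
The -aspect override above derives the sample aspect ratio from the requested display aspect ratio as SAR = DAR * height / width. A standalone example with hypothetical numbers (not part of the patch):

    #include <stdio.h>
    #include "libavutil/rational.h"

    int main(void)
    {
        AVRational dar = { 16, 9 };                /* e.g. -aspect 16:9 */
        int width = 1920, height = 1080;
        AVRational sar = av_mul_q(dar, (AVRational){ height, width });
        printf("SAR = %d:%d\n", sar.num, sar.den); /* 1:1 for 1920x1080 */
        return 0;
    }
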
@@ -2825,37 +2865,18 @@ static int init_output_stream_streamcopy(OutputStream *ost)
 
 static void set_encoder_id(OutputFile *of, OutputStream *ost)
 {
-    const AVDictionaryEntry *e;
-
     uint8_t *encoder_string;
     int encoder_string_len;
-    int format_flags = 0;
-    int codec_flags = ost->enc_ctx->flags;
 
     if (av_dict_get(ost->st->metadata, "encoder",  NULL, 0))
         return;
 
-    e = av_dict_get(of->opts, "fflags", NULL, 0);
-    if (e) {
-        const AVOption *o = av_opt_find(of->ctx, "fflags", NULL, 0, 0);
-        if (!o)
-            return;
-        av_opt_eval_flags(of->ctx, o, e->value, &format_flags);
-    }
-    e = av_dict_get(ost->encoder_opts, "flags", NULL, 0);
-    if (e) {
-        const AVOption *o = av_opt_find(ost->enc_ctx, "flags", NULL, 0, 0);
-        if (!o)
-            return;
-        av_opt_eval_flags(ost->enc_ctx, o, e->value, &codec_flags);
-    }
-
     encoder_string_len = sizeof(LIBAVCODEC_IDENT) + strlen(ost->enc->name) + 2;
     encoder_string     = av_mallocz(encoder_string_len);
     if (!encoder_string)
         exit_program(1);
 
-    if (!(format_flags & AVFMT_FLAG_BITEXACT) && !(codec_flags & AV_CODEC_FLAG_BITEXACT))
+    if (!of->bitexact && !ost->bitexact)
         av_strlcpy(encoder_string, LIBAVCODEC_IDENT " ", encoder_string_len);
     else
         av_strlcpy(encoder_string, "Lavc ", encoder_string_len);
@@ -2889,12 +2910,15 @@ static void parse_forced_key_frames(char *kf, OutputStream *ost,
             *next++ = 0;
 
         if (!memcmp(p, "chapters", 8)) {
-
-            AVFormatContext *avf = output_files[ost->file_index]->ctx;
+            OutputFile *of = output_files[ost->file_index];
+            AVChapter * const *ch;
+            unsigned int    nb_ch;
             int j;
 
-            if (avf->nb_chapters > INT_MAX - size ||
-                !(pts = av_realloc_f(pts, size += avf->nb_chapters - 1,
+            ch = of_get_chapters(of, &nb_ch);
+
+            if (nb_ch > INT_MAX - size ||
+                !(pts = av_realloc_f(pts, size += nb_ch - 1,
                                      sizeof(*pts)))) {
                 av_log(NULL, AV_LOG_FATAL,
                        "Could not allocate forced key frames array.\n");
@@ -2903,8 +2927,8 @@ static void parse_forced_key_frames(char *kf, OutputStream *ost,
             t = p[8] ? parse_time_or_die("force_key_frames", p + 8, 1) : 0;
             t = av_rescale_q(t, AV_TIME_BASE_Q, avctx->time_base);
 
-            for (j = 0; j < avf->nb_chapters; j++) {
-                AVChapter *c = avf->chapters[j];
+            for (j = 0; j < nb_ch; j++) {
+                const AVChapter *c = ch[j];
                 av_assert1(index < size);
                 pts[index++] = av_rescale_q(c->start, c->time_base,
                                             avctx->time_base) + t;
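
For illustration, a standalone sketch of how "force_key_frames chapters[-offset]" maps chapter starts into encoder timestamps: each chapter start is rescaled from the chapter time base into the encoder time base and shifted by the parsed offset. This is not part of the patch; in the patch the chapters come from the fftools-internal of_get_chapters(), whereas here they are read directly from an already opened AVFormatContext, and the function name and parameters are hypothetical.

    #include "libavformat/avformat.h"
    #include "libavutil/avutil.h"
    #include "libavutil/mathematics.h"
    #include "libavutil/parseutils.h"

    static int chapter_keyframe_pts(const AVFormatContext *fmt, AVRational enc_tb,
                                    const char *offset_str, int64_t *pts, int max)
    {
        int64_t t = 0;
        int n = 0;

        /* an offset such as "0.1" is parsed into AV_TIME_BASE units */
        if (offset_str && av_parse_time(&t, offset_str, 1) < 0)
            return AVERROR(EINVAL);
        t = av_rescale_q(t, AV_TIME_BASE_Q, enc_tb);

        for (unsigned i = 0; i < fmt->nb_chapters && n < max; i++) {
            const AVChapter *c = fmt->chapters[i];
            pts[n++] = av_rescale_q(c->start, c->time_base, enc_tb) + t;
        }
        return n;
    }
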
@@ -2931,7 +2955,6 @@ static void init_encoder_time_base(OutputStream *ost, AVRational default_time_ba
 {
     InputStream *ist = get_input_stream(ost);
     AVCodecContext *enc_ctx = ost->enc_ctx;
-    AVFormatContext *oc;
 
     if (ost->enc_timebase.num > 0) {
         enc_ctx->time_base = ost->enc_timebase;
@@ -2944,8 +2967,9 @@ static void init_encoder_time_base(OutputStream *ost, AVRational default_time_ba
             return;
         }
 
-        oc = output_files[ost->file_index]->ctx;
-        av_log(oc, AV_LOG_WARNING, "Input stream data not available, using default time base\n");
+        av_log(NULL, AV_LOG_WARNING,
+               "Input stream data for output stream #%d:%d not available, "
+               "using default time base\n", ost->file_index, ost->index);
     }
 
     enc_ctx->time_base = default_time_base;
@@ -2957,7 +2981,6 @@ static int init_output_stream_encode(OutputStream *ost, AVFrame *frame)
     AVCodecContext *enc_ctx = ost->enc_ctx;
     AVCodecContext *dec_ctx = NULL;
     OutputFile      *of = output_files[ost->file_index];
-    AVFormatContext *oc = of->ctx;
     int ret;
 
     set_encoder_id(output_files[ost->file_index], ost);
@@ -3020,7 +3043,7 @@ static int init_output_stream_encode(OutputStream *ost, AVFrame *frame)
         if (   av_q2d(enc_ctx->time_base) < 0.001 && ost->vsync_method != VSYNC_PASSTHROUGH
            && (ost->vsync_method == VSYNC_CFR || ost->vsync_method == VSYNC_VSCFR ||
                (ost->vsync_method == VSYNC_AUTO && !(of->format->flags & AVFMT_VARIABLE_FPS)))){
-            av_log(oc, AV_LOG_WARNING, "Frame rate very high for a muxer not efficiently supporting it.\n"
+            av_log(NULL, AV_LOG_WARNING, "Frame rate very high for a muxer not efficiently supporting it.\n"
                                        "Please consider specifying a lower framerate, a different muxer or "
                                        "setting vsync/fps_mode to vfr\n");
         }
@@ -3098,8 +3121,8 @@ static int init_output_stream_encode(OutputStream *ost, AVFrame *frame)
     case AVMEDIA_TYPE_SUBTITLE:
         enc_ctx->time_base = AV_TIME_BASE_Q;
         if (!enc_ctx->width) {
-            enc_ctx->width     = input_streams[ost->source_index]->st->codecpar->width;
-            enc_ctx->height    = input_streams[ost->source_index]->st->codecpar->height;
+            enc_ctx->width     = input_streams[ost->source_index]->par->width;
+            enc_ctx->height    = input_streams[ost->source_index]->par->height;
         }
         break;
     case AVMEDIA_TYPE_DATA:
@@ -3109,6 +3132,12 @@ static int init_output_stream_encode(OutputStream *ost, AVFrame *frame)
         break;
     }
 
+    if (ost->bitexact)
+        enc_ctx->flags |= AV_CODEC_FLAG_BITEXACT;
+
+    if (ost->sq_idx_encode >= 0)
+        sq_set_tb(of->sq_encode, ost->sq_idx_encode, enc_ctx->time_base);
+
     ost->mux_timebase = enc_ctx->time_base;
 
     return 0;
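
The hunk above turns the precomputed ost->bitexact value into AV_CODEC_FLAG_BITEXACT on the encoder context (replacing the flag-string parsing removed from set_encoder_id() earlier) and registers the encoder time base with the sync queue. A minimal sketch of what the bitexact flags mean on the public API (not part of the patch; sq_set_tb() is an fftools-internal helper and not reproduced here):

    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"

    /* enc_ctx and mux_ctx are assumed to be already-allocated contexts */
    static void make_bitexact(AVCodecContext *enc_ctx, AVFormatContext *mux_ctx)
    {
        enc_ctx->flags |= AV_CODEC_FLAG_BITEXACT;   /* encoder output */
        mux_ctx->flags |= AVFMT_FLAG_BITEXACT;      /* muxer (-fflags +bitexact) */
    }
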
@@ -3117,9 +3146,10 @@ static int init_output_stream_encode(OutputStream *ost, AVFrame *frame)
 static int init_output_stream(OutputStream *ost, AVFrame *frame,
                               char *error, int error_len)
 {
+    OutputFile *of = output_files[ost->file_index];
     int ret = 0;
 
-    if (ost->encoding_needed) {
+    if (ost->enc_ctx) {
         const AVCodec *codec = ost->enc;
         AVCodecContext *dec = NULL;
         InputStream *ist;
@@ -3236,7 +3266,7 @@ static int init_output_stream(OutputStream *ost, AVFrame *frame,
         // copy estimated duration as a hint to the muxer
         if (ost->st->duration <= 0 && ist && ist->st->duration > 0)
             ost->st->duration = av_rescale_q(ist->st->duration, ist->st->time_base, ost->st->time_base);
-    } else if (ost->stream_copy) {
+    } else if (ost->source_index >= 0) {
         ret = init_output_stream_streamcopy(ost);
         if (ret < 0)
             return ret;
@@ -3249,6 +3279,9 @@ static int init_output_stream(OutputStream *ost, AVFrame *frame,
     if (ret < 0)
         return ret;
 
+    if (ost->sq_idx_mux >= 0)
+        sq_set_tb(of->sq_mux, ost->sq_idx_mux, ost->mux_timebase);
+
     ost->initialized = 1;
 
     ret = of_check_init(output_files[ost->file_index]);
@@ -3258,25 +3291,9 @@ static int init_output_stream(OutputStream *ost, AVFrame *frame,
     return ret;
 }
 
-static void report_new_stream(int input_index, AVPacket *pkt)
-{
-    InputFile *file = input_files[input_index];
-    AVStream *st = file->ctx->streams[pkt->stream_index];
-
-    if (pkt->stream_index < file->nb_streams_warn)
-        return;
-    av_log(file->ctx, AV_LOG_WARNING,
-           "New %s stream %d:%d at pos:%"PRId64" and DTS:%ss\n",
-           av_get_media_type_string(st->codecpar->codec_type),
-           input_index, pkt->stream_index,
-           pkt->pos, av_ts2timestr(pkt->dts, &st->time_base));
-    file->nb_streams_warn = pkt->stream_index + 1;
-}
-
 static int transcode_init(void)
 {
     int ret = 0, i, j, k;
-    AVFormatContext *oc;
     OutputStream *ost;
     InputStream *ist;
     char error[1024] = {0};
@@ -3304,15 +3321,32 @@ static int transcode_init(void)
                 input_streams[j + ifile->ist_index]->start = av_gettime_relative();
     }
 
+    // Correct starttime based on the enabled streams
+    for (i = 0; i < nb_input_files; i++) {
+        InputFile       *ifile = input_files[i];
+        AVFormatContext    *is = ifile->ctx;
+        int64_t new_start_time = INT64_MAX;
+
+        if (is->start_time == AV_NOPTS_VALUE ||
+            !(is->iformat->flags & AVFMT_TS_DISCONT))
+            continue;
+
+        for (int j = 0; j < is->nb_streams; j++) {
+            AVStream *st = is->streams[j];
+            if(st->discard == AVDISCARD_ALL || st->start_time == AV_NOPTS_VALUE)
+                continue;
+            new_start_time = FFMIN(new_start_time, av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q));
+        }
+        if (new_start_time > is->start_time) {
+            av_log(is, AV_LOG_VERBOSE, "Correcting start time by %"PRId64"\n", new_start_time - is->start_time);
+            ifile->ts_offset = -new_start_time;
+        }
+    }
+
     /* init input streams */
     for (i = 0; i < nb_input_streams; i++)
-        if ((ret = init_input_stream(i, error, sizeof(error))) < 0) {
-            for (i = 0; i < nb_output_streams; i++) {
-                ost = output_streams[i];
-                avcodec_close(ost->enc_ctx);
-            }
+        if ((ret = init_input_stream(i, error, sizeof(error))) < 0)
             goto dump_format;
-        }
 
     /*
      * initialize stream copy and subtitle/data streams.
@@ -3324,9 +3358,9 @@ static int transcode_init(void)
      *   known after the encoder is initialized.
      */
     for (i = 0; i < nb_output_streams; i++) {
-        if (!output_streams[i]->stream_copy &&
-            (output_streams[i]->enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO ||
-             output_streams[i]->enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO))
+        if (output_streams[i]->enc_ctx &&
+            (output_streams[i]->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ||
+             output_streams[i]->st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO))
             continue;
 
         ret = init_output_stream_wrapper(output_streams[i], NULL, 0);
@@ -3350,16 +3384,6 @@ static int transcode_init(void)
         }
     }
 
-    /* write headers for files with no streams */
-    for (i = 0; i < nb_output_files; i++) {
-        oc = output_files[i]->ctx;
-        if (output_files[i]->format->flags & AVFMT_NOSTREAMS && oc->nb_streams == 0) {
-            ret = of_check_init(output_files[i]);
-            if (ret < 0)
-                goto dump_format;
-        }
-    }
-
  dump_format:
     /* dump the stream mapping */
     av_log(NULL, AV_LOG_INFO, "Stream mapping:\n");
@@ -3404,13 +3428,7 @@ static int transcode_init(void)
                input_streams[ost->source_index]->st->index,
                ost->file_index,
                ost->index);
-        if (ost->sync_ist != input_streams[ost->source_index])
-            av_log(NULL, AV_LOG_INFO, " [sync #%d:%d]",
-                   ost->sync_ist->file_index,
-                   ost->sync_ist->st->index);
-        if (ost->stream_copy)
-            av_log(NULL, AV_LOG_INFO, " (copy)");
-        else {
+        if (ost->enc_ctx) {
             const AVCodec *in_codec    = input_streams[ost->source_index]->dec;
             const AVCodec *out_codec   = ost->enc;
             const char *decoder_name   = "?";
@@ -3440,7 +3458,8 @@ static int transcode_init(void)
             av_log(NULL, AV_LOG_INFO, " (%s (%s) -> %s (%s))",
                    in_codec_name, decoder_name,
                    out_codec_name, encoder_name);
-        }
+        } else
+            av_log(NULL, AV_LOG_INFO, " (copy)");
         av_log(NULL, AV_LOG_INFO, "\n");
     }
 
@@ -3461,18 +3480,9 @@ static int need_output(void)
 
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream *ost    = output_streams[i];
-        OutputFile *of       = output_files[ost->file_index];
-        AVFormatContext *os  = output_files[ost->file_index]->ctx;
 
-        if (ost->finished ||
-            (os->pb && avio_tell(os->pb) >= of->limit_filesize))
-            continue;
-        if (ost->frame_number >= ost->max_frames) {
-            int j;
-            for (j = 0; j < of->ctx->nb_streams; j++)
-                close_output_stream(output_streams[of->ost_index + j]);
+        if (ost->finished)
             continue;
-        }
 
         return 1;
     }
@@ -3493,13 +3503,18 @@ static OutputStream *choose_output(void)
 
     for (i = 0; i < nb_output_streams; i++) {
         OutputStream *ost = output_streams[i];
-        int64_t opts = ost->last_mux_dts == AV_NOPTS_VALUE ? INT64_MIN :
-                       av_rescale_q(ost->last_mux_dts, ost->st->time_base,
-                                    AV_TIME_BASE_Q);
-        if (ost->last_mux_dts == AV_NOPTS_VALUE)
-            av_log(NULL, AV_LOG_DEBUG,
-                "cur_dts is invalid st:%d (%d) [init:%d i_done:%d finish:%d] (this is harmless if it occurs once at the start per stream)\n",
-                ost->st->index, ost->st->id, ost->initialized, ost->inputs_done, ost->finished);
+        int64_t opts;
+
+        if (ost->filter && ost->last_filter_pts != AV_NOPTS_VALUE) {
+            opts = ost->last_filter_pts;
+        } else {
+            opts = ost->last_mux_dts == AV_NOPTS_VALUE ?
+                   INT64_MIN : ost->last_mux_dts;
+            if (ost->last_mux_dts == AV_NOPTS_VALUE)
+                av_log(NULL, AV_LOG_DEBUG,
+                    "cur_dts is invalid st:%d (%d) [init:%d i_done:%d finish:%d] (this is harmless if it occurs once at the start per stream)\n",
+                    ost->st->index, ost->st->id, ost->initialized, ost->inputs_done, ost->finished);
+        }
 
         if (!ost->initialized && !ost->inputs_done)
             return ost->unavailable ? NULL : ost;
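
choose_output() now ranks streams by last_filter_pts when a filtergraph is attached, falling back to last_mux_dts otherwise, and services the stream that is furthest behind. A standalone sketch of that "earliest timestamp wins" comparison across different time bases (not part of the patch; the struct and function names are hypothetical):

    #include "libavutil/avutil.h"
    #include "libavutil/mathematics.h"

    struct sketch_stream {
        int64_t    last_ts;   /* AV_NOPTS_VALUE if nothing muxed yet */
        AVRational tb;
    };

    /* returns the index of the stream to service next, or -1 if n == 0;
     * streams without a timestamp yet compare as -infinity */
    static int pick_stream(const struct sketch_stream *s, int n)
    {
        int best = -1;

        for (int i = 0; i < n; i++) {
            if (best < 0 ||
                s[i].last_ts == AV_NOPTS_VALUE ||
                (s[best].last_ts != AV_NOPTS_VALUE &&
                 av_compare_ts(s[i].last_ts, s[i].tb,
                               s[best].last_ts, s[best].tb) < 0))
                best = i;
        }
        return best;
    }
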
@@ -3543,15 +3558,6 @@ static int check_keyboard_interaction(int64_t cur_time)
     if (key == '+') av_log_set_level(av_log_get_level()+10);
     if (key == '-') av_log_set_level(av_log_get_level()-10);
     if (key == 's') qp_hist     ^= 1;
-    if (key == 'h'){
-        if (do_hex_dump){
-            do_hex_dump = do_pkt_dump = 0;
-        } else if(do_pkt_dump){
-            do_hex_dump = 1;
-        } else
-            do_pkt_dump = 1;
-        av_log_set_level(AV_LOG_DEBUG);
-    }
     if (key == 'c' || key == 'C'){
         char buf[4096], target[64], command[256], arg[256] = {0};
         double time;
@@ -3618,7 +3624,8 @@ static int check_keyboard_interaction(int64_t cur_time)
         }
         for(i=0;i<nb_output_streams;i++) {
             OutputStream *ost = output_streams[i];
-            ost->enc_ctx->debug = debug;
+            if (ost->enc_ctx)
+                ost->enc_ctx->debug = debug;
         }
         if(debug) av_log_set_level(AV_LOG_DEBUG);
         fprintf(stderr,"debug=%d\n", debug);
@@ -3639,155 +3646,6 @@ static int check_keyboard_interaction(int64_t cur_time)
     return 0;
 }
 
-#if HAVE_THREADS
-static void *input_thread(void *arg)
-{
-    InputFile *f = arg;
-    AVPacket *pkt = f->pkt, *queue_pkt;
-    unsigned flags = f->non_blocking ? AV_THREAD_MESSAGE_NONBLOCK : 0;
-    int ret = 0;
-
-    while (1) {
-        ret = av_read_frame(f->ctx, pkt);
-
-        if (ret == AVERROR(EAGAIN)) {
-            av_usleep(10000);
-            continue;
-        }
-        if (ret < 0) {
-            av_thread_message_queue_set_err_recv(f->in_thread_queue, ret);
-            break;
-        }
-        queue_pkt = av_packet_alloc();
-        if (!queue_pkt) {
-            av_packet_unref(pkt);
-            av_thread_message_queue_set_err_recv(f->in_thread_queue, AVERROR(ENOMEM));
-            break;
-        }
-        av_packet_move_ref(queue_pkt, pkt);
-        ret = av_thread_message_queue_send(f->in_thread_queue, &queue_pkt, flags);
-        if (flags && ret == AVERROR(EAGAIN)) {
-            flags = 0;
-            ret = av_thread_message_queue_send(f->in_thread_queue, &queue_pkt, flags);
-            av_log(f->ctx, AV_LOG_WARNING,
-                   "Thread message queue blocking; consider raising the "
-                   "thread_queue_size option (current value: %d)\n",
-                   f->thread_queue_size);
-        }
-        if (ret < 0) {
-            if (ret != AVERROR_EOF)
-                av_log(f->ctx, AV_LOG_ERROR,
-                       "Unable to send packet to main thread: %s\n",
-                       av_err2str(ret));
-            av_packet_free(&queue_pkt);
-            av_thread_message_queue_set_err_recv(f->in_thread_queue, ret);
-            break;
-        }
-    }
-
-    return NULL;
-}
-
-static void free_input_thread(int i)
-{
-    InputFile *f = input_files[i];
-    AVPacket *pkt;
-
-    if (!f || !f->in_thread_queue)
-        return;
-    av_thread_message_queue_set_err_send(f->in_thread_queue, AVERROR_EOF);
-    while (av_thread_message_queue_recv(f->in_thread_queue, &pkt, 0) >= 0)
-        av_packet_free(&pkt);
-
-    pthread_join(f->thread, NULL);
-    f->joined = 1;
-    av_thread_message_queue_free(&f->in_thread_queue);
-}
-
-static void free_input_threads(void)
-{
-    int i;
-
-    for (i = 0; i < nb_input_files; i++)
-        free_input_thread(i);
-}
-
-static int init_input_thread(int i)
-{
-    int ret;
-    InputFile *f = input_files[i];
-
-    if (f->thread_queue_size < 0)
-        f->thread_queue_size = (nb_input_files > 1 ? 8 : 0);
-    if (!f->thread_queue_size)
-        return 0;
-
-    if (f->ctx->pb ? !f->ctx->pb->seekable :
-        strcmp(f->ctx->iformat->name, "lavfi"))
-        f->non_blocking = 1;
-    ret = av_thread_message_queue_alloc(&f->in_thread_queue,
-                                        f->thread_queue_size, sizeof(f->pkt));
-    if (ret < 0)
-        return ret;
-
-    if ((ret = pthread_create(&f->thread, NULL, input_thread, f))) {
-        av_log(NULL, AV_LOG_ERROR, "pthread_create failed: %s. Try to increase `ulimit -v` or decrease `ulimit -s`.\n", strerror(ret));
-        av_thread_message_queue_free(&f->in_thread_queue);
-        return AVERROR(ret);
-    }
-
-    return 0;
-}
-
-static int init_input_threads(void)
-{
-    int i, ret;
-
-    for (i = 0; i < nb_input_files; i++) {
-        ret = init_input_thread(i);
-        if (ret < 0)
-            return ret;
-    }
-    return 0;
-}
-
-static int get_input_packet_mt(InputFile *f, AVPacket **pkt)
-{
-    return av_thread_message_queue_recv(f->in_thread_queue, pkt,
-                                        f->non_blocking ?
-                                        AV_THREAD_MESSAGE_NONBLOCK : 0);
-}
-#endif
-
-static int get_input_packet(InputFile *f, AVPacket **pkt)
-{
-    if (f->readrate || f->rate_emu) {
-        int i;
-        int64_t file_start = copy_ts * (
-                              (f->ctx->start_time != AV_NOPTS_VALUE ? f->ctx->start_time * !start_at_zero : 0) +
-                              (f->start_time != AV_NOPTS_VALUE ? f->start_time : 0)
-                             );
-        float scale = f->rate_emu ? 1.0 : f->readrate;
-        for (i = 0; i < f->nb_streams; i++) {
-            InputStream *ist = input_streams[f->ist_index + i];
-            int64_t stream_ts_offset, pts, now;
-            if (!ist->nb_packets || (ist->decoding_needed && !ist->got_output)) continue;
-            stream_ts_offset = FFMAX(ist->first_dts != AV_NOPTS_VALUE ? ist->first_dts : 0, file_start);
-            pts = av_rescale(ist->dts, 1000000, AV_TIME_BASE);
-            now = (av_gettime_relative() - ist->start) * scale + stream_ts_offset;
-            if (pts > now)
-                return AVERROR(EAGAIN);
-        }
-    }
-
-#if HAVE_THREADS
-    if (f->thread_queue_size)
-        return get_input_packet_mt(f, pkt);
-#endif
-    *pkt = f->pkt;
-    return av_read_frame(f->ctx, *pkt);
-}
-
 static int got_eagain(void)
 {
     int i;
@@ -3806,83 +3664,119 @@ static void reset_eagain(void)
         output_streams[i]->unavailable = 0;
 }
 
-// set duration to max(tmp, duration) in a proper time base and return duration's time_base
-static AVRational duration_max(int64_t tmp, int64_t *duration, AVRational tmp_time_base,
-                               AVRational time_base)
+static void decode_flush(InputFile *ifile)
 {
-    int ret;
+    for (int i = 0; i < ifile->nb_streams; i++) {
+        InputStream *ist = input_streams[ifile->ist_index + i];
+        int ret;
 
-    if (!*duration) {
-        *duration = tmp;
-        return tmp_time_base;
-    }
-
-    ret = av_compare_ts(*duration, time_base, tmp, tmp_time_base);
-    if (ret < 0) {
-        *duration = tmp;
-        return tmp_time_base;
-    }
+        if (!ist->processing_needed)
+            continue;
 
-    return time_base;
-}
+        do {
+            ret = process_input_packet(ist, NULL, 1);
+        } while (ret > 0);
 
-static int seek_to_start(InputFile *ifile, AVFormatContext *is)
-{
-    InputStream *ist;
-    AVCodecContext *avctx;
-    int i, ret, has_audio = 0;
-    int64_t duration = 0;
+        if (ist->decoding_needed) {
+            /* report last frame duration to the demuxer thread */
+            if (ist->par->codec_type == AVMEDIA_TYPE_AUDIO) {
+                LastFrameDuration dur;
 
-    ret = avformat_seek_file(is, -1, INT64_MIN, is->start_time, is->start_time, 0);
-    if (ret < 0)
-        return ret;
+                dur.stream_idx = i;
+                dur.duration   = av_rescale_q(ist->nb_samples,
+                                              (AVRational){ 1, ist->dec_ctx->sample_rate},
+                                              ist->st->time_base);
 
-    for (i = 0; i < ifile->nb_streams; i++) {
-        ist   = input_streams[ifile->ist_index + i];
-        avctx = ist->dec_ctx;
+                av_thread_message_queue_send(ifile->audio_duration_queue, &dur, 0);
+            }
 
-        /* duration is the length of the last frame in a stream
-         * when audio stream is present we don't care about
-         * last video frame length because it's not defined exactly */
-        if (avctx->codec_type == AVMEDIA_TYPE_AUDIO && ist->nb_samples)
-            has_audio = 1;
+            avcodec_flush_buffers(ist->dec_ctx);
+        }
     }
+}
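
decode_flush() above reports the duration of the last decoded audio frame back to the demuxer thread through ifile->audio_duration_queue when the input loops. A standalone sketch of passing such fixed-size messages with libavutil's thread message queue (not part of the patch; the struct, function name and queue depth are hypothetical):

    #include <stdint.h>
    #include "libavutil/threadmessage.h"

    typedef struct SketchDuration {
        int     stream_idx;
        int64_t duration;            /* in the stream time base */
    } SketchDuration;

    static int send_duration(AVThreadMessageQueue **q,
                             int stream_idx, int64_t duration)
    {
        SketchDuration dur = { stream_idx, duration };
        int ret;

        if (!*q) {
            ret = av_thread_message_queue_alloc(q, 8, sizeof(dur));
            if (ret < 0)
                return ret;
        }
        /* blocking send; the receiving thread recv()s the same struct */
        return av_thread_message_queue_send(*q, &dur, 0);
    }
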
 
-    for (i = 0; i < ifile->nb_streams; i++) {
-        ist   = input_streams[ifile->ist_index + i];
-        avctx = ist->dec_ctx;
+static void ts_discontinuity_detect(InputFile *ifile, InputStream *ist,
+                                    AVPacket *pkt)
+{
+    const int fmt_is_discont = ifile->ctx->iformat->flags & AVFMT_TS_DISCONT;
+    int disable_discontinuity_correction = copy_ts;
+    int64_t pkt_dts = av_rescale_q_rnd(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q,
+                                       AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
 
-        if (has_audio) {
-            if (avctx->codec_type == AVMEDIA_TYPE_AUDIO && ist->nb_samples) {
-                AVRational sample_rate = {1, avctx->sample_rate};
+    if (copy_ts && ist->next_dts != AV_NOPTS_VALUE &&
+        fmt_is_discont && ist->st->pts_wrap_bits < 60) {
+        int64_t wrap_dts = av_rescale_q_rnd(pkt->dts + (1LL<<ist->st->pts_wrap_bits),
+                                            ist->st->time_base, AV_TIME_BASE_Q,
+                                            AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
+        if (FFABS(wrap_dts - ist->next_dts) < FFABS(pkt_dts - ist->next_dts)/10)
+            disable_discontinuity_correction = 0;
+    }
 
-                duration = av_rescale_q(ist->nb_samples, sample_rate, ist->st->time_base);
-            } else {
-                continue;
+    if (ist->next_dts != AV_NOPTS_VALUE && !disable_discontinuity_correction) {
+        int64_t delta = pkt_dts - ist->next_dts;
+        if (fmt_is_discont) {
+            if (FFABS(delta) > 1LL * dts_delta_threshold * AV_TIME_BASE ||
+                pkt_dts + AV_TIME_BASE/10 < FFMAX(ist->pts, ist->dts)) {
+                ifile->ts_offset_discont -= delta;
+                av_log(NULL, AV_LOG_DEBUG,
+                       "timestamp discontinuity for stream #%d:%d "
+                       "(id=%d, type=%s): %"PRId64", new offset= %"PRId64"\n",
+                       ist->file_index, ist->st->index, ist->st->id,
+                       av_get_media_type_string(ist->par->codec_type),
+                       delta, ifile->ts_offset_discont);
+                pkt->dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
+                if (pkt->pts != AV_NOPTS_VALUE)
+                    pkt->pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
             }
         } else {
-            if (ist->framerate.num) {
-                duration = av_rescale_q(1, av_inv_q(ist->framerate), ist->st->time_base);
-            } else if (ist->st->avg_frame_rate.num) {
-                duration = av_rescale_q(1, av_inv_q(ist->st->avg_frame_rate), ist->st->time_base);
-            } else {
-                duration = 1;
+            if (FFABS(delta) > 1LL * dts_error_threshold * AV_TIME_BASE) {
+                av_log(NULL, AV_LOG_WARNING, "DTS %"PRId64", next:%"PRId64" st:%d invalid dropping\n", pkt->dts, ist->next_dts, pkt->stream_index);
+                pkt->dts = AV_NOPTS_VALUE;
+            }
+            if (pkt->pts != AV_NOPTS_VALUE){
+                int64_t pkt_pts = av_rescale_q(pkt->pts, ist->st->time_base, AV_TIME_BASE_Q);
+                delta = pkt_pts - ist->next_dts;
+                if (FFABS(delta) > 1LL * dts_error_threshold * AV_TIME_BASE) {
+                    av_log(NULL, AV_LOG_WARNING, "PTS %"PRId64", next:%"PRId64" invalid dropping st:%d\n", pkt->pts, ist->next_dts, pkt->stream_index);
+                    pkt->pts = AV_NOPTS_VALUE;
+                }
             }
         }
-        if (!ifile->duration)
-            ifile->time_base = ist->st->time_base;
-        /* the total duration of the stream, max_pts - min_pts is
-         * the duration of the stream without the last frame */
-        if (ist->max_pts > ist->min_pts && ist->max_pts - (uint64_t)ist->min_pts < INT64_MAX - duration)
-            duration += ist->max_pts - ist->min_pts;
-        ifile->time_base = duration_max(duration, &ifile->duration, ist->st->time_base,
-                                        ifile->time_base);
+    } else if (ist->next_dts == AV_NOPTS_VALUE && !copy_ts &&
+               fmt_is_discont && ifile->last_ts != AV_NOPTS_VALUE) {
+        int64_t delta = pkt_dts - ifile->last_ts;
+        if (FFABS(delta) > 1LL * dts_delta_threshold * AV_TIME_BASE) {
+            ifile->ts_offset_discont -= delta;
+            av_log(NULL, AV_LOG_DEBUG,
+                   "Inter stream timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
+                   delta, ifile->ts_offset_discont);
+            pkt->dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
+            if (pkt->pts != AV_NOPTS_VALUE)
+                pkt->pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
+        }
     }
 
-    if (ifile->loop > 0)
-        ifile->loop--;
+    ifile->last_ts = av_rescale_q(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q);
+}
+
+static void ts_discontinuity_process(InputFile *ifile, InputStream *ist,
+                                     AVPacket *pkt)
+{
+    int64_t offset = av_rescale_q(ifile->ts_offset_discont, AV_TIME_BASE_Q,
+                                  ist->st->time_base);
 
-    return ret;
+    // apply previously-detected timestamp-discontinuity offset
+    // (to all streams, not just audio/video)
+    if (pkt->dts != AV_NOPTS_VALUE)
+        pkt->dts += offset;
+    if (pkt->pts != AV_NOPTS_VALUE)
+        pkt->pts += offset;
+
+    // detect timestamp discontinuities for audio/video
+    if ((ist->par->codec_type == AVMEDIA_TYPE_VIDEO ||
+         ist->par->codec_type == AVMEDIA_TYPE_AUDIO) &&
+        pkt->dts != AV_NOPTS_VALUE)
+        ts_discontinuity_detect(ifile, ist, pkt);
 }
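
The two helpers above split the old inline logic from process_input(): ts_discontinuity_process() applies the accumulated ts_offset_discont to every packet's timestamps, and ts_discontinuity_detect() grows that offset when the DTS jumps past the threshold for discontinuous formats. A standalone sketch of the core arithmetic (not part of the patch; names, values and the threshold are hypothetical):

    #include "libavutil/avutil.h"
    #include "libavutil/mathematics.h"

    /* pkt_dts_tb is the packet DTS in the stream time base, next_dts_us the
     * predicted DTS in AV_TIME_BASE units; returns the correction to subtract
     * from pkt->dts/pts (in the stream time base), or 0 if within tolerance */
    static int64_t discont_correction(int64_t pkt_dts_tb, int64_t next_dts_us,
                                      AVRational stream_tb, int delta_threshold)
    {
        int64_t pkt_dts = av_rescale_q_rnd(pkt_dts_tb, stream_tb, AV_TIME_BASE_Q,
                                           AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        int64_t delta   = pkt_dts - next_dts_us;

        if (FFABS(delta) <= (int64_t)delta_threshold * AV_TIME_BASE)
            return 0;

        return av_rescale_q(delta, AV_TIME_BASE_Q, stream_tb);
    }
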
 
 /*
@@ -3898,48 +3792,19 @@ static int process_input(int file_index)
     AVFormatContext *is;
     InputStream *ist;
     AVPacket *pkt;
-    int ret, thread_ret, i, j;
-    int64_t duration;
-    int64_t pkt_dts;
-    int disable_discontinuity_correction = copy_ts;
+    int ret, i, j;
 
     is  = ifile->ctx;
-    ret = get_input_packet(ifile, &pkt);
+    ret = ifile_get_packet(ifile, &pkt);
 
     if (ret == AVERROR(EAGAIN)) {
         ifile->eagain = 1;
         return ret;
     }
-    if (ret < 0 && ifile->loop) {
-        AVCodecContext *avctx;
-        for (i = 0; i < ifile->nb_streams; i++) {
-            ist = input_streams[ifile->ist_index + i];
-            avctx = ist->dec_ctx;
-            if (ist->processing_needed) {
-                ret = process_input_packet(ist, NULL, 1);
-                if (ret>0)
-                    return 0;
-                if (ist->decoding_needed)
-                    avcodec_flush_buffers(avctx);
-            }
-        }
-#if HAVE_THREADS
-        free_input_thread(file_index);
-#endif
-        ret = seek_to_start(ifile, is);
-#if HAVE_THREADS
-        thread_ret = init_input_thread(file_index);
-        if (thread_ret < 0)
-            return thread_ret;
-#endif
-        if (ret < 0)
-            av_log(NULL, AV_LOG_WARNING, "Seek to start failed.\n");
-        else
-            ret = get_input_packet(ifile, &pkt);
-        if (ret == AVERROR(EAGAIN)) {
-            ifile->eagain = 1;
-            return ret;
-        }
+    if (ret == 1) {
+        /* the input file is looped: flush the decoders */
+        decode_flush(ifile);
+        return AVERROR(EAGAIN);
     }
     if (ret < 0) {
         if (ret != AVERROR_EOF) {
@@ -3961,8 +3826,10 @@ static int process_input(int file_index)
                 OutputStream *ost = output_streams[j];
 
                 if (ost->source_index == ifile->ist_index + i &&
-                    (ost->stream_copy || ost->enc->type == AVMEDIA_TYPE_SUBTITLE))
-                    finish_output_stream(ost);
+                    (!ost->enc_ctx || ost->enc->type == AVMEDIA_TYPE_SUBTITLE)) {
+                    OutputFile *of = output_files[ost->file_index];
+                    output_packet(of, ost->pkt, ost, 1);
+                }
             }
         }
 
@@ -3972,17 +3839,6 @@ static int process_input(int file_index)
 
     reset_eagain();
 
-    if (do_pkt_dump) {
-        av_pkt_dump_log2(NULL, AV_LOG_INFO, pkt, do_hex_dump,
-                         is->streams[pkt->stream_index]);
-    }
-    /* the following test is needed in case new streams appear
-       dynamically in stream : we ignore them */
-    if (pkt->stream_index >= ifile->nb_streams) {
-        report_new_stream(file_index, pkt);
-        goto discard_packet;
-    }
-
     ist = input_streams[ifile->ist_index + pkt->stream_index];
 
     ist->data_size += pkt->size;
@@ -3991,61 +3847,6 @@ static int process_input(int file_index)
     if (ist->discard)
         goto discard_packet;
 
-    if (pkt->flags & AV_PKT_FLAG_CORRUPT) {
-        av_log(NULL, exit_on_error ? AV_LOG_FATAL : AV_LOG_WARNING,
-               "%s: corrupt input packet in stream %d\n", is->url, pkt->stream_index);
-        if (exit_on_error)
-            exit_program(1);
-    }
-
-    if (debug_ts) {
-        av_log(NULL, AV_LOG_INFO, "demuxer -> ist_index:%d type:%s "
-               "next_dts:%s next_dts_time:%s next_pts:%s next_pts_time:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s off:%s off_time:%s\n",
-               ifile->ist_index + pkt->stream_index, av_get_media_type_string(ist->dec_ctx->codec_type),
-               av_ts2str(ist->next_dts), av_ts2timestr(ist->next_dts, &AV_TIME_BASE_Q),
-               av_ts2str(ist->next_pts), av_ts2timestr(ist->next_pts, &AV_TIME_BASE_Q),
-               av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ist->st->time_base),
-               av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ist->st->time_base),
-               av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ist->st->time_base),
-               av_ts2str(input_files[ist->file_index]->ts_offset),
-               av_ts2timestr(input_files[ist->file_index]->ts_offset, &AV_TIME_BASE_Q));
-    }
-
-    if(!ist->wrap_correction_done && is->start_time != AV_NOPTS_VALUE && ist->st->pts_wrap_bits < 64){
-        int64_t stime, stime2;
-        // Correcting starttime based on the enabled streams
-        // FIXME this ideally should be done before the first use of starttime but we do not know which are the enabled streams at that point.
-        //       so we instead do it here as part of discontinuity handling
-        if (   ist->next_dts == AV_NOPTS_VALUE
-            && ifile->ts_offset == -is->start_time
-            && (is->iformat->flags & AVFMT_TS_DISCONT)) {
-            int64_t new_start_time = INT64_MAX;
-            for (i=0; i<is->nb_streams; i++) {
-                AVStream *st = is->streams[i];
-                if(st->discard == AVDISCARD_ALL || st->start_time == AV_NOPTS_VALUE)
-                    continue;
-                new_start_time = FFMIN(new_start_time, av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q));
-            }
-            if (new_start_time > is->start_time) {
-                av_log(is, AV_LOG_VERBOSE, "Correcting start time by %"PRId64"\n", new_start_time - is->start_time);
-                ifile->ts_offset = -new_start_time;
-            }
-        }
-
-        stime = av_rescale_q(is->start_time, AV_TIME_BASE_Q, ist->st->time_base);
-        stime2= stime + (1ULL<<ist->st->pts_wrap_bits);
-        ist->wrap_correction_done = 1;
-
-        if(stime2 > stime && pkt->dts != AV_NOPTS_VALUE && pkt->dts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
-            pkt->dts -= 1ULL<<ist->st->pts_wrap_bits;
-            ist->wrap_correction_done = 0;
-        }
-        if(stime2 > stime && pkt->pts != AV_NOPTS_VALUE && pkt->pts > stime + (1LL<<(ist->st->pts_wrap_bits-1))) {
-            pkt->pts -= 1ULL<<ist->st->pts_wrap_bits;
-            ist->wrap_correction_done = 0;
-        }
-    }
-
     /* add the stream-global side data to the first packet */
     if (ist->nb_packets == 1) {
         for (i = 0; i < ist->st->nb_side_data; i++) {
@@ -4066,99 +3867,13 @@ static int process_input(int file_index)
         }
     }
 
-    if (pkt->dts != AV_NOPTS_VALUE)
-        pkt->dts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);
-    if (pkt->pts != AV_NOPTS_VALUE)
-        pkt->pts += av_rescale_q(ifile->ts_offset, AV_TIME_BASE_Q, ist->st->time_base);
-
-    if (pkt->pts != AV_NOPTS_VALUE)
-        pkt->pts *= ist->ts_scale;
-    if (pkt->dts != AV_NOPTS_VALUE)
-        pkt->dts *= ist->ts_scale;
-
-    pkt_dts = av_rescale_q_rnd(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
-    if ((ist->dec_ctx->codec_type == AVMEDIA_TYPE_VIDEO ||
-         ist->dec_ctx->codec_type == AVMEDIA_TYPE_AUDIO) &&
-        pkt_dts != AV_NOPTS_VALUE && ist->next_dts == AV_NOPTS_VALUE && !copy_ts
-        && (is->iformat->flags & AVFMT_TS_DISCONT) && ifile->last_ts != AV_NOPTS_VALUE) {
-        int64_t delta   = pkt_dts - ifile->last_ts;
-        if (delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
-            delta >  1LL*dts_delta_threshold*AV_TIME_BASE){
-            ifile->ts_offset -= delta;
-            av_log(NULL, AV_LOG_DEBUG,
-                   "Inter stream timestamp discontinuity %"PRId64", new offset= %"PRId64"\n",
-                   delta, ifile->ts_offset);
-            pkt->dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
-            if (pkt->pts != AV_NOPTS_VALUE)
-                pkt->pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
-        }
-    }
-
-    duration = av_rescale_q(ifile->duration, ifile->time_base, ist->st->time_base);
-    if (pkt->pts != AV_NOPTS_VALUE) {
-        pkt->pts += duration;
-        ist->max_pts = FFMAX(pkt->pts, ist->max_pts);
-        ist->min_pts = FFMIN(pkt->pts, ist->min_pts);
-    }
-
-    if (pkt->dts != AV_NOPTS_VALUE)
-        pkt->dts += duration;
-
-    pkt_dts = av_rescale_q_rnd(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
-
-    if (copy_ts && pkt_dts != AV_NOPTS_VALUE && ist->next_dts != AV_NOPTS_VALUE &&
-        (is->iformat->flags & AVFMT_TS_DISCONT) && ist->st->pts_wrap_bits < 60) {
-        int64_t wrap_dts = av_rescale_q_rnd(pkt->dts + (1LL<<ist->st->pts_wrap_bits),
-                                            ist->st->time_base, AV_TIME_BASE_Q,
-                                            AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
-        if (FFABS(wrap_dts - ist->next_dts) < FFABS(pkt_dts - ist->next_dts)/10)
-            disable_discontinuity_correction = 0;
-    }
-
-    if ((ist->dec_ctx->codec_type == AVMEDIA_TYPE_VIDEO ||
-         ist->dec_ctx->codec_type == AVMEDIA_TYPE_AUDIO) &&
-         pkt_dts != AV_NOPTS_VALUE && ist->next_dts != AV_NOPTS_VALUE &&
-        !disable_discontinuity_correction) {
-        int64_t delta   = pkt_dts - ist->next_dts;
-        if (is->iformat->flags & AVFMT_TS_DISCONT) {
-            if (delta < -1LL*dts_delta_threshold*AV_TIME_BASE ||
-                delta >  1LL*dts_delta_threshold*AV_TIME_BASE ||
-                pkt_dts + AV_TIME_BASE/10 < FFMAX(ist->pts, ist->dts)) {
-                ifile->ts_offset -= delta;
-                av_log(NULL, AV_LOG_DEBUG,
-                       "timestamp discontinuity for stream #%d:%d "
-                       "(id=%d, type=%s): %"PRId64", new offset= %"PRId64"\n",
-                       ist->file_index, ist->st->index, ist->st->id,
-                       av_get_media_type_string(ist->dec_ctx->codec_type),
-                       delta, ifile->ts_offset);
-                pkt->dts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
-                if (pkt->pts != AV_NOPTS_VALUE)
-                    pkt->pts -= av_rescale_q(delta, AV_TIME_BASE_Q, ist->st->time_base);
-            }
-        } else {
-            if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
-                 delta >  1LL*dts_error_threshold*AV_TIME_BASE) {
-                av_log(NULL, AV_LOG_WARNING, "DTS %"PRId64", next:%"PRId64" st:%d invalid dropping\n", pkt->dts, ist->next_dts, pkt->stream_index);
-                pkt->dts = AV_NOPTS_VALUE;
-            }
-            if (pkt->pts != AV_NOPTS_VALUE){
-                int64_t pkt_pts = av_rescale_q(pkt->pts, ist->st->time_base, AV_TIME_BASE_Q);
-                delta   = pkt_pts - ist->next_dts;
-                if ( delta < -1LL*dts_error_threshold*AV_TIME_BASE ||
-                     delta >  1LL*dts_error_threshold*AV_TIME_BASE) {
-                    av_log(NULL, AV_LOG_WARNING, "PTS %"PRId64", next:%"PRId64" invalid dropping st:%d\n", pkt->pts, ist->next_dts, pkt->stream_index);
-                    pkt->pts = AV_NOPTS_VALUE;
-                }
-            }
-        }
-    }
-
-    if (pkt->dts != AV_NOPTS_VALUE)
-        ifile->last_ts = av_rescale_q(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q);
+    // detect and try to correct for timestamp discontinuities
+    ts_discontinuity_process(ifile, ist, pkt);
 
     if (debug_ts) {
         av_log(NULL, AV_LOG_INFO, "demuxer+ffmpeg -> ist_index:%d type:%s pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s off:%s off_time:%s\n",
-               ifile->ist_index + pkt->stream_index, av_get_media_type_string(ist->dec_ctx->codec_type),
+               ifile->ist_index + pkt->stream_index,
+               av_get_media_type_string(ist->par->codec_type),
                av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ist->st->time_base),
                av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ist->st->time_base),
                av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ist->st->time_base),
@@ -4171,12 +3886,7 @@ static int process_input(int file_index)
     process_input_packet(ist, pkt, 0);
 
 discard_packet:
-#if HAVE_THREADS
-    if (ifile->thread_queue_size)
-        av_packet_free(&pkt);
-    else
-#endif
-    av_packet_unref(pkt);
+    av_packet_free(&pkt);
 
     return 0;
 }
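
With the demuxer thread always enabled, ifile_get_packet() hands process_input() a heap-allocated packet, so the discard path now calls av_packet_free() unconditionally instead of distinguishing threaded and non-threaded reads. A standalone sketch of that ownership model (not part of the patch; function names are hypothetical):

    #include "libavcodec/packet.h"
    #include "libavutil/error.h"

    /* producer side: move the payload into a freshly allocated packet */
    static int hand_off_packet(AVPacket *src, AVPacket **out)
    {
        AVPacket *pkt = av_packet_alloc();
        if (!pkt)
            return AVERROR(ENOMEM);
        av_packet_move_ref(pkt, src);   /* src is left blank but reusable */
        *out = pkt;
        return 0;
    }

    /* consumer side: release payload and struct in one call */
    static void consume_packet(AVPacket **pkt)
    {
        /* ... use *pkt ... */
        av_packet_free(pkt);            /* also sets *pkt to NULL */
    }
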
@@ -4327,7 +4037,6 @@ static int transcode_step(void)
 static int transcode(void)
 {
     int ret, i;
-    AVFormatContext *os;
     OutputStream *ost;
     InputStream *ist;
     int64_t timer_start;
@@ -4343,10 +4052,8 @@ static int transcode(void)
 
     timer_start = av_gettime_relative();
 
-#if HAVE_THREADS
     if ((ret = init_input_threads()) < 0)
         goto fail;
-#endif
 
     while (!received_sigterm) {
         int64_t cur_time= av_gettime_relative();
@@ -4371,9 +4078,7 @@ static int transcode(void)
         /* dump report by using the output first video and audio streams */
         print_report(0, timer_start, cur_time);
     }
-#if HAVE_THREADS
     free_input_threads();
-#endif
 
     /* at the end of stream, we must flush the decoder buffers */
     for (i = 0; i < nb_input_streams; i++) {
@@ -4396,26 +4101,13 @@ static int transcode(void)
     /* dump report by using the first video and audio streams */
     print_report(1, timer_start, av_gettime_relative());
 
-    /* close the output files */
-    for (i = 0; i < nb_output_files; i++) {
-        os = output_files[i]->ctx;
-        if (os && os->oformat && !(os->oformat->flags & AVFMT_NOFILE)) {
-            if ((ret = avio_closep(&os->pb)) < 0) {
-                av_log(NULL, AV_LOG_ERROR, "Error closing file %s: %s\n", os->url, av_err2str(ret));
-                if (exit_on_error)
-                    exit_program(1);
-            }
-        }
-    }
-
     /* close each encoder */
     for (i = 0; i < nb_output_streams; i++) {
+        uint64_t packets_written;
         ost = output_streams[i];
-        if (ost->encoding_needed) {
-            av_freep(&ost->enc_ctx->stats_in);
-        }
-        total_packets_written += ost->packets_written;
-        if (!ost->packets_written && (abort_on_flags & ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM)) {
+        packets_written = atomic_load(&ost->packets_written);
+        total_packets_written += packets_written;
+        if (!packets_written && (abort_on_flags & ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM)) {
             av_log(NULL, AV_LOG_FATAL, "Empty output on stream %d.\n", i);
             exit_program(1);
         }
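
ost->packets_written is now read with C11 atomic_load(), since it can be updated concurrently by the muxing code. A minimal sketch of that pattern (not part of the patch; the variable and function names are hypothetical):

    #include <inttypes.h>
    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    static atomic_uint_least64_t packets_written = 0;  /* bumped by another thread */

    static void report_total(void)
    {
        uint64_t n = atomic_load(&packets_written);
        printf("wrote %"PRIu64" packets\n", n);
    }
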
@@ -4431,8 +4123,6 @@ static int transcode(void)
         ist = input_streams[i];
         if (ist->decoding_needed) {
             avcodec_close(ist->dec_ctx);
-            if (ist->hwaccel_uninit)
-                ist->hwaccel_uninit(ist->dec_ctx);
         }
     }
 
@@ -4442,9 +4132,7 @@ static int transcode(void)
     ret = 0;
 
  fail:
-#if HAVE_THREADS
     free_input_threads();
-#endif
 
     if (output_streams) {
         for (i = 0; i < nb_output_streams; i++) {
@@ -4515,7 +4203,7 @@ static int64_t getmaxrss(void)
 
 int main(int argc, char **argv)
 {
-    int i, ret;
+    int ret;
     BenchmarkTimeStamps ti;
 
     init_dynload();
@@ -4551,11 +4239,6 @@ int main(int argc, char **argv)
         exit_program(1);
     }
 
-    for (i = 0; i < nb_output_files; i++) {
-        if (strcmp(output_files[i]->format->name, "rtp"))
-            want_sdp = 0;
-    }
-
     current_time = ti = get_benchmark_time_stamps();
     if (transcode() < 0)
         exit_program(1);
diff --git a/fftools/ffmpeg.h b/fftools/ffmpeg.h
index 99d31c3..44cc23f 100644
 
 #include "config.h"
 
+#include <stdatomic.h>
 #include <stdint.h>
 #include <stdio.h>
 #include <signal.h>
 
 #include "cmdutils.h"
+#include "sync_queue.h"
 
 #include "libavformat/avformat.h"
 #include "libavformat/avio.h"
 
 #include "libswresample/swresample.h"
 
+// deprecated features
+#define FFMPEG_OPT_PSNR 1
+#define FFMPEG_OPT_MAP_CHANNEL 1
+#define FFMPEG_OPT_MAP_SYNC 1
+
 enum VideoSyncMethod {
     VSYNC_AUTO = -1,
     VSYNC_PASSTHROUGH,
@@ -75,15 +82,15 @@ typedef struct StreamMap {
     int disabled;           /* 1 if this mapping is disabled by a negative map */
     int file_index;
     int stream_index;
-    int sync_file_index;
-    int sync_stream_index;
     char *linklabel;       /* name of an output link, for mapping lavfi outputs */
 } StreamMap;
 
+#if FFMPEG_OPT_MAP_CHANNEL
 typedef struct {
     int  file_idx,  stream_idx,  channel_idx; // input
     int ofile_idx, ostream_idx;               // output
 } AudioChannelMap;
+#endif
 
 typedef struct OptionsContext {
     OptionGroup *g;
@@ -118,6 +125,7 @@ typedef struct OptionsContext {
     float readrate;
     int accurate_seek;
     int thread_queue_size;
+    int input_sync_ref;
 
     SpecifierOpt *ts_scale;
     int        nb_ts_scale;
@@ -135,8 +143,10 @@ typedef struct OptionsContext {
     /* output options */
     StreamMap *stream_maps;
     int     nb_stream_maps;
+#if FFMPEG_OPT_MAP_CHANNEL
     AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */
     int           nb_audio_channel_maps; /* number of (valid) -map_channel settings */
+#endif
     int metadata_global_manual;
     int metadata_streams_manual;
     int metadata_chapters_manual;
@@ -147,9 +157,10 @@ typedef struct OptionsContext {
 
     int64_t recording_time;
     int64_t stop_time;
-    uint64_t limit_filesize;
+    int64_t limit_filesize;
     float mux_preload;
     float mux_max_delay;
+    float shortest_buf_duration;
     int shortest;
     int bitexact;
 
@@ -312,6 +323,12 @@ typedef struct InputStream {
 #define DECODING_FOR_FILTER 2
     int processing_needed;   /* non zero if the packets must be processed */
 
+    /**
+     * Codec parameters - to be used by the decoding/streamcopy code.
+     * st->codecpar should not be accessed, because it may be modified
+     * concurrently by the demuxing thread.
+     */
+    AVCodecParameters *par;
     AVCodecContext *dec_ctx;
     const AVCodec *dec;
     AVFrame *decoded_frame;
@@ -378,12 +395,8 @@ typedef struct InputStream {
     char  *hwaccel_device;
     enum AVPixelFormat hwaccel_output_format;
 
-    /* hwaccel context */
-    void  *hwaccel_ctx;
-    void (*hwaccel_uninit)(AVCodecContext *s);
     int  (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
     enum AVPixelFormat hwaccel_pix_fmt;
-    enum AVPixelFormat hwaccel_retrieved_pix_fmt;
 
     /* stats */
     // combined size of all the packets read
@@ -400,7 +413,14 @@ typedef struct InputStream {
     int got_output;
 } InputStream;
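
The new InputStream.par field documented above exists so that the decoding/streamcopy code never reads st->codecpar, which the demuxing thread may modify concurrently. A standalone sketch of taking such a snapshot with the public API (not part of the patch; the function name is hypothetical):

    #include "libavcodec/codec_par.h"
    #include "libavformat/avformat.h"
    #include "libavutil/error.h"

    static int snapshot_params(const AVStream *st, AVCodecParameters **out)
    {
        AVCodecParameters *par = avcodec_parameters_alloc();
        int ret;

        if (!par)
            return AVERROR(ENOMEM);
        ret = avcodec_parameters_copy(par, st->codecpar);
        if (ret < 0) {
            avcodec_parameters_free(&par);
            return ret;
        }
        *out = par;   /* release with avcodec_parameters_free() when done */
        return 0;
    }
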
 
+typedef struct LastFrameDuration {
+    int     stream_idx;
+    int64_t duration;
+} LastFrameDuration;
+
 typedef struct InputFile {
+    int index;
+
     AVFormatContext *ctx;
     int eof_reached;      /* true if eof reached */
     int eagain;           /* true if last read attempt returned EAGAIN */
@@ -410,8 +430,13 @@ typedef struct InputFile {
                              at the moment when looping happens */
     AVRational time_base; /* time base of the duration */
     int64_t input_ts_offset;
+    int input_sync_ref;
 
     int64_t ts_offset;
+    /**
+     * Extra timestamp offset added by discontinuity handling.
+     */
+    int64_t ts_offset_discont;
     int64_t last_ts;
     int64_t start_time;   /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */
     int64_t recording_time;
@@ -422,15 +447,15 @@ typedef struct InputFile {
     float readrate;
     int accurate_seek;
 
-    AVPacket *pkt;
-
-#if HAVE_THREADS
     AVThreadMessageQueue *in_thread_queue;
     pthread_t thread;           /* thread reading from this file */
     int non_blocking;           /* reading packets from the thread should not block */
-    int joined;                 /* the thread has been joined */
     int thread_queue_size;      /* maximum number of queued packets */
-#endif