avcodec/h264: Be more strict on rejecting pps/sps changes
diff --git a/libavcodec/h264_slice.c b/libavcodec/h264_slice.c
index e6645e8..48d7c56 100644
@@ -31,7 +31,6 @@
 #include "internal.h"
 #include "cabac.h"
 #include "cabac_functions.h"
-#include "dsputil.h"
 #include "error_resilience.h"
 #include "avcodec.h"
 #include "h264.h"
@@ -148,42 +147,6 @@ static const uint8_t dequant8_coeff_init[6][6] = {
     { 36, 32, 58, 34, 46, 43 },
 };
 
-static const enum AVPixelFormat h264_hwaccel_pixfmt_list_420[] = {
-#if CONFIG_H264_DXVA2_HWACCEL
-    AV_PIX_FMT_DXVA2_VLD,
-#endif
-#if CONFIG_H264_VAAPI_HWACCEL
-    AV_PIX_FMT_VAAPI_VLD,
-#endif
-#if CONFIG_H264_VDA_HWACCEL
-    AV_PIX_FMT_VDA_VLD,
-    AV_PIX_FMT_VDA,
-#endif
-#if CONFIG_H264_VDPAU_HWACCEL
-    AV_PIX_FMT_VDPAU,
-#endif
-    AV_PIX_FMT_YUV420P,
-    AV_PIX_FMT_NONE
-};
-
-static const enum AVPixelFormat h264_hwaccel_pixfmt_list_jpeg_420[] = {
-#if CONFIG_H264_DXVA2_HWACCEL
-    AV_PIX_FMT_DXVA2_VLD,
-#endif
-#if CONFIG_H264_VAAPI_HWACCEL
-    AV_PIX_FMT_VAAPI_VLD,
-#endif
-#if CONFIG_H264_VDA_HWACCEL
-    AV_PIX_FMT_VDA_VLD,
-    AV_PIX_FMT_VDA,
-#endif
-#if CONFIG_H264_VDPAU_HWACCEL
-    AV_PIX_FMT_VDPAU,
-#endif
-    AV_PIX_FMT_YUVJ420P,
-    AV_PIX_FMT_NONE
-};
-
 
 static void release_unused_pictures(H264Context *h, int remove_current)
 {
@@ -230,9 +193,9 @@ static int init_table_pools(H264Context *h)
                                                av_buffer_allocz);
     h->mb_type_pool      = av_buffer_pool_init((big_mb_num + h->mb_stride) *
                                                sizeof(uint32_t), av_buffer_allocz);
-    h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
-                                             sizeof(int16_t), av_buffer_allocz);
-    h->ref_index_pool  = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
+    h->motion_val_pool   = av_buffer_pool_init(2 * (b4_array_size + 4) *
+                                               sizeof(int16_t), av_buffer_allocz);
+    h->ref_index_pool    = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
 
     if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
         !h->ref_index_pool) {
@@ -444,7 +407,7 @@ static void clone_tables(H264Context *dst, H264Context *src, int i)
 }
 
 #define IN_RANGE(a, b, size) (((a) >= (b)) && ((a) < ((b) + (size))))
-#undef REBASE_PICTURE
+
 #define REBASE_PICTURE(pic, new_ctx, old_ctx)             \
     (((pic) && (pic) >= (old_ctx)->DPB &&                       \
       (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ?          \
@@ -538,7 +501,7 @@ int ff_h264_update_thread_context(AVCodecContext *dst,
         h->pps = h1->pps;
 
         if ((err = h264_slice_header_init(h, 1)) < 0) {
-            av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
+            av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed\n");
             return err;
         }
         context_reinitialized = 1;
@@ -550,7 +513,7 @@ int ff_h264_update_thread_context(AVCodecContext *dst,
 #endif
     }
     /* update linesize on resize for h264. The h264 decoder doesn't
-     * necessarily call ff_MPV_frame_start in the new thread */
+     * necessarily call ff_mpv_frame_start in the new thread */
     h->linesize   = h1->linesize;
     h->uvlinesize = h1->uvlinesize;
 
@@ -566,6 +529,7 @@ int ff_h264_update_thread_context(AVCodecContext *dst,
 
         av_freep(&h->rbsp_buffer[0]);
         av_freep(&h->rbsp_buffer[1]);
+        ff_h264_unref_picture(h, &h->last_pic_for_ec);
         memcpy(h, h1, offsetof(H264Context, intra_pcm_ptr));
         memcpy(&h->cabac, &h1->cabac,
                sizeof(H264Context) - offsetof(H264Context, cabac));
@@ -579,6 +543,7 @@ int ff_h264_update_thread_context(AVCodecContext *dst,
         memset(&h->mb_luma_dc, 0, sizeof(h->mb_luma_dc));
         memset(&h->mb_padding, 0, sizeof(h->mb_padding));
         memset(&h->cur_pic, 0, sizeof(h->cur_pic));
+        memset(&h->last_pic_for_ec, 0, sizeof(h->last_pic_for_ec));
 
         h->avctx             = dst;
         h->DPB               = NULL;
@@ -586,6 +551,17 @@ int ff_h264_update_thread_context(AVCodecContext *dst,
         h->mb_type_pool      = NULL;
         h->ref_index_pool    = NULL;
         h->motion_val_pool   = NULL;
+        h->intra4x4_pred_mode= NULL;
+        h->non_zero_count    = NULL;
+        h->slice_table_base  = NULL;
+        h->slice_table       = NULL;
+        h->cbp_table         = NULL;
+        h->chroma_pred_mode_table = NULL;
+        memset(h->mvd_table, 0, sizeof(h->mvd_table));
+        h->direct_table      = NULL;
+        h->list_counts       = NULL;
+        h->mb2b_xy           = NULL;
+        h->mb2br_xy          = NULL;
         for (i = 0; i < 2; i++) {
             h->rbsp_buffer[i] = NULL;
             h->rbsp_buffer_size[i] = 0;
@@ -784,7 +760,7 @@ static int h264_frame_start(H264Context *h)
 
     /* We mark the current picture as non-reference after allocating it, so
      * that if we break out due to an error it can be released automatically
-     * in the next ff_MPV_frame_start().
+     * in the next ff_mpv_frame_start().
      */
     h->cur_pic_ptr->reference = 0;
 
@@ -1026,76 +1002,94 @@ static int clone_slice(H264Context *dst, H264Context *src)
 
 static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
 {
+#define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
+                     CONFIG_H264_VAAPI_HWACCEL + \
+                     (CONFIG_H264_VDA_HWACCEL * 2) + \
+                     CONFIG_H264_VDPAU_HWACCEL)
+    enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
+    const enum AVPixelFormat *choices = pix_fmts;
+    int i;
+
     switch (h->sps.bit_depth_luma) {
     case 9:
         if (CHROMA444(h)) {
             if (h->avctx->colorspace == AVCOL_SPC_RGB) {
-                return AV_PIX_FMT_GBRP9;
+                *fmt++ = AV_PIX_FMT_GBRP9;
             } else
-                return AV_PIX_FMT_YUV444P9;
+                *fmt++ = AV_PIX_FMT_YUV444P9;
         } else if (CHROMA422(h))
-            return AV_PIX_FMT_YUV422P9;
+            *fmt++ = AV_PIX_FMT_YUV422P9;
         else
-            return AV_PIX_FMT_YUV420P9;
+            *fmt++ = AV_PIX_FMT_YUV420P9;
         break;
     case 10:
         if (CHROMA444(h)) {
             if (h->avctx->colorspace == AVCOL_SPC_RGB) {
-                return AV_PIX_FMT_GBRP10;
+                *fmt++ = AV_PIX_FMT_GBRP10;
             } else
-                return AV_PIX_FMT_YUV444P10;
+                *fmt++ = AV_PIX_FMT_YUV444P10;
         } else if (CHROMA422(h))
-            return AV_PIX_FMT_YUV422P10;
+            *fmt++ = AV_PIX_FMT_YUV422P10;
         else
-            return AV_PIX_FMT_YUV420P10;
+            *fmt++ = AV_PIX_FMT_YUV420P10;
         break;
     case 12:
         if (CHROMA444(h)) {
             if (h->avctx->colorspace == AVCOL_SPC_RGB) {
-                return AV_PIX_FMT_GBRP12;
+                *fmt++ = AV_PIX_FMT_GBRP12;
             } else
-                return AV_PIX_FMT_YUV444P12;
+                *fmt++ = AV_PIX_FMT_YUV444P12;
         } else if (CHROMA422(h))
-            return AV_PIX_FMT_YUV422P12;
+            *fmt++ = AV_PIX_FMT_YUV422P12;
         else
-            return AV_PIX_FMT_YUV420P12;
+            *fmt++ = AV_PIX_FMT_YUV420P12;
         break;
     case 14:
         if (CHROMA444(h)) {
             if (h->avctx->colorspace == AVCOL_SPC_RGB) {
-                return AV_PIX_FMT_GBRP14;
+                *fmt++ = AV_PIX_FMT_GBRP14;
             } else
-                return AV_PIX_FMT_YUV444P14;
+                *fmt++ = AV_PIX_FMT_YUV444P14;
         } else if (CHROMA422(h))
-            return AV_PIX_FMT_YUV422P14;
+            *fmt++ = AV_PIX_FMT_YUV422P14;
         else
-            return AV_PIX_FMT_YUV420P14;
+            *fmt++ = AV_PIX_FMT_YUV420P14;
         break;
     case 8:
+#if CONFIG_H264_VDPAU_HWACCEL
+        *fmt++ = AV_PIX_FMT_VDPAU;
+#endif
         if (CHROMA444(h)) {
-            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
-                av_log(h->avctx, AV_LOG_DEBUG, "Detected GBR colorspace.\n");
-                return AV_PIX_FMT_GBR24P;
-            } else if (h->avctx->colorspace == AVCOL_SPC_YCGCO) {
+            if (h->avctx->colorspace == AVCOL_SPC_YCGCO)
                 av_log(h->avctx, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
-            }
-            return h->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ444P
-                                                                : AV_PIX_FMT_YUV444P;
+            if (h->avctx->colorspace == AVCOL_SPC_RGB)
+                *fmt++ = AV_PIX_FMT_GBRP;
+            else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+                *fmt++ = AV_PIX_FMT_YUVJ444P;
+            else
+                *fmt++ = AV_PIX_FMT_YUV444P;
         } else if (CHROMA422(h)) {
-            return h->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ422P
-                                                             : AV_PIX_FMT_YUV422P;
+            if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+                *fmt++ = AV_PIX_FMT_YUVJ422P;
+            else
+                *fmt++ = AV_PIX_FMT_YUV422P;
         } else {
-            int i;
-            const enum AVPixelFormat * fmt = h->avctx->codec->pix_fmts ?
-                                        h->avctx->codec->pix_fmts :
-                                        h->avctx->color_range == AVCOL_RANGE_JPEG ?
-                                        h264_hwaccel_pixfmt_list_jpeg_420 :
-                                        h264_hwaccel_pixfmt_list_420;
-
-            for (i=0; fmt[i] != AV_PIX_FMT_NONE; i++)
-                if (fmt[i] == h->avctx->pix_fmt && !force_callback)
-                    return fmt[i];
-            return ff_thread_get_format(h->avctx, fmt);
+#if CONFIG_H264_DXVA2_HWACCEL
+            *fmt++ = AV_PIX_FMT_DXVA2_VLD;
+#endif
+#if CONFIG_H264_VAAPI_HWACCEL
+            *fmt++ = AV_PIX_FMT_VAAPI_VLD;
+#endif
+#if CONFIG_H264_VDA_HWACCEL
+            *fmt++ = AV_PIX_FMT_VDA_VLD;
+            *fmt++ = AV_PIX_FMT_VDA;
+#endif
+            if (h->avctx->codec->pix_fmts)
+                choices = h->avctx->codec->pix_fmts;
+            else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+                *fmt++ = AV_PIX_FMT_YUVJ420P;
+            else
+                *fmt++ = AV_PIX_FMT_YUV420P;
         }
         break;
     default:
@@ -1103,6 +1097,13 @@ static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
                "Unsupported bit depth %d\n", h->sps.bit_depth_luma);
         return AVERROR_INVALIDDATA;
     }
+
+    *fmt = AV_PIX_FMT_NONE;
+
+    for (i=0; choices[i] != AV_PIX_FMT_NONE; i++)
+        if (choices[i] == h->avctx->pix_fmt && !force_callback)
+            return choices[i];
+    return ff_thread_get_format(h->avctx, choices);
 }
 
 /* export coded and cropped frame dimensions to AVCodecContext */
@@ -1110,11 +1111,13 @@ static int init_dimensions(H264Context *h)
 {
     int width  = h->width  - (h->sps.crop_right + h->sps.crop_left);
     int height = h->height - (h->sps.crop_top   + h->sps.crop_bottom);
+    int crop_present = h->sps.crop_left  || h->sps.crop_top ||
+                       h->sps.crop_right || h->sps.crop_bottom;
     av_assert0(h->sps.crop_right + h->sps.crop_left < (unsigned)h->width);
     av_assert0(h->sps.crop_top + h->sps.crop_bottom < (unsigned)h->height);
 
     /* handle container cropping */
-    if (!h->sps.crop &&
+    if (!crop_present &&
         FFALIGN(h->avctx->width,  16) == h->width &&
         FFALIGN(h->avctx->height, 16) == h->height) {
         width  = h->avctx->width;
@@ -1128,7 +1131,10 @@ static int init_dimensions(H264Context *h)
             return AVERROR_INVALIDDATA;
 
         av_log(h->avctx, AV_LOG_WARNING, "Ignoring cropping information.\n");
-        h->sps.crop_bottom = h->sps.crop_top = h->sps.crop_right = h->sps.crop_left = 0;
+        h->sps.crop_bottom =
+        h->sps.crop_top    =
+        h->sps.crop_right  =
+        h->sps.crop_left   =
         h->sps.crop        = 0;
 
         width  = h->width;
@@ -1150,8 +1156,7 @@ static int h264_slice_header_init(H264Context *h, int reinit)
                     h->avctx->thread_count : 1;
     int i, ret;
 
-    h->avctx->sample_aspect_ratio = h->sps.sar;
-    av_assert0(h->avctx->sample_aspect_ratio.den);
+    ff_set_sar(h->avctx, h->sps.sar);
     av_pix_fmt_get_chroma_sub_sample(h->avctx->pix_fmt,
                                      &h->chroma_x_shift, &h->chroma_y_shift);
 
@@ -1159,8 +1164,8 @@ static int h264_slice_header_init(H264Context *h, int reinit)
         int64_t den = h->sps.time_scale;
         if (h->x264_build < 44U)
             den *= 2;
-        av_reduce(&h->avctx->time_base.num, &h->avctx->time_base.den,
-                  h->sps.num_units_in_tick, den, 1 << 30);
+        av_reduce(&h->avctx->framerate.den, &h->avctx->framerate.num,
+                  h->sps.num_units_in_tick * h->avctx->ticks_per_frame, den, 1 << 30);
     }
 
     if (reinit)
@@ -1172,7 +1177,7 @@ static int h264_slice_header_init(H264Context *h, int reinit)
     ret = ff_h264_alloc_tables(h);
     if (ret < 0) {
         av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
-        return ret;
+        goto fail;
     }
 
     if (nb_slices > H264_MAX_THREADS || (nb_slices > h->mb_height && h->mb_height)) {
@@ -1191,18 +1196,17 @@ static int h264_slice_header_init(H264Context *h, int reinit)
         ret = ff_h264_context_init(h);
         if (ret < 0) {
             av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
-            return ret;
+            goto fail;
         }
     } else {
         for (i = 1; i < h->slice_context_count; i++) {
             H264Context *c;
             c                    = h->thread_context[i] = av_mallocz(sizeof(H264Context));
-            if (!c)
-                return AVERROR(ENOMEM);
-            c->avctx             = h->avctx;
-            if (CONFIG_ERROR_RESILIENCE) {
-                c->dsp               = h->dsp;
+            if (!c) {
+                ret = AVERROR(ENOMEM);
+                goto fail;
             }
+            c->avctx             = h->avctx;
             c->vdsp              = h->vdsp;
             c->h264dsp           = h->h264dsp;
             c->h264qpel          = h->h264qpel;
@@ -1219,7 +1223,6 @@ static int h264_slice_header_init(H264Context *h, int reinit)
             c->chroma_y_shift    = h->chroma_y_shift;
             c->qscale            = h->qscale;
             c->droppable         = h->droppable;
-            c->data_partitioning = h->data_partitioning;
             c->low_delay         = h->low_delay;
             c->mb_width          = h->mb_width;
             c->mb_height         = h->mb_height;
@@ -1237,13 +1240,17 @@ static int h264_slice_header_init(H264Context *h, int reinit)
         for (i = 0; i < h->slice_context_count; i++)
             if ((ret = ff_h264_context_init(h->thread_context[i])) < 0) {
                 av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
-                return ret;
+                goto fail;
             }
     }
 
     h->context_initialized = 1;
 
     return 0;
+fail:
+    ff_h264_free_tables(h, 0);
+    h->context_initialized = 0;
+    return ret;
 }
 
 static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
@@ -1277,6 +1284,8 @@ int ff_h264_decode_slice_header(H264Context *h, H264Context *h0)
     int must_reinit;
     int needs_reinit = 0;
     int field_pic_flag, bottom_field_flag;
+    int first_slice = h == h0 && !h0->current_slice;
+    PPS *pps;
 
     h->qpel_put = h->h264qpel.put_h264_qpel_pixels_tab;
     h->qpel_avg = h->h264qpel.avg_h264_qpel_pixels_tab;
@@ -1350,18 +1359,27 @@ int ff_h264_decode_slice_header(H264Context *h, H264Context *h0)
                h0->au_pps_id, pps_id);
         return AVERROR_INVALIDDATA;
     }
-    h->pps = *h0->pps_buffers[pps_id];
 
-    if (!h0->sps_buffers[h->pps.sps_id]) {
+    pps = h0->pps_buffers[pps_id];
+
+    if (!h0->sps_buffers[pps->sps_id]) {
         av_log(h->avctx, AV_LOG_ERROR,
                "non-existing SPS %u referenced\n",
                h->pps.sps_id);
         return AVERROR_INVALIDDATA;
     }
+    if (first_slice)
+        h->pps = *h0->pps_buffers[pps_id];
 
-    if (h->pps.sps_id != h->sps.sps_id ||
-        h->pps.sps_id != h->current_sps_id ||
-        h0->sps_buffers[h->pps.sps_id]->new) {
+    if (pps->sps_id != h->sps.sps_id ||
+        pps->sps_id != h->current_sps_id ||
+        h0->sps_buffers[pps->sps_id]->new) {
+
+        if (!first_slice) {
+            av_log(h->avctx, AV_LOG_ERROR,
+               "SPS changed in the middle of the frame\n");
+            return AVERROR_INVALIDDATA;
+        }
 
         h->sps = *h0->sps_buffers[h->pps.sps_id];
 
@@ -1652,9 +1670,11 @@ int ff_h264_decode_slice_header(H264Context *h, H264Context *h0)
             if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
                 /* Previous field is unmatched. Don't display it, but let it
                  * remain for reference if marked as such. */
+                h0->missing_fields ++;
                 h0->cur_pic_ptr = NULL;
                 h0->first_field = FIELD_PICTURE(h);
             } else {
+                h0->missing_fields = 0;
                 if (h0->cur_pic_ptr->frame_num != h->frame_num) {
                     ff_thread_report_progress(&h0->cur_pic_ptr->tf, INT_MAX,
                                               h0->picture_structure==PICT_BOTTOM_FIELD);
@@ -1950,7 +1970,12 @@ int ff_h264_decode_slice_header(H264Context *h, H264Context *h0)
                              (h->ref_list[j][i].reference & 3);
     }
 
-    if (h->ref_count[0]) ff_h264_set_erpic(&h->er.last_pic, &h->ref_list[0][0]);
+    if (h->ref_count[0]) {
+        ff_h264_set_erpic(&h->er.last_pic, &h->ref_list[0][0]);
+    } else if (h->last_pic_for_ec.f.buf[0]) {
+        ff_h264_set_erpic(&h->er.last_pic, &h->last_pic_for_ec);
+    }
+
     if (h->ref_count[1]) ff_h264_set_erpic(&h->er.next_pic, &h->ref_list[1][0]);
 
     h->er.ref_count = h->ref_count[0];
@@ -2492,14 +2517,12 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
                     if (   get_bits_left(&h->gb) == 0
                         || get_bits_left(&h->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
                         er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
-                                     h->mb_x - 1, h->mb_y,
-                                     ER_MB_END);
+                                     h->mb_x - 1, h->mb_y, ER_MB_END);
 
                         return 0;
                     } else {
                         er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
-                                     h->mb_x, h->mb_y,
-                                     ER_MB_END);
+                                     h->mb_x, h->mb_y, ER_MB_END);
 
                         return AVERROR_INVALIDDATA;
                     }
@@ -2512,8 +2535,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
 
                 if (get_bits_left(&h->gb) == 0) {
                     er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
-                                 h->mb_x - 1, h->mb_y,
-                                 ER_MB_END);
+                                 h->mb_x - 1, h->mb_y, ER_MB_END);
                     if (h->mb_x > lf_x_start)
                         loop_filter(h, lf_x_start, h->mb_x);