Merge commit '24abd806ea0cfb0d988d2f0044eac79cff12918c'
author    Michael Niedermayer <michaelni@gmx.at>  Fri, 6 Dec 2013 11:21:31 +0000 (12:21 +0100)
committer Michael Niedermayer <michaelni@gmx.at>  Fri, 6 Dec 2013 12:30:23 +0000 (13:30 +0100)
* commit '24abd806ea0cfb0d988d2f0044eac79cff12918c':
  ljpegenc: deMpegEncContextize

Conflicts:
libavcodec/ljpegenc.c
libavcodec/mpegvideo.h
libavcodec/mpegvideo_enc.c
tests/ref/vsynth/vsynth1-ljpeg
tests/ref/vsynth/vsynth2-ljpeg

Merged-by: Michael Niedermayer <michaelni@gmx.at>
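
(For orientation, "deMpegEncContextize" means the lossless JPEG encoder stops piggybacking on the shared MpegEncContext and carries its own small private context instead. A minimal sketch of that context, reconstructed from the s->... accesses in the ljpegenc.c hunks below, is given here; exact types, array sizes and field order are assumptions, not the verbatim FFmpeg definition.)

    /* Sketch only -- inferred from the field accesses in the diff below.
     * DSPContext/ScanTable and the ff_mjpeg_* helpers are libavcodec
     * internals (dsputil.h, mjpegenc.h); array sizes are assumptions. */
    typedef struct LJpegEncContext {
        DSPContext dsp;                       /* provides idct_permutation for ff_init_scantable()       */
        ScanTable  scantable;                 /* zigzag scan passed to ff_mjpeg_encode_picture_header()  */
        uint16_t   matrix[64];                /* quantization matrix passed to the picture header        */

        int vsample[3];                       /* per-component vertical sampling factors   */
        int hsample[3];                       /* per-component horizontal sampling factors */

        uint8_t  huff_size_dc_luminance[12];  /* DC Huffman tables built in ljpeg_encode_init() */
        uint16_t huff_code_dc_luminance[12];
        uint8_t  huff_size_dc_chrominance[12];
        uint16_t huff_code_dc_chrominance[12];

        uint16_t (*scratch)[4];               /* one scratch row for the RGB prediction path */
    } LJpegEncContext;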
libavcodec/ljpegenc.c
libavcodec/mjpegenc.c
libavcodec/mpegvideo.h
libavcodec/mpegvideo_enc.c

@@@ -40,56 -60,38 +60,41 @@@ typedef struct LJpegEncContext 
  static int encode_picture_lossless(AVCodecContext *avctx, AVPacket *pkt,
                                     const AVFrame *pict, int *got_packet)
  {
-     MpegEncContext * const s = avctx->priv_data;
-     MJpegContext * const m = s->mjpeg_ctx;
-     const int width= s->width;
-     const int height= s->height;
-     AVFrame * const p = &s->current_picture.f;
+     LJpegEncContext *s = avctx->priv_data;
+     PutBitContext pb;
+     const int width  = avctx->width;
+     const int height = avctx->height;
      const int predictor= avctx->prediction_method+1;
-     const int mb_width  = (width  + s->mjpeg_hsample[0] - 1) / s->mjpeg_hsample[0];
-     const int mb_height = (height + s->mjpeg_vsample[0] - 1) / s->mjpeg_vsample[0];
-     int ret, max_pkt_size = FF_MIN_BUFFER_SIZE;
+     const int mb_width  = (width  + s->hsample[0] - 1) / s->hsample[0];
+     const int mb_height = (height + s->vsample[0] - 1) / s->vsample[0];
+     int max_pkt_size = FF_MIN_BUFFER_SIZE;
+     int ret, header_bits;
  
--    if (avctx->pix_fmt == AV_PIX_FMT_BGRA)
++    if(    avctx->pix_fmt == AV_PIX_FMT_BGR0
++        || avctx->pix_fmt == AV_PIX_FMT_BGRA
++        || avctx->pix_fmt == AV_PIX_FMT_BGR24)
          max_pkt_size += width * height * 3 * 4;
      else {
          max_pkt_size += mb_width * mb_height * 3 * 4
-                         * s->mjpeg_hsample[0] * s->mjpeg_vsample[0];
-     }
-     if (!s->rd_scratchpad) {
-         int alloc_size = FFALIGN(FFABS(pict->linesize[0]) + 64, 32);
-         s->me.scratchpad =
-         s->rd_scratchpad = av_mallocz(alloc_size * 4 * 16 * 2);
-         if (!s->rd_scratchpad) {
-             av_log(avctx, AV_LOG_ERROR, "failed to allocate context scratch buffers.\n");
-             return AVERROR(ENOMEM);
-         }
+                         * s->hsample[0] * s->vsample[0];
      }
 -    if ((ret = ff_alloc_packet(pkt, max_pkt_size)) < 0) {
 -        av_log(avctx, AV_LOG_ERROR, "Error getting output packet of size %d.\n", max_pkt_size);
 +
 +    if ((ret = ff_alloc_packet2(avctx, pkt, max_pkt_size)) < 0)
          return ret;
 -    }
  
-     init_put_bits(&s->pb, pkt->data, pkt->size);
+     init_put_bits(&pb, pkt->data, pkt->size);
  
-     av_frame_unref(p);
-     ret = av_frame_ref(p, pict);
-     if (ret < 0)
-         return ret;
-     p->pict_type= AV_PICTURE_TYPE_I;
-     p->key_frame= 1;
-     ff_mjpeg_encode_picture_header(avctx, &s->pb, &s->intra_scantable,
-                                    s->intra_matrix);
+     ff_mjpeg_encode_picture_header(avctx, &pb, &s->scantable,
+                                    s->matrix);
  
-     s->header_bits= put_bits_count(&s->pb);
+     header_bits = put_bits_count(&pb);
  
-     if(avctx->pix_fmt == AV_PIX_FMT_BGR0
 -    if(avctx->pix_fmt == AV_PIX_FMT_BGRA){
++    if(    avctx->pix_fmt == AV_PIX_FMT_BGR0
 +        || avctx->pix_fmt == AV_PIX_FMT_BGRA
 +        || avctx->pix_fmt == AV_PIX_FMT_BGR24){
          int x, y, i;
-         const int linesize= p->linesize[0];
-         uint16_t (*buffer)[4]= (void *) s->rd_scratchpad;
+         const int linesize = pict->linesize[0];
+         uint16_t (*buffer)[4] = s->scratch;
          int left[3], top[3], topleft[3];
  
          for(i=0; i<3; i++){
      }
  
      emms_c();
-     av_assert0(s->esc_pos == s->header_bits >> 3);
  
-     ff_mjpeg_escape_FF(&s->pb, s->esc_pos);
-     ff_mjpeg_encode_picture_trailer(&s->pb, s->header_bits);
-     s->picture_number++;
++    ff_mjpeg_escape_FF(&pb, header_bits >> 3);
+     ff_mjpeg_encode_picture_trailer(&pb, header_bits);
  
-     flush_put_bits(&s->pb);
-     pkt->size   = put_bits_ptr(&s->pb) - s->pb.buf;
+     flush_put_bits(&pb);
+     pkt->size   = put_bits_ptr(&pb) - pb.buf;
      pkt->flags |= AV_PKT_FLAG_KEY;
      *got_packet = 1;
  
  //    return (put_bits_count(&f->pb)+7)/8;
  }
  
 -    if (avctx->pix_fmt   == AV_PIX_FMT_BGRA) {
+ static av_cold int ljpeg_encode_close(AVCodecContext *avctx)
+ {
+     LJpegEncContext *s = avctx->priv_data;
+     av_frame_free(&avctx->coded_frame);
+     av_freep(&s->scratch);
+     return 0;
+ }
+ static av_cold int ljpeg_encode_init(AVCodecContext *avctx)
+ {
+     LJpegEncContext *s = avctx->priv_data;
+     int chroma_v_shift, chroma_h_shift;
+     if ((avctx->pix_fmt == AV_PIX_FMT_YUV420P ||
+          avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
+          avctx->pix_fmt == AV_PIX_FMT_YUV444P) &&
+         avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
+         av_log(avctx, AV_LOG_ERROR,
+                "Limited range YUV is non-standard, set strict_std_compliance to "
+                "at least unofficial to use it.\n");
+         return AVERROR(EINVAL);
+     }
+     avctx->coded_frame = av_frame_alloc();
+     if (!avctx->coded_frame)
+         return AVERROR(ENOMEM);
+     avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
+     avctx->coded_frame->key_frame = 1;
+     s->scratch = av_malloc_array(avctx->width + 1, sizeof(*s->scratch));
+     ff_dsputil_init(&s->dsp, avctx);
+     ff_init_scantable(s->dsp.idct_permutation, &s->scantable, ff_zigzag_direct);
+     av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift,
+                                      &chroma_v_shift);
++    if (   avctx->pix_fmt == AV_PIX_FMT_BGR0
++        || avctx->pix_fmt == AV_PIX_FMT_BGRA
++        || avctx->pix_fmt == AV_PIX_FMT_BGR24) {
+         s->vsample[0] = s->hsample[0] =
+         s->vsample[1] = s->hsample[1] =
+         s->vsample[2] = s->hsample[2] = 1;
++    } else if (avctx->pix_fmt == AV_PIX_FMT_YUV444P || avctx->pix_fmt == AV_PIX_FMT_YUVJ444P) {
++        s->vsample[0] = s->vsample[1] = s->vsample[2] = 2;
++        s->hsample[0] = s->hsample[1] = s->hsample[2] = 1;
+     } else {
+         s->vsample[0] = 2;
+         s->vsample[1] = 2 >> chroma_v_shift;
+         s->vsample[2] = 2 >> chroma_v_shift;
+         s->hsample[0] = 2;
+         s->hsample[1] = 2 >> chroma_h_shift;
+         s->hsample[2] = 2 >> chroma_h_shift;
+     }
+     ff_mjpeg_build_huffman_codes(s->huff_size_dc_luminance,
+                                  s->huff_code_dc_luminance,
+                                  avpriv_mjpeg_bits_dc_luminance,
+                                  avpriv_mjpeg_val_dc);
+     ff_mjpeg_build_huffman_codes(s->huff_size_dc_chrominance,
+                                  s->huff_code_dc_chrominance,
+                                  avpriv_mjpeg_bits_dc_chrominance,
+                                  avpriv_mjpeg_val_dc);
+     return 0;
+ }
  
- AVCodec ff_ljpeg_encoder = { //FIXME avoid MPV_* lossless JPEG should not need them
+ AVCodec ff_ljpeg_encoder = {
      .name           = "ljpeg",
      .long_name      = NULL_IF_CONFIG_SMALL("Lossless JPEG"),
      .type           = AVMEDIA_TYPE_VIDEO,
      .id             = AV_CODEC_ID_LJPEG,
-     .priv_data_size = sizeof(MpegEncContext),
-     .init           = ff_MPV_encode_init,
+     .priv_data_size = sizeof(LJpegEncContext),
+     .init           = ljpeg_encode_init,
      .encode2        = encode_picture_lossless,
-     .close          = ff_MPV_encode_end,
+     .close          = ljpeg_encode_close,
 -    .pix_fmts       = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUVJ420P,
 -                                                    AV_PIX_FMT_YUVJ422P,
 -                                                    AV_PIX_FMT_YUVJ444P,
 -                                                    AV_PIX_FMT_BGRA,
 -                                                    AV_PIX_FMT_YUV420P,
 -                                                    AV_PIX_FMT_YUV422P,
 -                                                    AV_PIX_FMT_YUVJ444P,
 -                                                    AV_PIX_FMT_NONE },
 +    .pix_fmts       = (const enum AVPixelFormat[]){
-         AV_PIX_FMT_BGR24, AV_PIX_FMT_BGRA, AV_PIX_FMT_BGR0,
++        AV_PIX_FMT_BGR24   , AV_PIX_FMT_BGRA    , AV_PIX_FMT_BGR0,
 +        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,
-         AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P,
++        AV_PIX_FMT_YUV420P , AV_PIX_FMT_YUV444P , AV_PIX_FMT_YUV422P,
 +        AV_PIX_FMT_NONE},
  };
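
(A quick worked example for the sampling-factor setup in ljpeg_encode_init() above, based on the chroma shifts av_pix_fmt_get_chroma_sub_sample() reports: YUV420P has shifts (1,1), so hsample = vsample = {2,1,1}; YUV422P has shifts (1,0), so hsample = {2,1,1} and vsample = {2,2,2}; YUV444P takes its explicit branch with hsample = {1,1,1} and vsample = {2,2,2}; the BGR formats use all-ones, i.e. every component is coded at full resolution.)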
@@@ -211,16 -181,12 +211,14 @@@ void ff_mjpeg_encode_picture_header(AVC
      int chroma_h_shift, chroma_v_shift;
      const int lossless = avctx->codec_id != AV_CODEC_ID_MJPEG;
      int hsample[3], vsample[3];
-     MpegEncContext *s = avctx->priv_data;
-     av_assert0(avctx->codec->priv_data_size == sizeof(MpegEncContext));
 +    int i;
  
      av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift,
                                       &chroma_v_shift);
 -
      if (avctx->codec->id == AV_CODEC_ID_LJPEG &&
 -        avctx->pix_fmt   == AV_PIX_FMT_BGRA) {
 +        (avctx->pix_fmt == AV_PIX_FMT_BGR0
-             || s->avctx->pix_fmt == AV_PIX_FMT_BGRA
-             || s->avctx->pix_fmt == AV_PIX_FMT_BGR24)) {
++         || avctx->pix_fmt == AV_PIX_FMT_BGRA
++         || avctx->pix_fmt == AV_PIX_FMT_BGR24)) {
          vsample[0] = hsample[0] =
          vsample[1] = hsample[1] =
          vsample[2] = hsample[2] = 1;
      }
  
      put_bits(pb, 8, 0); /* Ah/Al (not used) */
-     s->esc_pos = put_bits_count(pb) >> 3;
-     for(i=1; i<s->slice_context_count; i++)
-         s->thread_context[i]->esc_pos = 0;
 +
 +end:
++    if (avctx->codec->priv_data_size == sizeof(MpegEncContext)) {
++        MpegEncContext *s = avctx->priv_data;
++        av_assert0(avctx->codec->priv_data_size == sizeof(MpegEncContext));
++
++        s->esc_pos = put_bits_count(pb) >> 3;
++        for(i=1; i<s->slice_context_count; i++)
++            s->thread_context[i]->esc_pos = 0;
++    }
  }
  
 -static void escape_FF(PutBitContext *pb, int start)
 +void ff_mjpeg_escape_FF(PutBitContext *pb, int start)
  {
 -    int size = put_bits_count(pb) - start * 8;
 +    int size;
      int i, ff_count;
      uint8_t *buf = pb->buf + start;
      int align= (-(size_t)(buf))&3;
@@@ -521,29 -437,6 +524,33 @@@ void ff_mjpeg_encode_mb(MpegEncContext 
      s->i_tex_bits += get_bits_diff(s);
  }
  
-         pic.data[i] += (pic.linesize[i] * (s->mjpeg_vsample[i] * (8 * s->mb_height -((s->height/V_MAX)&7)) - 1 ));
 +// maximum over s->mjpeg_vsample[i]
 +#define V_MAX 2
 +static int amv_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
 +                              const AVFrame *pic_arg, int *got_packet)
 +
 +{
 +    MpegEncContext *s = avctx->priv_data;
 +    AVFrame pic = *pic_arg;
 +    int i;
++    int chroma_h_shift, chroma_v_shift;
++
++    av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift, &chroma_v_shift);
 +
 +    //CODEC_FLAG_EMU_EDGE have to be cleared
 +    if(s->avctx->flags & CODEC_FLAG_EMU_EDGE)
 +        return -1;
 +
 +    //picture should be flipped upside-down
 +    for(i=0; i < 3; i++) {
++        int vsample = i ? 2 >> chroma_v_shift : 2;
++        pic.data[i] += (pic.linesize[i] * (vsample * (8 * s->mb_height -((s->height/V_MAX)&7)) - 1 ));
 +        pic.linesize[i] *= -1;
 +    }
 +    return ff_MPV_encode_picture(avctx, pkt, &pic, got_packet);
 +}
 +
 +#if CONFIG_MJPEG_ENCODER
  AVCodec ff_mjpeg_encoder = {
      .name           = "mjpeg",
      .long_name      = NULL_IF_CONFIG_SMALL("MJPEG (Motion JPEG)"),
@@@ -625,9 -609,6 +625,7 @@@ typedef struct MpegEncContext 
  
      /* MJPEG specific */
      struct MJpegContext *mjpeg_ctx;
-     int mjpeg_vsample[3];       ///< vertical sampling factors, default = {2, 1, 1}
-     int mjpeg_hsample[3];       ///< horizontal sampling factors, default = {2, 1, 1}
 +    int esc_pos;
  
      /* MSMPEG4 specific */
      int mv_table_index;
@@@ -256,29 -239,11 +255,14 @@@ av_cold int ff_MPV_encode_init(AVCodecC
              return -1;
          }
          break;
-     case AV_CODEC_ID_LJPEG:
-         if (avctx->pix_fmt != AV_PIX_FMT_YUVJ420P &&
-             avctx->pix_fmt != AV_PIX_FMT_YUVJ422P &&
-             avctx->pix_fmt != AV_PIX_FMT_YUVJ444P &&
-             avctx->pix_fmt != AV_PIX_FMT_BGR0     &&
-             avctx->pix_fmt != AV_PIX_FMT_BGRA     &&
-             avctx->pix_fmt != AV_PIX_FMT_BGR24    &&
-             ((avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
-               avctx->pix_fmt != AV_PIX_FMT_YUV422P &&
-               avctx->pix_fmt != AV_PIX_FMT_YUV444P) ||
-              avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL)) {
-             av_log(avctx, AV_LOG_ERROR, "colorspace not supported in LJPEG\n");
-             return -1;
-         }
-         break;
      case AV_CODEC_ID_MJPEG:
 +    case AV_CODEC_ID_AMV:
          if (avctx->pix_fmt != AV_PIX_FMT_YUVJ420P &&
              avctx->pix_fmt != AV_PIX_FMT_YUVJ422P &&
 +            avctx->pix_fmt != AV_PIX_FMT_YUVJ444P &&
              ((avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
 -              avctx->pix_fmt != AV_PIX_FMT_YUV422P) ||
 +              avctx->pix_fmt != AV_PIX_FMT_YUV422P &&
 +              avctx->pix_fmt != AV_PIX_FMT_YUV444P) ||
               avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL)) {
              av_log(avctx, AV_LOG_ERROR, "colorspace not supported in jpeg\n");
              return -1;
      if (avctx->inter_quant_bias != FF_DEFAULT_QUANT_BIAS)
          s->inter_quant_bias = avctx->inter_quant_bias;
  
-     avcodec_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift, &chroma_v_shift);
 +    av_log(avctx, AV_LOG_DEBUG, "intra_quant_bias = %d inter_quant_bias = %d\n",s->intra_quant_bias,s->inter_quant_bias);
 +
      if (avctx->codec_id == AV_CODEC_ID_MPEG4 &&
          s->avctx->time_base.den > (1 << 16) - 1) {
          av_log(avctx, AV_LOG_ERROR,
          avctx->delay  = s->low_delay ? 0 : (s->max_b_frames + 1);
          s->rtp_mode   = 1;
          break;
-     case AV_CODEC_ID_LJPEG:
      case AV_CODEC_ID_MJPEG:
 +    case AV_CODEC_ID_AMV:
          s->out_format = FMT_MJPEG;
          s->intra_only = 1; /* force intra only for jpeg */
-         if (avctx->codec->id == AV_CODEC_ID_LJPEG &&
-             (avctx->pix_fmt == AV_PIX_FMT_BGR0
-              || s->avctx->pix_fmt == AV_PIX_FMT_BGRA
-              || s->avctx->pix_fmt == AV_PIX_FMT_BGR24)) {
-             s->mjpeg_vsample[0] = s->mjpeg_hsample[0] =
-             s->mjpeg_vsample[1] = s->mjpeg_hsample[1] =
-             s->mjpeg_vsample[2] = s->mjpeg_hsample[2] = 1;
-         } else if (avctx->pix_fmt == AV_PIX_FMT_YUV444P || avctx->pix_fmt == AV_PIX_FMT_YUVJ444P) {
-             s->mjpeg_vsample[0] = s->mjpeg_vsample[1] = s->mjpeg_vsample[2] = 2;
-             s->mjpeg_hsample[0] = s->mjpeg_hsample[1] = s->mjpeg_hsample[2] = 1;
-         } else {
-             s->mjpeg_vsample[0] = 2;
-             s->mjpeg_vsample[1] = 2 >> chroma_v_shift;
-             s->mjpeg_vsample[2] = 2 >> chroma_v_shift;
-             s->mjpeg_hsample[0] = 2;
-             s->mjpeg_hsample[1] = 2 >> chroma_h_shift;
-             s->mjpeg_hsample[2] = 2 >> chroma_h_shift;
-         }
-         if (!(CONFIG_MJPEG_ENCODER || CONFIG_LJPEG_ENCODER) ||
+         if (!CONFIG_MJPEG_ENCODER ||
              ff_mjpeg_encode_init(s) < 0)
              return -1;
          avctx->delay = 0;