dcadec: Do not decode the XCh extension when downmixing to stereo
author    Tim Walker <tdskywalker@gmail.com>
Sat, 12 Apr 2014 20:11:52 +0000 (22:11 +0200)
committer Luca Barbato <lu_zero@gentoo.org>
Sat, 12 Apr 2014 21:05:07 +0000 (23:05 +0200)
This is neither necessary nor currently supported.

Signed-off-by: Luca Barbato <lu_zero@gentoo.org>
libavcodec/dcadec.c

index 92edce7..49d7aac 100644
@@ -1850,6 +1850,16 @@ static int dca_decode_frame(AVCodecContext *avctx, void *data,
     if (s->amode < 16) {
         avctx->channel_layout = dca_core_channel_layout[s->amode];
 
+        if (s->prim_channels + !!s->lfe > 2 &&
+            avctx->request_channel_layout == AV_CH_LAYOUT_STEREO) {
+            /*
+             * Neither the core's auxiliary data nor our default tables contain
+             * downmix coefficients for the additional channel coded in the XCh
+             * extension, so when we're doing a Stereo downmix, don't decode it.
+             */
+            s->xch_disable = 1;
+        }
+
 #if FF_API_REQUEST_CHANNELS
 FF_DISABLE_DEPRECATION_WARNINGS
         if (s->xch_present && !s->xch_disable &&